From 2762c398193e2efa3f857f2e1d3a7560d3ee9931 Mon Sep 17 00:00:00 2001 From: Russell Keith-Magee Date: Mon, 9 Sep 2024 11:08:49 +0800 Subject: [PATCH] Update patch to Python 3.12.6. Also includes updates to: - XZ 5.6.2 - OpenSSL 3.0.15 --- Makefile | 2 +- patch/Python/Python.patch | 46498 +----------------------------------- 2 files changed, 334 insertions(+), 46166 deletions(-) diff --git a/Makefile b/Makefile index 689091d..6e29d5a 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,7 @@ BUILD_NUMBER=custom # of a release cycle, as official binaries won't be published. # PYTHON_MICRO_VERSION is the full version number, without any alpha/beta/rc suffix. (e.g., 3.10.0) # PYTHON_VER is the major/minor version (e.g., 3.10) -PYTHON_VERSION=3.12.4 +PYTHON_VERSION=3.12.6 PYTHON_PKG_VERSION=$(PYTHON_VERSION) PYTHON_MICRO_VERSION=$(shell echo $(PYTHON_VERSION) | grep -Eo "\d+\.\d+\.\d+") PYTHON_PKG_MICRO_VERSION=$(shell echo $(PYTHON_PKG_VERSION) | grep -Eo "\d+\.\d+\.\d+") diff --git a/patch/Python/Python.patch b/patch/Python/Python.patch index fdec128..0bcb11c 100644 --- a/patch/Python/Python.patch +++ b/patch/Python/Python.patch @@ -1,4117 +1,3 @@ -diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml -index b5b2765e438..d3e842d9f31 100644 ---- a/.azure-pipelines/ci.yml -+++ b/.azure-pipelines/ci.yml -@@ -1,4 +1,4 @@ --trigger: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7'] -+trigger: ['main', '3.13', '3.12', '3.11', '3.10', '3.9', '3.8'] - - jobs: - - job: Prebuild -diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml -index 663a11897d9..c64ca318677 100644 ---- a/.pre-commit-config.yaml -+++ b/.pre-commit-config.yaml -@@ -3,13 +3,21 @@ - rev: v0.3.4 - hooks: - - id: ruff -- name: Run Ruff on Lib/test/ -+ name: Run Ruff (lint) on Doc/ -+ args: [--exit-non-zero-on-fix] -+ files: ^Doc/ -+ - id: ruff -+ name: Run Ruff (lint) on Lib/test/ - args: [--exit-non-zero-on-fix] - files: ^Lib/test/ - - id: ruff -- name: Run Ruff on Argument Clinic -+ name: Run Ruff (lint) on Argument Clinic - args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml] - files: ^Tools/clinic/|Lib/test/test_clinic.py -+ - id: ruff-format -+ name: Run Ruff (format) on Doc/ -+ args: [--check] -+ files: ^Doc/ - - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 -diff --git a/.readthedocs.yml b/.readthedocs.yml -index 898a9ae89db..250d7ea0233 100644 ---- a/.readthedocs.yml -+++ b/.readthedocs.yml -@@ -8,11 +8,14 @@ - configuration: Doc/conf.py - - build: -- os: ubuntu-22.04 -+ os: ubuntu-24.04 - tools: - python: "3" - - commands: -+ - asdf plugin add uv -+ - asdf install uv latest -+ - asdf global uv latest - - make -C Doc venv html - - mkdir _readthedocs - - mv Doc/build/html _readthedocs/html ---- /dev/null -+++ b/Doc/.ruff.toml -@@ -0,0 +1,42 @@ -+target-version = "py312" # Align with the version in oldest_supported_sphinx -+fix = true -+output-format = "full" -+line-length = 79 -+extend-exclude = [ -+ "includes/*", -+ # Temporary exclusions: -+ "tools/extensions/pyspecific.py", -+] -+ -+[lint] -+preview = true -+select = [ -+ "C4", # flake8-comprehensions -+ "B", # flake8-bugbear -+ "E", # pycodestyle -+ "F", # pyflakes -+ "FA", # flake8-future-annotations -+ "FLY", # flynt -+ "FURB", # refurb -+ "G", # flake8-logging-format -+ "I", # isort -+ "LOG", # flake8-logging -+ "N", # pep8-naming -+ "PERF", # perflint -+ "PGH", # pygrep-hooks -+ "PT", # flake8-pytest-style -+ "TCH", # flake8-type-checking -+ "UP", # pyupgrade -+ "W", # pycodestyle -+] -+ignore = [ -+ "E501", # Ignore 
line length errors (we use auto-formatting) -+] -+ -+[format] -+preview = true -+quote-style = "preserve" -+docstring-code-format = true -+exclude = [ -+ "tools/extensions/lexers/*", -+] -diff --git a/Doc/Makefile b/Doc/Makefile -index 1cbfc722b01..dbd799fd400 100644 ---- a/Doc/Makefile -+++ b/Doc/Makefile -@@ -6,6 +6,7 @@ - # You can set these variables from the command line. - PYTHON = python3 - VENVDIR = ./venv -+UV = uv - SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-build - BLURB = PATH=$(VENVDIR)/bin:$$PATH blurb - JOBS = auto -@@ -150,14 +151,10 @@ - htmlview: html - $(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))" - --.PHONY: ensure-sphinx-autobuild --ensure-sphinx-autobuild: venv -- $(VENVDIR)/bin/sphinx-autobuild --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install sphinx-autobuild -- - .PHONY: htmllive - htmllive: SPHINXBUILD = $(VENVDIR)/bin/sphinx-autobuild - htmllive: SPHINXOPTS = --re-ignore="/venv/" --open-browser --delay 0 --htmllive: ensure-sphinx-autobuild html -+htmllive: _ensure-sphinx-autobuild html - - .PHONY: clean - clean: clean-venv -@@ -174,9 +171,14 @@ - echo "To recreate it, remove it first with \`make clean-venv'."; \ - else \ - echo "Creating venv in $(VENVDIR)"; \ -- $(PYTHON) -m venv $(VENVDIR); \ -- $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ -- $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ -+ if $(UV) --version >/dev/null 2>&1; then \ -+ $(UV) venv $(VENVDIR); \ -+ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \ -+ else \ -+ $(PYTHON) -m venv $(VENVDIR); \ -+ $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ -+ $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ -+ fi; \ - echo "The venv has been created in the $(VENVDIR) directory"; \ - fi - -@@ -186,58 +188,88 @@ - mkdir -p dist - - # archive the HTML -- make html -+ @echo "Building HTML..." -+ $(MAKE) html - cp -pPR build/html dist/python-$(DISTVERSION)-docs-html - tar -C dist -cf dist/python-$(DISTVERSION)-docs-html.tar python-$(DISTVERSION)-docs-html - bzip2 -9 -k dist/python-$(DISTVERSION)-docs-html.tar - (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-html.zip python-$(DISTVERSION)-docs-html) - rm -r dist/python-$(DISTVERSION)-docs-html - rm dist/python-$(DISTVERSION)-docs-html.tar -+ @echo "Build finished and archived!" - - # archive the text build -- make text -+ @echo "Building text..." -+ $(MAKE) text - cp -pPR build/text dist/python-$(DISTVERSION)-docs-text - tar -C dist -cf dist/python-$(DISTVERSION)-docs-text.tar python-$(DISTVERSION)-docs-text - bzip2 -9 -k dist/python-$(DISTVERSION)-docs-text.tar - (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-text.zip python-$(DISTVERSION)-docs-text) - rm -r dist/python-$(DISTVERSION)-docs-text - rm dist/python-$(DISTVERSION)-docs-text.tar -+ @echo "Build finished and archived!" - - # archive the A4 latex -+ @echo "Building LaTeX (A4 paper)..." - rm -rf build/latex -- make latex PAPER=a4 -- -sed -i 's/makeindex/makeindex -q/' build/latex/Makefile -- (cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2) -+ $(MAKE) latex PAPER=a4 -+ # remove zip & bz2 dependency on all-pdf, -+ # as otherwise the full latexmk process is run twice. 
-+ # ($$ is needed to escape the $; https://www.gnu.org/software/make/manual/make.html#Basics-of-Variable-References) -+ -sed -i 's/: all-$$(FMT)/:/' build/latex/Makefile -+ (cd build/latex; $(MAKE) clean && $(MAKE) --jobs=$$((`nproc`+1)) --output-sync LATEXMKOPTS='-quiet' all-pdf && $(MAKE) FMT=pdf zip bz2) - cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-a4.zip - cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-a4.tar.bz2 -+ @echo "Build finished and archived!" - - # archive the letter latex -+ @echo "Building LaTeX (US paper)..." - rm -rf build/latex -- make latex PAPER=letter -- -sed -i 's/makeindex/makeindex -q/' build/latex/Makefile -- (cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2) -+ $(MAKE) latex PAPER=letter -+ -sed -i 's/: all-$$(FMT)/:/' build/latex/Makefile -+ (cd build/latex; $(MAKE) clean && $(MAKE) --jobs=$$((`nproc`+1)) --output-sync LATEXMKOPTS='-quiet' all-pdf && $(MAKE) FMT=pdf zip bz2) - cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-letter.zip - cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-letter.tar.bz2 -+ @echo "Build finished and archived!" - - # copy the epub build -+ @echo "Building EPUB..." - rm -rf build/epub -- make epub -+ $(MAKE) epub - cp -pPR build/epub/Python.epub dist/python-$(DISTVERSION)-docs.epub -+ @echo "Build finished and archived!" - - # archive the texinfo build -+ @echo "Building Texinfo..." - rm -rf build/texinfo -- make texinfo -- make info --directory=build/texinfo -+ $(MAKE) texinfo -+ $(MAKE) info --directory=build/texinfo - cp -pPR build/texinfo dist/python-$(DISTVERSION)-docs-texinfo - tar -C dist -cf dist/python-$(DISTVERSION)-docs-texinfo.tar python-$(DISTVERSION)-docs-texinfo - bzip2 -9 -k dist/python-$(DISTVERSION)-docs-texinfo.tar - (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-texinfo.zip python-$(DISTVERSION)-docs-texinfo) - rm -r dist/python-$(DISTVERSION)-docs-texinfo - rm dist/python-$(DISTVERSION)-docs-texinfo.tar -+ @echo "Build finished and archived!" 
-+ -+.PHONY: _ensure-package -+_ensure-package: venv -+ if $(UV) --version >/dev/null 2>&1; then \ -+ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install $(PACKAGE); \ -+ else \ -+ $(VENVDIR)/bin/python3 -m pip install $(PACKAGE); \ -+ fi -+ -+.PHONY: _ensure-pre-commit -+_ensure-pre-commit: -+ $(MAKE) _ensure-package PACKAGE=pre-commit -+ -+.PHONY: _ensure-sphinx-autobuild -+_ensure-sphinx-autobuild: -+ $(MAKE) _ensure-package PACKAGE=sphinx-autobuild - - .PHONY: check --check: venv -- $(VENVDIR)/bin/python3 -m pre_commit --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install pre-commit -+check: _ensure-pre-commit - $(VENVDIR)/bin/python3 -m pre_commit run --all-files - - .PHONY: serve -@@ -254,12 +286,12 @@ - # for development releases: always build - .PHONY: autobuild-dev - autobuild-dev: -- make dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' -+ $(MAKE) dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' - - # for quick rebuilds (HTML only) - .PHONY: autobuild-dev-html - autobuild-dev-html: -- make html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' -+ $(MAKE) html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' - - # for stable releases: only build if not in pre-release stage (alpha, beta) - # release candidate downloads are okay, since the stable tree can be in that stage -@@ -269,7 +301,7 @@ - echo "Not building; $(DISTVERSION) is not a release version."; \ - exit 1;; \ - esac -- @make autobuild-dev -+ @$(MAKE) autobuild-dev - - .PHONY: autobuild-stable-html - autobuild-stable-html: -@@ -277,4 +309,4 @@ - echo "Not building; $(DISTVERSION) is not a release version."; \ - exit 1;; \ - esac -- @make autobuild-dev-html -+ @$(MAKE) autobuild-dev-html -diff --git a/Doc/README.rst b/Doc/README.rst -index a3bb5fa5445..efcee0db428 100644 ---- a/Doc/README.rst -+++ b/Doc/README.rst -@@ -28,7 +28,7 @@ - Using make - ---------- - --To get started on UNIX, you can create a virtual environment and build -+To get started on Unix, you can create a virtual environment and build - documentation with the commands:: - - make venv -@@ -40,13 +40,13 @@ - you can specify it using the ``VENVDIR`` variable. - - You can also skip creating the virtual environment altogether, in which case --the Makefile will look for instances of ``sphinx-build`` and ``blurb`` -+the ``Makefile`` will look for instances of ``sphinx-build`` and ``blurb`` - installed on your process ``PATH`` (configurable with the ``SPHINXBUILD`` and - ``BLURB`` variables). - --On Windows, we try to emulate the Makefile as closely as possible with a -+On Windows, we try to emulate the ``Makefile`` as closely as possible with a - ``make.bat`` file. If you need to specify the Python interpreter to use, --set the PYTHON environment variable. -+set the ``PYTHON`` environment variable. - - Available make targets are: - -@@ -62,15 +62,19 @@ - * "htmlview", which re-uses the "html" builder, but then opens the main page - in your default web browser. - -+* "htmllive", which re-uses the "html" builder, rebuilds the docs, -+ starts a local server, and automatically reloads the page in your browser -+ when you make changes to reST files (Unix only). -+ - * "htmlhelp", which builds HTML files and a HTML Help project file usable to - convert them into a single Compiled HTML (.chm) file -- these are popular - under Microsoft Windows, but very handy on every platform. - - To create the CHM file, you need to run the Microsoft HTML Help Workshop -- over the generated project (.hhp) file. The make.bat script does this for -+ over the generated project (.hhp) file. 
The ``make.bat`` script does this for - you on Windows. - --* "latex", which builds LaTeX source files as input to "pdflatex" to produce -+* "latex", which builds LaTeX source files as input to ``pdflatex`` to produce - PDF documents. - - * "text", which builds a plain text file for each source file. -@@ -95,8 +99,6 @@ - - * "check", which checks for frequent markup errors. - --* "serve", which serves the build/html directory on port 8000. -- - * "dist", (Unix only) which creates distributable archives of HTML, text, - PDF, and EPUB builds. - -diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst -index 657b10d3e0a..b8af24f53c3 100644 ---- a/Doc/c-api/arg.rst -+++ b/Doc/c-api/arg.rst -@@ -280,10 +280,10 @@ - length 1, to a C :c:expr:`int`. - - ``f`` (:class:`float`) [float] -- Convert a Python floating point number to a C :c:expr:`float`. -+ Convert a Python floating-point number to a C :c:expr:`float`. - - ``d`` (:class:`float`) [double] -- Convert a Python floating point number to a C :c:expr:`double`. -+ Convert a Python floating-point number to a C :c:expr:`double`. - - ``D`` (:class:`complex`) [Py_complex] - Convert a Python complex number to a C :c:type:`Py_complex` structure. -@@ -607,10 +607,10 @@ - object of length 1. - - ``d`` (:class:`float`) [double] -- Convert a C :c:expr:`double` to a Python floating point number. -+ Convert a C :c:expr:`double` to a Python floating-point number. - - ``f`` (:class:`float`) [float] -- Convert a C :c:expr:`float` to a Python floating point number. -+ Convert a C :c:expr:`float` to a Python floating-point number. - - ``D`` (:class:`complex`) [Py_complex \*] - Convert a C :c:type:`Py_complex` structure to a Python complex number. -diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst -index 456f7d89bca..9045689a6be 100644 ---- a/Doc/c-api/bytearray.rst -+++ b/Doc/c-api/bytearray.rst -@@ -42,17 +42,22 @@ - Return a new bytearray object from any object, *o*, that implements the - :ref:`buffer protocol `. - -+ On failure, return ``NULL`` with an exception set. -+ - - .. c:function:: PyObject* PyByteArray_FromStringAndSize(const char *string, Py_ssize_t len) - -- Create a new bytearray object from *string* and its length, *len*. On -- failure, ``NULL`` is returned. -+ Create a new bytearray object from *string* and its length, *len*. -+ -+ On failure, return ``NULL`` with an exception set. - - - .. c:function:: PyObject* PyByteArray_Concat(PyObject *a, PyObject *b) - - Concat bytearrays *a* and *b* and return a new bytearray with the result. - -+ On failure, return ``NULL`` with an exception set. -+ - - .. c:function:: Py_ssize_t PyByteArray_Size(PyObject *bytearray) - -diff --git a/Doc/c-api/cell.rst b/Doc/c-api/cell.rst -index f8cd0344fdd..61eb994c370 100644 ---- a/Doc/c-api/cell.rst -+++ b/Doc/c-api/cell.rst -@@ -39,7 +39,8 @@ - - .. c:function:: PyObject* PyCell_Get(PyObject *cell) - -- Return the contents of the cell *cell*. -+ Return the contents of the cell *cell*, which can be ``NULL``. -+ If *cell* is not a cell object, returns ``NULL`` with an exception set. - - - .. c:function:: PyObject* PyCell_GET(PyObject *cell) -@@ -52,8 +53,10 @@ - - Set the contents of the cell object *cell* to *value*. This releases the - reference to any current content of the cell. *value* may be ``NULL``. *cell* -- must be non-``NULL``; if it is not a cell object, ``-1`` will be returned. On -- success, ``0`` will be returned. -+ must be non-``NULL``. -+ -+ On success, return ``0``. -+ If *cell* is not a cell object, set an exception and return ``-1``. 
- - - .. c:function:: void PyCell_SET(PyObject *cell, PyObject *value) -diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst -index e3fd001c599..77cb67d8de2 100644 ---- a/Doc/c-api/complex.rst -+++ b/Doc/c-api/complex.rst -@@ -25,12 +25,16 @@ - - The C structure which corresponds to the value portion of a Python complex - number object. Most of the functions for dealing with complex number objects -- use structures of this type as input or output values, as appropriate. It is -- defined as:: -+ use structures of this type as input or output values, as appropriate. -+ -+ .. c:member:: double real -+ double imag -+ -+ The structure is defined as:: - - typedef struct { -- double real; -- double imag; -+ double real; -+ double imag; - } Py_complex; - - -@@ -106,17 +110,22 @@ - .. c:function:: PyObject* PyComplex_FromCComplex(Py_complex v) - - Create a new Python complex number object from a C :c:type:`Py_complex` value. -+ Return ``NULL`` with an exception set on error. - - - .. c:function:: PyObject* PyComplex_FromDoubles(double real, double imag) - - Return a new :c:type:`PyComplexObject` object from *real* and *imag*. -+ Return ``NULL`` with an exception set on error. - - - .. c:function:: double PyComplex_RealAsDouble(PyObject *op) - - Return the real part of *op* as a C :c:expr:`double`. - -+ Upon failure, this method returns ``-1.0`` with an exception set, so one -+ should call :c:func:`PyErr_Occurred` to check for errors. -+ - - .. c:function:: double PyComplex_ImagAsDouble(PyObject *op) - -@@ -131,8 +140,11 @@ - method, this method will first be called to convert *op* to a Python complex - number object. If :meth:`!__complex__` is not defined then it falls back to - :meth:`~object.__float__`. If :meth:`!__float__` is not defined then it falls back -- to :meth:`~object.__index__`. Upon failure, this method returns ``-1.0`` as a real -- value. -+ to :meth:`~object.__index__`. -+ -+ Upon failure, this method returns :c:type:`Py_complex` -+ with :c:member:`~Py_complex.real` set to ``-1.0`` and with an exception set, so one -+ should call :c:func:`PyErr_Occurred` to check for errors. - - .. versionchanged:: 3.8 - Use :meth:`~object.__index__` if available. -diff --git a/Doc/c-api/datetime.rst b/Doc/c-api/datetime.rst -index 97522da7734..d2d4d5309c7 100644 ---- a/Doc/c-api/datetime.rst -+++ b/Doc/c-api/datetime.rst -@@ -318,10 +318,10 @@ - .. c:function:: PyObject* PyDateTime_FromTimestamp(PyObject *args) - - Create and return a new :class:`datetime.datetime` object given an argument -- tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp()`. -+ tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp`. - - - .. c:function:: PyObject* PyDate_FromTimestamp(PyObject *args) - - Create and return a new :class:`datetime.date` object given an argument -- tuple suitable for passing to :meth:`datetime.date.fromtimestamp()`. -+ tuple suitable for passing to :meth:`datetime.date.fromtimestamp`. -diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst -index 7ddecb24734..9913273421a 100644 ---- a/Doc/c-api/exceptions.rst -+++ b/Doc/c-api/exceptions.rst -@@ -34,7 +34,7 @@ - and may fail in mysterious ways. - - .. note:: -- The error indicator is **not** the result of :func:`sys.exc_info()`. -+ The error indicator is **not** the result of :func:`sys.exc_info`. - The former corresponds to an exception that is not yet caught (and is - therefore still propagating), while the latter returns an exception after - it is caught (and has therefore stopped propagating). 
-diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst -index 4f6ac0d8175..1da37a5bcae 100644 ---- a/Doc/c-api/float.rst -+++ b/Doc/c-api/float.rst -@@ -2,20 +2,20 @@ - - .. _floatobjects: - --Floating Point Objects -+Floating-Point Objects - ====================== - --.. index:: pair: object; floating point -+.. index:: pair: object; floating-point - - - .. c:type:: PyFloatObject - -- This subtype of :c:type:`PyObject` represents a Python floating point object. -+ This subtype of :c:type:`PyObject` represents a Python floating-point object. - - - .. c:var:: PyTypeObject PyFloat_Type - -- This instance of :c:type:`PyTypeObject` represents the Python floating point -+ This instance of :c:type:`PyTypeObject` represents the Python floating-point - type. This is the same object as :class:`float` in the Python layer. - - -@@ -45,7 +45,7 @@ - .. c:function:: double PyFloat_AsDouble(PyObject *pyfloat) - - Return a C :c:expr:`double` representation of the contents of *pyfloat*. If -- *pyfloat* is not a Python floating point object but has a :meth:`~object.__float__` -+ *pyfloat* is not a Python floating-point object but has a :meth:`~object.__float__` - method, this method will first be called to convert *pyfloat* into a float. - If :meth:`!__float__` is not defined then it falls back to :meth:`~object.__index__`. - This method returns ``-1.0`` upon failure, so one should call -diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst -index 380465b817d..b8687c61c26 100644 ---- a/Doc/c-api/import.rst -+++ b/Doc/c-api/import.rst -@@ -174,7 +174,7 @@ - - .. versionadded:: 3.2 - .. versionchanged:: 3.3 -- Uses :func:`!imp.source_from_cache()` in calculating the source path if -+ Uses :func:`!imp.source_from_cache` in calculating the source path if - only the bytecode path is provided. - .. versionchanged:: 3.12 - No longer uses the removed :mod:`!imp` module. -diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst -index a51f1da6b66..8b7b28ae319 100644 ---- a/Doc/c-api/init.rst -+++ b/Doc/c-api/init.rst -@@ -388,9 +388,16 @@ - :c:func:`Py_NewInterpreter` below) that were created and not yet destroyed since - the last call to :c:func:`Py_Initialize`. Ideally, this frees all memory - allocated by the Python interpreter. This is a no-op when called for a second -- time (without calling :c:func:`Py_Initialize` again first). Normally the -- return value is ``0``. If there were errors during finalization -- (flushing buffered data), ``-1`` is returned. -+ time (without calling :c:func:`Py_Initialize` again first). -+ -+ Since this is the reverse of :c:func:`Py_Initialize`, it should be called -+ in the same thread with the same interpreter active. That means -+ the main thread and the main interpreter. -+ This should never be called while :c:func:`Py_RunMain` is running. -+ -+ Normally the return value is ``0``. -+ If there were errors during finalization (flushing buffered data), -+ ``-1`` is returned. - - This function is provided for a number of reasons. An embedding application - might want to restart Python without having to restart the application itself. -diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst -index 7c5465b5bfa..c586cfb39e8 100644 ---- a/Doc/c-api/init_config.rst -+++ b/Doc/c-api/init_config.rst -@@ -311,7 +311,7 @@ - * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, - * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. 
- -- Initialized the from :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment -+ Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment - variable value. - - Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for -@@ -499,7 +499,7 @@ - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are -- strippped from :c:member:`PyConfig.argv`, parsing arguments twice would -+ stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. - - :ref:`Preinitialize Python ` if needed. -@@ -1000,7 +1000,7 @@ - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are -- strippped from :c:member:`PyConfig.argv`, parsing arguments twice would -+ stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. - - Default: ``1`` in Python mode, ``0`` in isolated mode. -diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst -index 76ac80322f8..af86810c6b1 100644 ---- a/Doc/c-api/long.rst -+++ b/Doc/c-api/long.rst -@@ -324,6 +324,17 @@ - Returns ``NULL`` on error. Use :c:func:`PyErr_Occurred` to disambiguate. - - -+.. c:function:: PyObject* PyLong_GetInfo(void) -+ -+ On success, return a read only :term:`named tuple`, that holds -+ information about Python's internal representation of integers. -+ See :data:`sys.int_info` for description of individual fields. -+ -+ On failure, return ``NULL`` with an exception set. -+ -+ .. versionadded:: 3.1 -+ -+ - .. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op) - - Return 1 if *op* is compact, 0 otherwise. -diff --git a/Doc/c-api/marshal.rst b/Doc/c-api/marshal.rst -index 489f1580a41..b9085ad3ec3 100644 ---- a/Doc/c-api/marshal.rst -+++ b/Doc/c-api/marshal.rst -@@ -15,7 +15,7 @@ - - The module supports two versions of the data format: version 0 is the - historical version, version 1 shares interned strings in the file, and upon --unmarshalling. Version 2 uses a binary format for floating point numbers. -+unmarshalling. Version 2 uses a binary format for floating-point numbers. - ``Py_MARSHAL_VERSION`` indicates the current file format (currently 2). - - -diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst -index f941f0c7d42..9340a942656 100644 ---- a/Doc/c-api/module.rst -+++ b/Doc/c-api/module.rst -@@ -43,6 +43,8 @@ - to ``None``); the caller is responsible for providing a :attr:`__file__` - attribute. - -+ Return ``NULL`` with an exception set on error. -+ - .. versionadded:: 3.3 - - .. versionchanged:: 3.4 -@@ -265,6 +267,8 @@ - API version *module_api_version*. If that version does not match the version - of the running interpreter, a :exc:`RuntimeWarning` is emitted. - -+ Return ``NULL`` with an exception set on error. -+ - .. note:: - - Most uses of this function should be using :c:func:`PyModule_Create` -@@ -338,7 +342,8 @@ - The *value* pointer of this slot must point to a function of the signature: - - .. c:function:: PyObject* create_module(PyObject *spec, PyModuleDef *def) -- :noindex: -+ :no-index-entry: -+ :no-contents-entry: - - The function receives a :py:class:`~importlib.machinery.ModuleSpec` - instance, as defined in :PEP:`451`, and the module definition. 
-@@ -373,7 +378,8 @@ - The signature of the function is: - - .. c:function:: int exec_module(PyObject* module) -- :noindex: -+ :no-index-entry: -+ :no-contents-entry: - - If multiple ``Py_mod_exec`` slots are specified, they are processed in the - order they appear in the *m_slots* array. -@@ -436,6 +442,8 @@ - If that version does not match the version of the running interpreter, - a :exc:`RuntimeWarning` is emitted. - -+ Return ``NULL`` with an exception set on error. -+ - .. note:: - - Most uses of this function should be using :c:func:`PyModule_FromDefAndSpec` -@@ -486,7 +494,7 @@ - - On success, return ``0``. On error, raise an exception and return ``-1``. - -- Return ``NULL`` if *value* is ``NULL``. It must be called with an exception -+ Return ``-1`` if *value* is ``NULL``. It must be called with an exception - raised in this case. - - Example usage:: -@@ -579,15 +587,16 @@ - .. c:function:: int PyModule_AddIntConstant(PyObject *module, const char *name, long value) - - Add an integer constant to *module* as *name*. This convenience function can be -- used from the module's initialization function. Return ``-1`` on error, ``0`` on -- success. -+ used from the module's initialization function. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - - .. c:function:: int PyModule_AddStringConstant(PyObject *module, const char *name, const char *value) - - Add a string constant to *module* as *name*. This convenience function can be - used from the module's initialization function. The string *value* must be -- ``NULL``-terminated. Return ``-1`` on error, ``0`` on success. -+ ``NULL``-terminated. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - - .. c:macro:: PyModule_AddIntMacro(module, macro) -@@ -595,7 +604,7 @@ - Add an int constant to *module*. The name and the value are taken from - *macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int - constant *AF_INET* with the value of *AF_INET* to *module*. -- Return ``-1`` on error, ``0`` on success. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - - .. c:macro:: PyModule_AddStringMacro(module, macro) -@@ -608,7 +617,7 @@ - The type object is finalized by calling internally :c:func:`PyType_Ready`. - The name of the type object is taken from the last component of - :c:member:`~PyTypeObject.tp_name` after dot. -- Return ``-1`` on error, ``0`` on success. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - .. versionadded:: 3.9 - -@@ -647,14 +656,14 @@ - - The caller must hold the GIL. - -- Return 0 on success or -1 on failure. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - .. versionadded:: 3.3 - - .. c:function:: int PyState_RemoveModule(PyModuleDef *def) - - Removes the module object created from *def* from the interpreter state. -- Return 0 on success or -1 on failure. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - The caller must hold the GIL. - -diff --git a/Doc/c-api/number.rst b/Doc/c-api/number.rst -index 13d3c5af956..ad8b5935258 100644 ---- a/Doc/c-api/number.rst -+++ b/Doc/c-api/number.rst -@@ -51,8 +51,8 @@ - - Return a reasonable approximation for the mathematical value of *o1* divided by - *o2*, or ``NULL`` on failure. The return value is "approximate" because binary -- floating point numbers are approximate; it is not possible to represent all real -- numbers in base two. 
This function can return a floating point value when -+ floating-point numbers are approximate; it is not possible to represent all real -+ numbers in base two. This function can return a floating-point value when - passed two integers. This is the equivalent of the Python expression ``o1 / o2``. - - -@@ -177,8 +177,8 @@ - - Return a reasonable approximation for the mathematical value of *o1* divided by - *o2*, or ``NULL`` on failure. The return value is "approximate" because binary -- floating point numbers are approximate; it is not possible to represent all real -- numbers in base two. This function can return a floating point value when -+ floating-point numbers are approximate; it is not possible to represent all real -+ numbers in base two. This function can return a floating-point value when - passed two integers. The operation is done *in-place* when *o1* supports it. - This is the equivalent of the Python statement ``o1 /= o2``. - -diff --git a/Doc/c-api/slice.rst b/Doc/c-api/slice.rst -index 9e880c6b7f2..819929a0e60 100644 ---- a/Doc/c-api/slice.rst -+++ b/Doc/c-api/slice.rst -@@ -23,7 +23,9 @@ - Return a new slice object with the given values. The *start*, *stop*, and - *step* parameters are used as the values of the slice object attributes of - the same names. Any of the values may be ``NULL``, in which case the -- ``None`` will be used for the corresponding attribute. Return ``NULL`` if -+ ``None`` will be used for the corresponding attribute. -+ -+ Return ``NULL`` with an exception set if - the new object could not be allocated. - - -@@ -52,7 +54,7 @@ - of bounds indices are clipped in a manner consistent with the handling of - normal slices. - -- Returns ``0`` on success and ``-1`` on error with exception set. -+ Return ``0`` on success and ``-1`` on error with an exception set. - - .. note:: - This function is considered not safe for resizable sequences. -@@ -95,7 +97,7 @@ - ``PY_SSIZE_T_MIN`` to ``PY_SSIZE_T_MIN``, and silently boost the step - values less than ``-PY_SSIZE_T_MAX`` to ``-PY_SSIZE_T_MAX``. - -- Return ``-1`` on error, ``0`` on success. -+ Return ``-1`` with an exception set on error, ``0`` on success. - - .. versionadded:: 3.6.1 - -diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst -index e0186c1f522..82ef4bcd147 100644 ---- a/Doc/c-api/tuple.rst -+++ b/Doc/c-api/tuple.rst -@@ -33,12 +33,14 @@ - - .. c:function:: PyObject* PyTuple_New(Py_ssize_t len) - -- Return a new tuple object of size *len*, or ``NULL`` on failure. -+ Return a new tuple object of size *len*, -+ or ``NULL`` with an exception set on failure. - - - .. c:function:: PyObject* PyTuple_Pack(Py_ssize_t n, ...) - -- Return a new tuple object of size *n*, or ``NULL`` on failure. The tuple values -+ Return a new tuple object of size *n*, -+ or ``NULL`` with an exception set on failure. The tuple values - are initialized to the subsequent *n* C arguments pointing to Python objects. - ``PyTuple_Pack(2, a, b)`` is equivalent to ``Py_BuildValue("(OO)", a, b)``. - -@@ -46,12 +48,12 @@ - .. c:function:: Py_ssize_t PyTuple_Size(PyObject *p) - - Take a pointer to a tuple object, and return the size of that tuple. -+ On error, return ``-1`` and with an exception set. - - - .. c:function:: Py_ssize_t PyTuple_GET_SIZE(PyObject *p) - -- Return the size of the tuple *p*, which must be non-``NULL`` and point to a tuple; -- no error checking is performed. -+ Like :c:func:`PyTuple_Size`, but without error checking. - - - .. 
c:function:: PyObject* PyTuple_GetItem(PyObject *p, Py_ssize_t pos) -@@ -74,8 +76,10 @@ - .. c:function:: PyObject* PyTuple_GetSlice(PyObject *p, Py_ssize_t low, Py_ssize_t high) - - Return the slice of the tuple pointed to by *p* between *low* and *high*, -- or ``NULL`` on failure. This is the equivalent of the Python expression -- ``p[low:high]``. Indexing from the end of the tuple is not supported. -+ or ``NULL`` with an exception set on failure. -+ -+ This is the equivalent of the Python expression ``p[low:high]``. -+ Indexing from the end of the tuple is not supported. - - - .. c:function:: int PyTuple_SetItem(PyObject *p, Py_ssize_t pos, PyObject *o) -@@ -132,6 +136,8 @@ - Create a new struct sequence type from the data in *desc*, described below. Instances - of the resulting type can be created with :c:func:`PyStructSequence_New`. - -+ Return ``NULL`` with an exception set on failure. -+ - - .. c:function:: void PyStructSequence_InitType(PyTypeObject *type, PyStructSequence_Desc *desc) - -@@ -140,8 +146,8 @@ - - .. c:function:: int PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) - -- The same as ``PyStructSequence_InitType``, but returns ``0`` on success and ``-1`` on -- failure. -+ Like :c:func:`PyStructSequence_InitType`, but returns ``0`` on success -+ and ``-1`` with an exception set on failure. - - .. versionadded:: 3.4 - -@@ -198,6 +204,8 @@ - Creates an instance of *type*, which must have been created with - :c:func:`PyStructSequence_NewType`. - -+ Return ``NULL`` with an exception set on failure. -+ - - .. c:function:: PyObject* PyStructSequence_GetItem(PyObject *p, Py_ssize_t pos) - -diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst -index f6d865f2f52..90896572046 100644 ---- a/Doc/c-api/typeobj.rst -+++ b/Doc/c-api/typeobj.rst -@@ -1584,7 +1584,7 @@ - weak references to the type object itself. - - It is an error to set both the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit and -- :c:member:`~PyTypeObject.tp_weaklist`. -+ :c:member:`~PyTypeObject.tp_weaklistoffset`. - - **Inheritance:** - -@@ -1596,7 +1596,7 @@ - **Default:** - - If the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit is set in the -- :c:member:`~PyTypeObject.tp_dict` field, then -+ :c:member:`~PyTypeObject.tp_flags` field, then - :c:member:`~PyTypeObject.tp_weaklistoffset` will be set to a negative value, - to indicate that it is unsafe to use this field. - -diff --git a/Doc/conf.py b/Doc/conf.py -index e292bdd5003..319cdf60790 100644 ---- a/Doc/conf.py -+++ b/Doc/conf.py -@@ -6,9 +6,11 @@ - # The contents of this file are pickled, so don't put values in the namespace - # that aren't pickleable (module imports are okay, they're removed automatically). - -+import importlib - import os - import sys - import time -+ - sys.path.append(os.path.abspath('tools/extensions')) - sys.path.append(os.path.abspath('includes')) - -@@ -18,11 +20,10 @@ - # --------------------- - - extensions = [ -- 'asdl_highlight', -+ 'audit_events', - 'c_annotations', -- 'escape4chm', - 'glossary_search', -- 'peg_highlight', -+ 'lexers', - 'pyspecific', - 'sphinx.ext.coverage', - 'sphinx.ext.doctest', -@@ -31,7 +32,7 @@ - - # Skip if downstream redistributors haven't installed it - try: -- import sphinxext.opengraph -+ import sphinxext.opengraph # noqa: F401 - except ImportError: - pass - else: -@@ -58,8 +59,8 @@ - - # We look for the Include/patchlevel.h file in the current Python source tree - # and replace the values accordingly. 
--import patchlevel --version, release = patchlevel.get_version_info() -+# See Doc/tools/extensions/patchlevel.py -+version, release = importlib.import_module('patchlevel').get_version_info() - - rst_epilog = f""" - .. |python_version_literal| replace:: ``Python {version}`` -@@ -75,7 +76,7 @@ - highlight_language = 'python3' - - # Minimum version of sphinx required --needs_sphinx = '4.2' -+needs_sphinx = '6.2.1' - - # Create table of contents entries for domain objects (e.g. functions, classes, - # attributes, etc.). Default is True. -@@ -256,6 +257,9 @@ - ('c:data', 'PyExc_UnicodeWarning'), - ('c:data', 'PyExc_UserWarning'), - ('c:data', 'PyExc_Warning'), -+ # Undocumented public C macros -+ ('c:macro', 'Py_BUILD_ASSERT'), -+ ('c:macro', 'Py_BUILD_ASSERT_EXPR'), - # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot - # be resolved, as the method is currently undocumented. For context, see - # https://github.com/python/cpython/pull/103289. -@@ -280,7 +284,8 @@ - - # Disable Docutils smartquotes for several translations - smartquotes_excludes = { -- 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'], -+ 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], -+ 'builders': ['man', 'text'], - } - - # Avoid a warning with Sphinx >= 4.0 -@@ -289,6 +294,7 @@ - # Allow translation of index directives - gettext_additional_targets = [ - 'index', -+ 'literal-block', - ] - - # Options for HTML output -@@ -301,11 +307,13 @@ - 'collapsiblesidebar': True, - 'issues_url': '/bugs.html', - 'license_url': '/license.html', -- 'root_include_title': False # We use the version switcher instead. -+ 'root_include_title': False, # We use the version switcher instead. - } - - if os.getenv("READTHEDOCS"): -- html_theme_options["hosted_on"] = 'Read the Docs' -+ html_theme_options["hosted_on"] = ( -+ 'Read the Docs' -+ ) - - # Override stylesheet fingerprinting for Windows CHM htmlhelp to fix GH-91207 - # https://github.com/python/cpython/issues/91207 -@@ -319,15 +327,21 @@ - - # Deployment preview information - # (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html) --repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL") -+is_deployment_preview = os.getenv("READTHEDOCS_VERSION_TYPE") == "external" -+repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL", "") -+repository_url = repository_url.removesuffix(".git") - html_context = { -- "is_deployment_preview": os.getenv("READTHEDOCS_VERSION_TYPE") == "external", -- "repository_url": repository_url.removesuffix(".git") if repository_url else None, -- "pr_id": os.getenv("READTHEDOCS_VERSION") -+ "is_deployment_preview": is_deployment_preview, -+ "repository_url": repository_url or None, -+ "pr_id": os.getenv("READTHEDOCS_VERSION"), -+ "enable_analytics": os.getenv("PYTHON_DOCS_ENABLE_ANALYTICS"), - } - - # This 'Last updated on:' timestamp is inserted at the bottom of every page. --html_last_updated_fmt = time.strftime('%b %d, %Y (%H:%M UTC)', time.gmtime()) -+html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) -+html_last_updated_fmt = time.strftime( -+ '%b %d, %Y (%H:%M UTC)', time.gmtime(html_time) -+) - - # Path to find HTML templates. - templates_path = ['tools/templates'] -@@ -387,30 +401,70 @@ - # (source start file, target name, title, author, document class [howto/manual]). 
- _stdauthor = 'Guido van Rossum and the Python development team' - latex_documents = [ -- ('c-api/index', 'c-api.tex', -- 'The Python/C API', _stdauthor, 'manual'), -- ('extending/index', 'extending.tex', -- 'Extending and Embedding Python', _stdauthor, 'manual'), -- ('installing/index', 'installing.tex', -- 'Installing Python Modules', _stdauthor, 'manual'), -- ('library/index', 'library.tex', -- 'The Python Library Reference', _stdauthor, 'manual'), -- ('reference/index', 'reference.tex', -- 'The Python Language Reference', _stdauthor, 'manual'), -- ('tutorial/index', 'tutorial.tex', -- 'Python Tutorial', _stdauthor, 'manual'), -- ('using/index', 'using.tex', -- 'Python Setup and Usage', _stdauthor, 'manual'), -- ('faq/index', 'faq.tex', -- 'Python Frequently Asked Questions', _stdauthor, 'manual'), -- ('whatsnew/' + version, 'whatsnew.tex', -- 'What\'s New in Python', 'A. M. Kuchling', 'howto'), -+ ('c-api/index', 'c-api.tex', 'The Python/C API', _stdauthor, 'manual'), -+ ( -+ 'extending/index', -+ 'extending.tex', -+ 'Extending and Embedding Python', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'installing/index', -+ 'installing.tex', -+ 'Installing Python Modules', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'library/index', -+ 'library.tex', -+ 'The Python Library Reference', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'reference/index', -+ 'reference.tex', -+ 'The Python Language Reference', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'tutorial/index', -+ 'tutorial.tex', -+ 'Python Tutorial', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'using/index', -+ 'using.tex', -+ 'Python Setup and Usage', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'faq/index', -+ 'faq.tex', -+ 'Python Frequently Asked Questions', -+ _stdauthor, -+ 'manual', -+ ), -+ ( -+ 'whatsnew/' + version, -+ 'whatsnew.tex', -+ 'What\'s New in Python', -+ 'A. M. Kuchling', -+ 'howto', -+ ), - ] - # Collect all HOWTOs individually --latex_documents.extend(('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex', -- '', _stdauthor, 'howto') -- for fn in os.listdir('howto') -- if fn.endswith('.rst') and fn != 'index.rst') -+latex_documents.extend( -+ ('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex', '', _stdauthor, 'howto') -+ for fn in os.listdir('howto') -+ if fn.endswith('.rst') and fn != 'index.rst' -+) - - # Documents to append as an appendix to all manuals. - latex_appendices = ['glossary', 'about', 'license', 'copyright'] -@@ -439,8 +493,7 @@ - 'test($|_)', - ] - --coverage_ignore_classes = [ --] -+coverage_ignore_classes = [] - - # Glob patterns for C source files for C API coverage, relative to this directory. - coverage_c_path = [ -@@ -457,7 +510,7 @@ - # The coverage checker will ignore all C items whose names match these regexes - # (using re.match) -- the keys must be the same as in coverage_c_regexes. - coverage_ignore_c_items = { --# 'cfunction': [...] -+ # 'cfunction': [...] 
- } - - -@@ -522,14 +575,16 @@ - } - extlinks_detect_hardcoded_links = True - --# Options for extensions --# ---------------------- -+# Options for c_annotations -+# ------------------------- - - # Relative filename of the data files - refcount_file = 'data/refcounts.dat' - stable_abi_file = 'data/stable_abi.dat' - --# sphinxext-opengraph config -+# Options for sphinxext-opengraph -+# ------------------------------- -+ - ogp_site_url = 'https://docs.python.org/3/' - ogp_site_name = 'Python documentation' - ogp_image = '_static/og-image.png' -diff --git a/Doc/constraints.txt b/Doc/constraints.txt -index 16b735ea07a..26ac1862dba 100644 ---- a/Doc/constraints.txt -+++ b/Doc/constraints.txt -@@ -7,18 +7,20 @@ - # Direct dependencies of Sphinx - babel<3 - colorama<0.5 --imagesize<1.5 --Jinja2<3.2 --packaging<24 --Pygments>=2.16.1,<3 -+imagesize<2 -+Jinja2<4 -+packaging<25 -+Pygments<3 - requests<3 - snowballstemmer<3 --sphinxcontrib-applehelp<1.0.5 --sphinxcontrib-devhelp<1.0.6 --sphinxcontrib-htmlhelp<2.0.5 --sphinxcontrib-jsmath<1.1 --sphinxcontrib-qthelp<1.0.7 --sphinxcontrib-serializinghtml<1.1.10 -+# keep lower-bounds until Sphinx 8.1 is released -+# https://github.com/sphinx-doc/sphinx/pull/12756 -+sphinxcontrib-applehelp>=1.0.7,<3 -+sphinxcontrib-devhelp>=1.0.6,<3 -+sphinxcontrib-htmlhelp>=2.0.6,<3 -+sphinxcontrib-jsmath>=1.0.1,<2 -+sphinxcontrib-qthelp>=1.0.6,<3 -+sphinxcontrib-serializinghtml>=1.1.9,<3 - - # Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) --MarkupSafe<2.2 -+MarkupSafe<3 -diff --git a/Doc/contents.rst b/Doc/contents.rst -index 24ceacb0076..b57f4b09a5d 100644 ---- a/Doc/contents.rst -+++ b/Doc/contents.rst -@@ -14,6 +14,7 @@ - installing/index.rst - howto/index.rst - faq/index.rst -+ deprecations/index.rst - glossary.rst - - about.rst -diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat -index f112d268129..4aa2b35162d 100644 ---- a/Doc/data/stable_abi.dat -+++ b/Doc/data/stable_abi.dat -@@ -1,868 +1,868 @@ - role,name,added,ifdef_note,struct_abi_kind - macro,PY_VECTORCALL_ARGUMENTS_OFFSET,3.12,, --function,PyAIter_Check,3.10,, --function,PyArg_Parse,3.2,, --function,PyArg_ParseTuple,3.2,, --function,PyArg_ParseTupleAndKeywords,3.2,, --function,PyArg_UnpackTuple,3.2,, --function,PyArg_VaParse,3.2,, --function,PyArg_VaParseTupleAndKeywords,3.2,, --function,PyArg_ValidateKeywordArguments,3.2,, --var,PyBaseObject_Type,3.2,, --function,PyBool_FromLong,3.2,, --var,PyBool_Type,3.2,, --function,PyBuffer_FillContiguousStrides,3.11,, --function,PyBuffer_FillInfo,3.11,, --function,PyBuffer_FromContiguous,3.11,, --function,PyBuffer_GetPointer,3.11,, --function,PyBuffer_IsContiguous,3.11,, --function,PyBuffer_Release,3.11,, --function,PyBuffer_SizeFromFormat,3.11,, --function,PyBuffer_ToContiguous,3.11,, --var,PyByteArrayIter_Type,3.2,, --function,PyByteArray_AsString,3.2,, --function,PyByteArray_Concat,3.2,, --function,PyByteArray_FromObject,3.2,, --function,PyByteArray_FromStringAndSize,3.2,, --function,PyByteArray_Resize,3.2,, --function,PyByteArray_Size,3.2,, --var,PyByteArray_Type,3.2,, --var,PyBytesIter_Type,3.2,, --function,PyBytes_AsString,3.2,, --function,PyBytes_AsStringAndSize,3.2,, --function,PyBytes_Concat,3.2,, --function,PyBytes_ConcatAndDel,3.2,, --function,PyBytes_DecodeEscape,3.2,, --function,PyBytes_FromFormat,3.2,, --function,PyBytes_FromFormatV,3.2,, --function,PyBytes_FromObject,3.2,, --function,PyBytes_FromString,3.2,, --function,PyBytes_FromStringAndSize,3.2,, --function,PyBytes_Repr,3.2,, --function,PyBytes_Size,3.2,, 
--var,PyBytes_Type,3.2,, -+func,PyAIter_Check,3.10,, -+func,PyArg_Parse,3.2,, -+func,PyArg_ParseTuple,3.2,, -+func,PyArg_ParseTupleAndKeywords,3.2,, -+func,PyArg_UnpackTuple,3.2,, -+func,PyArg_VaParse,3.2,, -+func,PyArg_VaParseTupleAndKeywords,3.2,, -+func,PyArg_ValidateKeywordArguments,3.2,, -+data,PyBaseObject_Type,3.2,, -+func,PyBool_FromLong,3.2,, -+data,PyBool_Type,3.2,, -+func,PyBuffer_FillContiguousStrides,3.11,, -+func,PyBuffer_FillInfo,3.11,, -+func,PyBuffer_FromContiguous,3.11,, -+func,PyBuffer_GetPointer,3.11,, -+func,PyBuffer_IsContiguous,3.11,, -+func,PyBuffer_Release,3.11,, -+func,PyBuffer_SizeFromFormat,3.11,, -+func,PyBuffer_ToContiguous,3.11,, -+data,PyByteArrayIter_Type,3.2,, -+func,PyByteArray_AsString,3.2,, -+func,PyByteArray_Concat,3.2,, -+func,PyByteArray_FromObject,3.2,, -+func,PyByteArray_FromStringAndSize,3.2,, -+func,PyByteArray_Resize,3.2,, -+func,PyByteArray_Size,3.2,, -+data,PyByteArray_Type,3.2,, -+data,PyBytesIter_Type,3.2,, -+func,PyBytes_AsString,3.2,, -+func,PyBytes_AsStringAndSize,3.2,, -+func,PyBytes_Concat,3.2,, -+func,PyBytes_ConcatAndDel,3.2,, -+func,PyBytes_DecodeEscape,3.2,, -+func,PyBytes_FromFormat,3.2,, -+func,PyBytes_FromFormatV,3.2,, -+func,PyBytes_FromObject,3.2,, -+func,PyBytes_FromString,3.2,, -+func,PyBytes_FromStringAndSize,3.2,, -+func,PyBytes_Repr,3.2,, -+func,PyBytes_Size,3.2,, -+data,PyBytes_Type,3.2,, - type,PyCFunction,3.2,, - type,PyCFunctionWithKeywords,3.2,, --function,PyCFunction_Call,3.2,, --function,PyCFunction_GetFlags,3.2,, --function,PyCFunction_GetFunction,3.2,, --function,PyCFunction_GetSelf,3.2,, --function,PyCFunction_New,3.4,, --function,PyCFunction_NewEx,3.2,, --var,PyCFunction_Type,3.2,, --function,PyCMethod_New,3.9,, --function,PyCallIter_New,3.2,, --var,PyCallIter_Type,3.2,, --function,PyCallable_Check,3.2,, -+func,PyCFunction_Call,3.2,, -+func,PyCFunction_GetFlags,3.2,, -+func,PyCFunction_GetFunction,3.2,, -+func,PyCFunction_GetSelf,3.2,, -+func,PyCFunction_New,3.4,, -+func,PyCFunction_NewEx,3.2,, -+data,PyCFunction_Type,3.2,, -+func,PyCMethod_New,3.9,, -+func,PyCallIter_New,3.2,, -+data,PyCallIter_Type,3.2,, -+func,PyCallable_Check,3.2,, - type,PyCapsule_Destructor,3.2,, --function,PyCapsule_GetContext,3.2,, --function,PyCapsule_GetDestructor,3.2,, --function,PyCapsule_GetName,3.2,, --function,PyCapsule_GetPointer,3.2,, --function,PyCapsule_Import,3.2,, --function,PyCapsule_IsValid,3.2,, --function,PyCapsule_New,3.2,, --function,PyCapsule_SetContext,3.2,, --function,PyCapsule_SetDestructor,3.2,, --function,PyCapsule_SetName,3.2,, --function,PyCapsule_SetPointer,3.2,, --var,PyCapsule_Type,3.2,, --var,PyClassMethodDescr_Type,3.2,, --function,PyCodec_BackslashReplaceErrors,3.2,, --function,PyCodec_Decode,3.2,, --function,PyCodec_Decoder,3.2,, --function,PyCodec_Encode,3.2,, --function,PyCodec_Encoder,3.2,, --function,PyCodec_IgnoreErrors,3.2,, --function,PyCodec_IncrementalDecoder,3.2,, --function,PyCodec_IncrementalEncoder,3.2,, --function,PyCodec_KnownEncoding,3.2,, --function,PyCodec_LookupError,3.2,, --function,PyCodec_NameReplaceErrors,3.7,, --function,PyCodec_Register,3.2,, --function,PyCodec_RegisterError,3.2,, --function,PyCodec_ReplaceErrors,3.2,, --function,PyCodec_StreamReader,3.2,, --function,PyCodec_StreamWriter,3.2,, --function,PyCodec_StrictErrors,3.2,, --function,PyCodec_Unregister,3.10,, --function,PyCodec_XMLCharRefReplaceErrors,3.2,, --function,PyComplex_FromDoubles,3.2,, --function,PyComplex_ImagAsDouble,3.2,, --function,PyComplex_RealAsDouble,3.2,, --var,PyComplex_Type,3.2,, 
--function,PyDescr_NewClassMethod,3.2,, --function,PyDescr_NewGetSet,3.2,, --function,PyDescr_NewMember,3.2,, --function,PyDescr_NewMethod,3.2,, --var,PyDictItems_Type,3.2,, --var,PyDictIterItem_Type,3.2,, --var,PyDictIterKey_Type,3.2,, --var,PyDictIterValue_Type,3.2,, --var,PyDictKeys_Type,3.2,, --function,PyDictProxy_New,3.2,, --var,PyDictProxy_Type,3.2,, --var,PyDictRevIterItem_Type,3.8,, --var,PyDictRevIterKey_Type,3.8,, --var,PyDictRevIterValue_Type,3.8,, --var,PyDictValues_Type,3.2,, --function,PyDict_Clear,3.2,, --function,PyDict_Contains,3.2,, --function,PyDict_Copy,3.2,, --function,PyDict_DelItem,3.2,, --function,PyDict_DelItemString,3.2,, --function,PyDict_GetItem,3.2,, --function,PyDict_GetItemString,3.2,, --function,PyDict_GetItemWithError,3.2,, --function,PyDict_Items,3.2,, --function,PyDict_Keys,3.2,, --function,PyDict_Merge,3.2,, --function,PyDict_MergeFromSeq2,3.2,, --function,PyDict_New,3.2,, --function,PyDict_Next,3.2,, --function,PyDict_SetItem,3.2,, --function,PyDict_SetItemString,3.2,, --function,PyDict_Size,3.2,, --var,PyDict_Type,3.2,, --function,PyDict_Update,3.2,, --function,PyDict_Values,3.2,, --var,PyEllipsis_Type,3.2,, --var,PyEnum_Type,3.2,, --function,PyErr_BadArgument,3.2,, --function,PyErr_BadInternalCall,3.2,, --function,PyErr_CheckSignals,3.2,, --function,PyErr_Clear,3.2,, --function,PyErr_Display,3.2,, --function,PyErr_DisplayException,3.12,, --function,PyErr_ExceptionMatches,3.2,, --function,PyErr_Fetch,3.2,, --function,PyErr_Format,3.2,, --function,PyErr_FormatV,3.5,, --function,PyErr_GetExcInfo,3.7,, --function,PyErr_GetHandledException,3.11,, --function,PyErr_GetRaisedException,3.12,, --function,PyErr_GivenExceptionMatches,3.2,, --function,PyErr_NewException,3.2,, --function,PyErr_NewExceptionWithDoc,3.2,, --function,PyErr_NoMemory,3.2,, --function,PyErr_NormalizeException,3.2,, --function,PyErr_Occurred,3.2,, --function,PyErr_Print,3.2,, --function,PyErr_PrintEx,3.2,, --function,PyErr_ProgramText,3.2,, --function,PyErr_ResourceWarning,3.6,, --function,PyErr_Restore,3.2,, --function,PyErr_SetExcFromWindowsErr,3.7,on Windows, --function,PyErr_SetExcFromWindowsErrWithFilename,3.7,on Windows, --function,PyErr_SetExcFromWindowsErrWithFilenameObject,3.7,on Windows, --function,PyErr_SetExcFromWindowsErrWithFilenameObjects,3.7,on Windows, --function,PyErr_SetExcInfo,3.7,, --function,PyErr_SetFromErrno,3.2,, --function,PyErr_SetFromErrnoWithFilename,3.2,, --function,PyErr_SetFromErrnoWithFilenameObject,3.2,, --function,PyErr_SetFromErrnoWithFilenameObjects,3.7,, --function,PyErr_SetFromWindowsErr,3.7,on Windows, --function,PyErr_SetFromWindowsErrWithFilename,3.7,on Windows, --function,PyErr_SetHandledException,3.11,, --function,PyErr_SetImportError,3.7,, --function,PyErr_SetImportErrorSubclass,3.6,, --function,PyErr_SetInterrupt,3.2,, --function,PyErr_SetInterruptEx,3.10,, --function,PyErr_SetNone,3.2,, --function,PyErr_SetObject,3.2,, --function,PyErr_SetRaisedException,3.12,, --function,PyErr_SetString,3.2,, --function,PyErr_SyntaxLocation,3.2,, --function,PyErr_SyntaxLocationEx,3.7,, --function,PyErr_WarnEx,3.2,, --function,PyErr_WarnExplicit,3.2,, --function,PyErr_WarnFormat,3.2,, --function,PyErr_WriteUnraisable,3.2,, --function,PyEval_AcquireLock,3.2,, --function,PyEval_AcquireThread,3.2,, --function,PyEval_CallFunction,3.2,, --function,PyEval_CallMethod,3.2,, --function,PyEval_CallObjectWithKeywords,3.2,, --function,PyEval_EvalCode,3.2,, --function,PyEval_EvalCodeEx,3.2,, --function,PyEval_EvalFrame,3.2,, --function,PyEval_EvalFrameEx,3.2,, 
--function,PyEval_GetBuiltins,3.2,, --function,PyEval_GetFrame,3.2,, --function,PyEval_GetFuncDesc,3.2,, --function,PyEval_GetFuncName,3.2,, --function,PyEval_GetGlobals,3.2,, --function,PyEval_GetLocals,3.2,, --function,PyEval_InitThreads,3.2,, --function,PyEval_ReleaseLock,3.2,, --function,PyEval_ReleaseThread,3.2,, --function,PyEval_RestoreThread,3.2,, --function,PyEval_SaveThread,3.2,, --function,PyEval_ThreadsInitialized,3.2,, --var,PyExc_ArithmeticError,3.2,, --var,PyExc_AssertionError,3.2,, --var,PyExc_AttributeError,3.2,, --var,PyExc_BaseException,3.2,, --var,PyExc_BaseExceptionGroup,3.11,, --var,PyExc_BlockingIOError,3.7,, --var,PyExc_BrokenPipeError,3.7,, --var,PyExc_BufferError,3.2,, --var,PyExc_BytesWarning,3.2,, --var,PyExc_ChildProcessError,3.7,, --var,PyExc_ConnectionAbortedError,3.7,, --var,PyExc_ConnectionError,3.7,, --var,PyExc_ConnectionRefusedError,3.7,, --var,PyExc_ConnectionResetError,3.7,, --var,PyExc_DeprecationWarning,3.2,, --var,PyExc_EOFError,3.2,, --var,PyExc_EncodingWarning,3.10,, --var,PyExc_EnvironmentError,3.2,, --var,PyExc_Exception,3.2,, --var,PyExc_FileExistsError,3.7,, --var,PyExc_FileNotFoundError,3.7,, --var,PyExc_FloatingPointError,3.2,, --var,PyExc_FutureWarning,3.2,, --var,PyExc_GeneratorExit,3.2,, --var,PyExc_IOError,3.2,, --var,PyExc_ImportError,3.2,, --var,PyExc_ImportWarning,3.2,, --var,PyExc_IndentationError,3.2,, --var,PyExc_IndexError,3.2,, --var,PyExc_InterruptedError,3.7,, --var,PyExc_IsADirectoryError,3.7,, --var,PyExc_KeyError,3.2,, --var,PyExc_KeyboardInterrupt,3.2,, --var,PyExc_LookupError,3.2,, --var,PyExc_MemoryError,3.2,, --var,PyExc_ModuleNotFoundError,3.6,, --var,PyExc_NameError,3.2,, --var,PyExc_NotADirectoryError,3.7,, --var,PyExc_NotImplementedError,3.2,, --var,PyExc_OSError,3.2,, --var,PyExc_OverflowError,3.2,, --var,PyExc_PendingDeprecationWarning,3.2,, --var,PyExc_PermissionError,3.7,, --var,PyExc_ProcessLookupError,3.7,, --var,PyExc_RecursionError,3.7,, --var,PyExc_ReferenceError,3.2,, --var,PyExc_ResourceWarning,3.7,, --var,PyExc_RuntimeError,3.2,, --var,PyExc_RuntimeWarning,3.2,, --var,PyExc_StopAsyncIteration,3.7,, --var,PyExc_StopIteration,3.2,, --var,PyExc_SyntaxError,3.2,, --var,PyExc_SyntaxWarning,3.2,, --var,PyExc_SystemError,3.2,, --var,PyExc_SystemExit,3.2,, --var,PyExc_TabError,3.2,, --var,PyExc_TimeoutError,3.7,, --var,PyExc_TypeError,3.2,, --var,PyExc_UnboundLocalError,3.2,, --var,PyExc_UnicodeDecodeError,3.2,, --var,PyExc_UnicodeEncodeError,3.2,, --var,PyExc_UnicodeError,3.2,, --var,PyExc_UnicodeTranslateError,3.2,, --var,PyExc_UnicodeWarning,3.2,, --var,PyExc_UserWarning,3.2,, --var,PyExc_ValueError,3.2,, --var,PyExc_Warning,3.2,, --var,PyExc_WindowsError,3.7,on Windows, --var,PyExc_ZeroDivisionError,3.2,, --function,PyExceptionClass_Name,3.8,, --function,PyException_GetArgs,3.12,, --function,PyException_GetCause,3.2,, --function,PyException_GetContext,3.2,, --function,PyException_GetTraceback,3.2,, --function,PyException_SetArgs,3.12,, --function,PyException_SetCause,3.2,, --function,PyException_SetContext,3.2,, --function,PyException_SetTraceback,3.2,, --function,PyFile_FromFd,3.2,, --function,PyFile_GetLine,3.2,, --function,PyFile_WriteObject,3.2,, --function,PyFile_WriteString,3.2,, --var,PyFilter_Type,3.2,, --function,PyFloat_AsDouble,3.2,, --function,PyFloat_FromDouble,3.2,, --function,PyFloat_FromString,3.2,, --function,PyFloat_GetInfo,3.2,, --function,PyFloat_GetMax,3.2,, --function,PyFloat_GetMin,3.2,, --var,PyFloat_Type,3.2,, -+func,PyCapsule_GetContext,3.2,, -+func,PyCapsule_GetDestructor,3.2,, 
-+func,PyCapsule_GetName,3.2,, -+func,PyCapsule_GetPointer,3.2,, -+func,PyCapsule_Import,3.2,, -+func,PyCapsule_IsValid,3.2,, -+func,PyCapsule_New,3.2,, -+func,PyCapsule_SetContext,3.2,, -+func,PyCapsule_SetDestructor,3.2,, -+func,PyCapsule_SetName,3.2,, -+func,PyCapsule_SetPointer,3.2,, -+data,PyCapsule_Type,3.2,, -+data,PyClassMethodDescr_Type,3.2,, -+func,PyCodec_BackslashReplaceErrors,3.2,, -+func,PyCodec_Decode,3.2,, -+func,PyCodec_Decoder,3.2,, -+func,PyCodec_Encode,3.2,, -+func,PyCodec_Encoder,3.2,, -+func,PyCodec_IgnoreErrors,3.2,, -+func,PyCodec_IncrementalDecoder,3.2,, -+func,PyCodec_IncrementalEncoder,3.2,, -+func,PyCodec_KnownEncoding,3.2,, -+func,PyCodec_LookupError,3.2,, -+func,PyCodec_NameReplaceErrors,3.7,, -+func,PyCodec_Register,3.2,, -+func,PyCodec_RegisterError,3.2,, -+func,PyCodec_ReplaceErrors,3.2,, -+func,PyCodec_StreamReader,3.2,, -+func,PyCodec_StreamWriter,3.2,, -+func,PyCodec_StrictErrors,3.2,, -+func,PyCodec_Unregister,3.10,, -+func,PyCodec_XMLCharRefReplaceErrors,3.2,, -+func,PyComplex_FromDoubles,3.2,, -+func,PyComplex_ImagAsDouble,3.2,, -+func,PyComplex_RealAsDouble,3.2,, -+data,PyComplex_Type,3.2,, -+func,PyDescr_NewClassMethod,3.2,, -+func,PyDescr_NewGetSet,3.2,, -+func,PyDescr_NewMember,3.2,, -+func,PyDescr_NewMethod,3.2,, -+data,PyDictItems_Type,3.2,, -+data,PyDictIterItem_Type,3.2,, -+data,PyDictIterKey_Type,3.2,, -+data,PyDictIterValue_Type,3.2,, -+data,PyDictKeys_Type,3.2,, -+func,PyDictProxy_New,3.2,, -+data,PyDictProxy_Type,3.2,, -+data,PyDictRevIterItem_Type,3.8,, -+data,PyDictRevIterKey_Type,3.8,, -+data,PyDictRevIterValue_Type,3.8,, -+data,PyDictValues_Type,3.2,, -+func,PyDict_Clear,3.2,, -+func,PyDict_Contains,3.2,, -+func,PyDict_Copy,3.2,, -+func,PyDict_DelItem,3.2,, -+func,PyDict_DelItemString,3.2,, -+func,PyDict_GetItem,3.2,, -+func,PyDict_GetItemString,3.2,, -+func,PyDict_GetItemWithError,3.2,, -+func,PyDict_Items,3.2,, -+func,PyDict_Keys,3.2,, -+func,PyDict_Merge,3.2,, -+func,PyDict_MergeFromSeq2,3.2,, -+func,PyDict_New,3.2,, -+func,PyDict_Next,3.2,, -+func,PyDict_SetItem,3.2,, -+func,PyDict_SetItemString,3.2,, -+func,PyDict_Size,3.2,, -+data,PyDict_Type,3.2,, -+func,PyDict_Update,3.2,, -+func,PyDict_Values,3.2,, -+data,PyEllipsis_Type,3.2,, -+data,PyEnum_Type,3.2,, -+func,PyErr_BadArgument,3.2,, -+func,PyErr_BadInternalCall,3.2,, -+func,PyErr_CheckSignals,3.2,, -+func,PyErr_Clear,3.2,, -+func,PyErr_Display,3.2,, -+func,PyErr_DisplayException,3.12,, -+func,PyErr_ExceptionMatches,3.2,, -+func,PyErr_Fetch,3.2,, -+func,PyErr_Format,3.2,, -+func,PyErr_FormatV,3.5,, -+func,PyErr_GetExcInfo,3.7,, -+func,PyErr_GetHandledException,3.11,, -+func,PyErr_GetRaisedException,3.12,, -+func,PyErr_GivenExceptionMatches,3.2,, -+func,PyErr_NewException,3.2,, -+func,PyErr_NewExceptionWithDoc,3.2,, -+func,PyErr_NoMemory,3.2,, -+func,PyErr_NormalizeException,3.2,, -+func,PyErr_Occurred,3.2,, -+func,PyErr_Print,3.2,, -+func,PyErr_PrintEx,3.2,, -+func,PyErr_ProgramText,3.2,, -+func,PyErr_ResourceWarning,3.6,, -+func,PyErr_Restore,3.2,, -+func,PyErr_SetExcFromWindowsErr,3.7,on Windows, -+func,PyErr_SetExcFromWindowsErrWithFilename,3.7,on Windows, -+func,PyErr_SetExcFromWindowsErrWithFilenameObject,3.7,on Windows, -+func,PyErr_SetExcFromWindowsErrWithFilenameObjects,3.7,on Windows, -+func,PyErr_SetExcInfo,3.7,, -+func,PyErr_SetFromErrno,3.2,, -+func,PyErr_SetFromErrnoWithFilename,3.2,, -+func,PyErr_SetFromErrnoWithFilenameObject,3.2,, -+func,PyErr_SetFromErrnoWithFilenameObjects,3.7,, -+func,PyErr_SetFromWindowsErr,3.7,on Windows, 
-+func,PyErr_SetFromWindowsErrWithFilename,3.7,on Windows, -+func,PyErr_SetHandledException,3.11,, -+func,PyErr_SetImportError,3.7,, -+func,PyErr_SetImportErrorSubclass,3.6,, -+func,PyErr_SetInterrupt,3.2,, -+func,PyErr_SetInterruptEx,3.10,, -+func,PyErr_SetNone,3.2,, -+func,PyErr_SetObject,3.2,, -+func,PyErr_SetRaisedException,3.12,, -+func,PyErr_SetString,3.2,, -+func,PyErr_SyntaxLocation,3.2,, -+func,PyErr_SyntaxLocationEx,3.7,, -+func,PyErr_WarnEx,3.2,, -+func,PyErr_WarnExplicit,3.2,, -+func,PyErr_WarnFormat,3.2,, -+func,PyErr_WriteUnraisable,3.2,, -+func,PyEval_AcquireLock,3.2,, -+func,PyEval_AcquireThread,3.2,, -+func,PyEval_CallFunction,3.2,, -+func,PyEval_CallMethod,3.2,, -+func,PyEval_CallObjectWithKeywords,3.2,, -+func,PyEval_EvalCode,3.2,, -+func,PyEval_EvalCodeEx,3.2,, -+func,PyEval_EvalFrame,3.2,, -+func,PyEval_EvalFrameEx,3.2,, -+func,PyEval_GetBuiltins,3.2,, -+func,PyEval_GetFrame,3.2,, -+func,PyEval_GetFuncDesc,3.2,, -+func,PyEval_GetFuncName,3.2,, -+func,PyEval_GetGlobals,3.2,, -+func,PyEval_GetLocals,3.2,, -+func,PyEval_InitThreads,3.2,, -+func,PyEval_ReleaseLock,3.2,, -+func,PyEval_ReleaseThread,3.2,, -+func,PyEval_RestoreThread,3.2,, -+func,PyEval_SaveThread,3.2,, -+func,PyEval_ThreadsInitialized,3.2,, -+data,PyExc_ArithmeticError,3.2,, -+data,PyExc_AssertionError,3.2,, -+data,PyExc_AttributeError,3.2,, -+data,PyExc_BaseException,3.2,, -+data,PyExc_BaseExceptionGroup,3.11,, -+data,PyExc_BlockingIOError,3.7,, -+data,PyExc_BrokenPipeError,3.7,, -+data,PyExc_BufferError,3.2,, -+data,PyExc_BytesWarning,3.2,, -+data,PyExc_ChildProcessError,3.7,, -+data,PyExc_ConnectionAbortedError,3.7,, -+data,PyExc_ConnectionError,3.7,, -+data,PyExc_ConnectionRefusedError,3.7,, -+data,PyExc_ConnectionResetError,3.7,, -+data,PyExc_DeprecationWarning,3.2,, -+data,PyExc_EOFError,3.2,, -+data,PyExc_EncodingWarning,3.10,, -+data,PyExc_EnvironmentError,3.2,, -+data,PyExc_Exception,3.2,, -+data,PyExc_FileExistsError,3.7,, -+data,PyExc_FileNotFoundError,3.7,, -+data,PyExc_FloatingPointError,3.2,, -+data,PyExc_FutureWarning,3.2,, -+data,PyExc_GeneratorExit,3.2,, -+data,PyExc_IOError,3.2,, -+data,PyExc_ImportError,3.2,, -+data,PyExc_ImportWarning,3.2,, -+data,PyExc_IndentationError,3.2,, -+data,PyExc_IndexError,3.2,, -+data,PyExc_InterruptedError,3.7,, -+data,PyExc_IsADirectoryError,3.7,, -+data,PyExc_KeyError,3.2,, -+data,PyExc_KeyboardInterrupt,3.2,, -+data,PyExc_LookupError,3.2,, -+data,PyExc_MemoryError,3.2,, -+data,PyExc_ModuleNotFoundError,3.6,, -+data,PyExc_NameError,3.2,, -+data,PyExc_NotADirectoryError,3.7,, -+data,PyExc_NotImplementedError,3.2,, -+data,PyExc_OSError,3.2,, -+data,PyExc_OverflowError,3.2,, -+data,PyExc_PendingDeprecationWarning,3.2,, -+data,PyExc_PermissionError,3.7,, -+data,PyExc_ProcessLookupError,3.7,, -+data,PyExc_RecursionError,3.7,, -+data,PyExc_ReferenceError,3.2,, -+data,PyExc_ResourceWarning,3.7,, -+data,PyExc_RuntimeError,3.2,, -+data,PyExc_RuntimeWarning,3.2,, -+data,PyExc_StopAsyncIteration,3.7,, -+data,PyExc_StopIteration,3.2,, -+data,PyExc_SyntaxError,3.2,, -+data,PyExc_SyntaxWarning,3.2,, -+data,PyExc_SystemError,3.2,, -+data,PyExc_SystemExit,3.2,, -+data,PyExc_TabError,3.2,, -+data,PyExc_TimeoutError,3.7,, -+data,PyExc_TypeError,3.2,, -+data,PyExc_UnboundLocalError,3.2,, -+data,PyExc_UnicodeDecodeError,3.2,, -+data,PyExc_UnicodeEncodeError,3.2,, -+data,PyExc_UnicodeError,3.2,, -+data,PyExc_UnicodeTranslateError,3.2,, -+data,PyExc_UnicodeWarning,3.2,, -+data,PyExc_UserWarning,3.2,, -+data,PyExc_ValueError,3.2,, -+data,PyExc_Warning,3.2,, 
-+data,PyExc_WindowsError,3.7,on Windows, -+data,PyExc_ZeroDivisionError,3.2,, -+func,PyExceptionClass_Name,3.8,, -+func,PyException_GetArgs,3.12,, -+func,PyException_GetCause,3.2,, -+func,PyException_GetContext,3.2,, -+func,PyException_GetTraceback,3.2,, -+func,PyException_SetArgs,3.12,, -+func,PyException_SetCause,3.2,, -+func,PyException_SetContext,3.2,, -+func,PyException_SetTraceback,3.2,, -+func,PyFile_FromFd,3.2,, -+func,PyFile_GetLine,3.2,, -+func,PyFile_WriteObject,3.2,, -+func,PyFile_WriteString,3.2,, -+data,PyFilter_Type,3.2,, -+func,PyFloat_AsDouble,3.2,, -+func,PyFloat_FromDouble,3.2,, -+func,PyFloat_FromString,3.2,, -+func,PyFloat_GetInfo,3.2,, -+func,PyFloat_GetMax,3.2,, -+func,PyFloat_GetMin,3.2,, -+data,PyFloat_Type,3.2,, - type,PyFrameObject,3.2,,opaque --function,PyFrame_GetCode,3.10,, --function,PyFrame_GetLineNumber,3.10,, --function,PyFrozenSet_New,3.2,, --var,PyFrozenSet_Type,3.2,, --function,PyGC_Collect,3.2,, --function,PyGC_Disable,3.10,, --function,PyGC_Enable,3.10,, --function,PyGC_IsEnabled,3.10,, --function,PyGILState_Ensure,3.2,, --function,PyGILState_GetThisThreadState,3.2,, --function,PyGILState_Release,3.2,, -+func,PyFrame_GetCode,3.10,, -+func,PyFrame_GetLineNumber,3.10,, -+func,PyFrozenSet_New,3.2,, -+data,PyFrozenSet_Type,3.2,, -+func,PyGC_Collect,3.2,, -+func,PyGC_Disable,3.10,, -+func,PyGC_Enable,3.10,, -+func,PyGC_IsEnabled,3.10,, -+func,PyGILState_Ensure,3.2,, -+func,PyGILState_GetThisThreadState,3.2,, -+func,PyGILState_Release,3.2,, - type,PyGILState_STATE,3.2,, - type,PyGetSetDef,3.2,,full-abi --var,PyGetSetDescr_Type,3.2,, --function,PyImport_AddModule,3.2,, --function,PyImport_AddModuleObject,3.7,, --function,PyImport_AppendInittab,3.2,, --function,PyImport_ExecCodeModule,3.2,, --function,PyImport_ExecCodeModuleEx,3.2,, --function,PyImport_ExecCodeModuleObject,3.7,, --function,PyImport_ExecCodeModuleWithPathnames,3.2,, --function,PyImport_GetImporter,3.2,, --function,PyImport_GetMagicNumber,3.2,, --function,PyImport_GetMagicTag,3.2,, --function,PyImport_GetModule,3.8,, --function,PyImport_GetModuleDict,3.2,, --function,PyImport_Import,3.2,, --function,PyImport_ImportFrozenModule,3.2,, --function,PyImport_ImportFrozenModuleObject,3.7,, --function,PyImport_ImportModule,3.2,, --function,PyImport_ImportModuleLevel,3.2,, --function,PyImport_ImportModuleLevelObject,3.7,, --function,PyImport_ImportModuleNoBlock,3.2,, --function,PyImport_ReloadModule,3.2,, --function,PyIndex_Check,3.8,, -+data,PyGetSetDescr_Type,3.2,, -+func,PyImport_AddModule,3.2,, -+func,PyImport_AddModuleObject,3.7,, -+func,PyImport_AppendInittab,3.2,, -+func,PyImport_ExecCodeModule,3.2,, -+func,PyImport_ExecCodeModuleEx,3.2,, -+func,PyImport_ExecCodeModuleObject,3.7,, -+func,PyImport_ExecCodeModuleWithPathnames,3.2,, -+func,PyImport_GetImporter,3.2,, -+func,PyImport_GetMagicNumber,3.2,, -+func,PyImport_GetMagicTag,3.2,, -+func,PyImport_GetModule,3.8,, -+func,PyImport_GetModuleDict,3.2,, -+func,PyImport_Import,3.2,, -+func,PyImport_ImportFrozenModule,3.2,, -+func,PyImport_ImportFrozenModuleObject,3.7,, -+func,PyImport_ImportModule,3.2,, -+func,PyImport_ImportModuleLevel,3.2,, -+func,PyImport_ImportModuleLevelObject,3.7,, -+func,PyImport_ImportModuleNoBlock,3.2,, -+func,PyImport_ReloadModule,3.2,, -+func,PyIndex_Check,3.8,, - type,PyInterpreterState,3.2,,opaque --function,PyInterpreterState_Clear,3.2,, --function,PyInterpreterState_Delete,3.2,, --function,PyInterpreterState_Get,3.9,, --function,PyInterpreterState_GetDict,3.8,, --function,PyInterpreterState_GetID,3.7,, 
--function,PyInterpreterState_New,3.2,, --function,PyIter_Check,3.8,, --function,PyIter_Next,3.2,, --function,PyIter_Send,3.10,, --var,PyListIter_Type,3.2,, --var,PyListRevIter_Type,3.2,, --function,PyList_Append,3.2,, --function,PyList_AsTuple,3.2,, --function,PyList_GetItem,3.2,, --function,PyList_GetSlice,3.2,, --function,PyList_Insert,3.2,, --function,PyList_New,3.2,, --function,PyList_Reverse,3.2,, --function,PyList_SetItem,3.2,, --function,PyList_SetSlice,3.2,, --function,PyList_Size,3.2,, --function,PyList_Sort,3.2,, --var,PyList_Type,3.2,, -+func,PyInterpreterState_Clear,3.2,, -+func,PyInterpreterState_Delete,3.2,, -+func,PyInterpreterState_Get,3.9,, -+func,PyInterpreterState_GetDict,3.8,, -+func,PyInterpreterState_GetID,3.7,, -+func,PyInterpreterState_New,3.2,, -+func,PyIter_Check,3.8,, -+func,PyIter_Next,3.2,, -+func,PyIter_Send,3.10,, -+data,PyListIter_Type,3.2,, -+data,PyListRevIter_Type,3.2,, -+func,PyList_Append,3.2,, -+func,PyList_AsTuple,3.2,, -+func,PyList_GetItem,3.2,, -+func,PyList_GetSlice,3.2,, -+func,PyList_Insert,3.2,, -+func,PyList_New,3.2,, -+func,PyList_Reverse,3.2,, -+func,PyList_SetItem,3.2,, -+func,PyList_SetSlice,3.2,, -+func,PyList_Size,3.2,, -+func,PyList_Sort,3.2,, -+data,PyList_Type,3.2,, - type,PyLongObject,3.2,,opaque --var,PyLongRangeIter_Type,3.2,, --function,PyLong_AsDouble,3.2,, --function,PyLong_AsLong,3.2,, --function,PyLong_AsLongAndOverflow,3.2,, --function,PyLong_AsLongLong,3.2,, --function,PyLong_AsLongLongAndOverflow,3.2,, --function,PyLong_AsSize_t,3.2,, --function,PyLong_AsSsize_t,3.2,, --function,PyLong_AsUnsignedLong,3.2,, --function,PyLong_AsUnsignedLongLong,3.2,, --function,PyLong_AsUnsignedLongLongMask,3.2,, --function,PyLong_AsUnsignedLongMask,3.2,, --function,PyLong_AsVoidPtr,3.2,, --function,PyLong_FromDouble,3.2,, --function,PyLong_FromLong,3.2,, --function,PyLong_FromLongLong,3.2,, --function,PyLong_FromSize_t,3.2,, --function,PyLong_FromSsize_t,3.2,, --function,PyLong_FromString,3.2,, --function,PyLong_FromUnsignedLong,3.2,, --function,PyLong_FromUnsignedLongLong,3.2,, --function,PyLong_FromVoidPtr,3.2,, --function,PyLong_GetInfo,3.2,, --var,PyLong_Type,3.2,, --var,PyMap_Type,3.2,, --function,PyMapping_Check,3.2,, --function,PyMapping_GetItemString,3.2,, --function,PyMapping_HasKey,3.2,, --function,PyMapping_HasKeyString,3.2,, --function,PyMapping_Items,3.2,, --function,PyMapping_Keys,3.2,, --function,PyMapping_Length,3.2,, --function,PyMapping_SetItemString,3.2,, --function,PyMapping_Size,3.2,, --function,PyMapping_Values,3.2,, --function,PyMem_Calloc,3.7,, --function,PyMem_Free,3.2,, --function,PyMem_Malloc,3.2,, --function,PyMem_Realloc,3.2,, -+data,PyLongRangeIter_Type,3.2,, -+func,PyLong_AsDouble,3.2,, -+func,PyLong_AsLong,3.2,, -+func,PyLong_AsLongAndOverflow,3.2,, -+func,PyLong_AsLongLong,3.2,, -+func,PyLong_AsLongLongAndOverflow,3.2,, -+func,PyLong_AsSize_t,3.2,, -+func,PyLong_AsSsize_t,3.2,, -+func,PyLong_AsUnsignedLong,3.2,, -+func,PyLong_AsUnsignedLongLong,3.2,, -+func,PyLong_AsUnsignedLongLongMask,3.2,, -+func,PyLong_AsUnsignedLongMask,3.2,, -+func,PyLong_AsVoidPtr,3.2,, -+func,PyLong_FromDouble,3.2,, -+func,PyLong_FromLong,3.2,, -+func,PyLong_FromLongLong,3.2,, -+func,PyLong_FromSize_t,3.2,, -+func,PyLong_FromSsize_t,3.2,, -+func,PyLong_FromString,3.2,, -+func,PyLong_FromUnsignedLong,3.2,, -+func,PyLong_FromUnsignedLongLong,3.2,, -+func,PyLong_FromVoidPtr,3.2,, -+func,PyLong_GetInfo,3.2,, -+data,PyLong_Type,3.2,, -+data,PyMap_Type,3.2,, -+func,PyMapping_Check,3.2,, -+func,PyMapping_GetItemString,3.2,, 
-+func,PyMapping_HasKey,3.2,, -+func,PyMapping_HasKeyString,3.2,, -+func,PyMapping_Items,3.2,, -+func,PyMapping_Keys,3.2,, -+func,PyMapping_Length,3.2,, -+func,PyMapping_SetItemString,3.2,, -+func,PyMapping_Size,3.2,, -+func,PyMapping_Values,3.2,, -+func,PyMem_Calloc,3.7,, -+func,PyMem_Free,3.2,, -+func,PyMem_Malloc,3.2,, -+func,PyMem_Realloc,3.2,, - type,PyMemberDef,3.2,,full-abi --var,PyMemberDescr_Type,3.2,, --function,PyMember_GetOne,3.2,, --function,PyMember_SetOne,3.2,, --function,PyMemoryView_FromBuffer,3.11,, --function,PyMemoryView_FromMemory,3.7,, --function,PyMemoryView_FromObject,3.2,, --function,PyMemoryView_GetContiguous,3.2,, --var,PyMemoryView_Type,3.2,, -+data,PyMemberDescr_Type,3.2,, -+func,PyMember_GetOne,3.2,, -+func,PyMember_SetOne,3.2,, -+func,PyMemoryView_FromBuffer,3.11,, -+func,PyMemoryView_FromMemory,3.7,, -+func,PyMemoryView_FromObject,3.2,, -+func,PyMemoryView_GetContiguous,3.2,, -+data,PyMemoryView_Type,3.2,, - type,PyMethodDef,3.2,,full-abi --var,PyMethodDescr_Type,3.2,, -+data,PyMethodDescr_Type,3.2,, - type,PyModuleDef,3.2,,full-abi - type,PyModuleDef_Base,3.2,,full-abi --function,PyModuleDef_Init,3.5,, --var,PyModuleDef_Type,3.5,, --function,PyModule_AddFunctions,3.7,, --function,PyModule_AddIntConstant,3.2,, --function,PyModule_AddObject,3.2,, --function,PyModule_AddObjectRef,3.10,, --function,PyModule_AddStringConstant,3.2,, --function,PyModule_AddType,3.10,, --function,PyModule_Create2,3.2,, --function,PyModule_ExecDef,3.7,, --function,PyModule_FromDefAndSpec2,3.7,, --function,PyModule_GetDef,3.2,, --function,PyModule_GetDict,3.2,, --function,PyModule_GetFilename,3.2,, --function,PyModule_GetFilenameObject,3.2,, --function,PyModule_GetName,3.2,, --function,PyModule_GetNameObject,3.7,, --function,PyModule_GetState,3.2,, --function,PyModule_New,3.2,, --function,PyModule_NewObject,3.7,, --function,PyModule_SetDocString,3.7,, --var,PyModule_Type,3.2,, --function,PyNumber_Absolute,3.2,, --function,PyNumber_Add,3.2,, --function,PyNumber_And,3.2,, --function,PyNumber_AsSsize_t,3.2,, --function,PyNumber_Check,3.2,, --function,PyNumber_Divmod,3.2,, --function,PyNumber_Float,3.2,, --function,PyNumber_FloorDivide,3.2,, --function,PyNumber_InPlaceAdd,3.2,, --function,PyNumber_InPlaceAnd,3.2,, --function,PyNumber_InPlaceFloorDivide,3.2,, --function,PyNumber_InPlaceLshift,3.2,, --function,PyNumber_InPlaceMatrixMultiply,3.7,, --function,PyNumber_InPlaceMultiply,3.2,, --function,PyNumber_InPlaceOr,3.2,, --function,PyNumber_InPlacePower,3.2,, --function,PyNumber_InPlaceRemainder,3.2,, --function,PyNumber_InPlaceRshift,3.2,, --function,PyNumber_InPlaceSubtract,3.2,, --function,PyNumber_InPlaceTrueDivide,3.2,, --function,PyNumber_InPlaceXor,3.2,, --function,PyNumber_Index,3.2,, --function,PyNumber_Invert,3.2,, --function,PyNumber_Long,3.2,, --function,PyNumber_Lshift,3.2,, --function,PyNumber_MatrixMultiply,3.7,, --function,PyNumber_Multiply,3.2,, --function,PyNumber_Negative,3.2,, --function,PyNumber_Or,3.2,, --function,PyNumber_Positive,3.2,, --function,PyNumber_Power,3.2,, --function,PyNumber_Remainder,3.2,, --function,PyNumber_Rshift,3.2,, --function,PyNumber_Subtract,3.2,, --function,PyNumber_ToBase,3.2,, --function,PyNumber_TrueDivide,3.2,, --function,PyNumber_Xor,3.2,, --function,PyOS_AfterFork,3.2,on platforms with fork(), --function,PyOS_AfterFork_Child,3.7,on platforms with fork(), --function,PyOS_AfterFork_Parent,3.7,on platforms with fork(), --function,PyOS_BeforeFork,3.7,on platforms with fork(), --function,PyOS_CheckStack,3.7,on platforms with USE_STACKCHECK, 
--function,PyOS_FSPath,3.6,, --var,PyOS_InputHook,3.2,, --function,PyOS_InterruptOccurred,3.2,, --function,PyOS_double_to_string,3.2,, --function,PyOS_getsig,3.2,, --function,PyOS_mystricmp,3.2,, --function,PyOS_mystrnicmp,3.2,, --function,PyOS_setsig,3.2,, -+func,PyModuleDef_Init,3.5,, -+data,PyModuleDef_Type,3.5,, -+func,PyModule_AddFunctions,3.7,, -+func,PyModule_AddIntConstant,3.2,, -+func,PyModule_AddObject,3.2,, -+func,PyModule_AddObjectRef,3.10,, -+func,PyModule_AddStringConstant,3.2,, -+func,PyModule_AddType,3.10,, -+func,PyModule_Create2,3.2,, -+func,PyModule_ExecDef,3.7,, -+func,PyModule_FromDefAndSpec2,3.7,, -+func,PyModule_GetDef,3.2,, -+func,PyModule_GetDict,3.2,, -+func,PyModule_GetFilename,3.2,, -+func,PyModule_GetFilenameObject,3.2,, -+func,PyModule_GetName,3.2,, -+func,PyModule_GetNameObject,3.7,, -+func,PyModule_GetState,3.2,, -+func,PyModule_New,3.2,, -+func,PyModule_NewObject,3.7,, -+func,PyModule_SetDocString,3.7,, -+data,PyModule_Type,3.2,, -+func,PyNumber_Absolute,3.2,, -+func,PyNumber_Add,3.2,, -+func,PyNumber_And,3.2,, -+func,PyNumber_AsSsize_t,3.2,, -+func,PyNumber_Check,3.2,, -+func,PyNumber_Divmod,3.2,, -+func,PyNumber_Float,3.2,, -+func,PyNumber_FloorDivide,3.2,, -+func,PyNumber_InPlaceAdd,3.2,, -+func,PyNumber_InPlaceAnd,3.2,, -+func,PyNumber_InPlaceFloorDivide,3.2,, -+func,PyNumber_InPlaceLshift,3.2,, -+func,PyNumber_InPlaceMatrixMultiply,3.7,, -+func,PyNumber_InPlaceMultiply,3.2,, -+func,PyNumber_InPlaceOr,3.2,, -+func,PyNumber_InPlacePower,3.2,, -+func,PyNumber_InPlaceRemainder,3.2,, -+func,PyNumber_InPlaceRshift,3.2,, -+func,PyNumber_InPlaceSubtract,3.2,, -+func,PyNumber_InPlaceTrueDivide,3.2,, -+func,PyNumber_InPlaceXor,3.2,, -+func,PyNumber_Index,3.2,, -+func,PyNumber_Invert,3.2,, -+func,PyNumber_Long,3.2,, -+func,PyNumber_Lshift,3.2,, -+func,PyNumber_MatrixMultiply,3.7,, -+func,PyNumber_Multiply,3.2,, -+func,PyNumber_Negative,3.2,, -+func,PyNumber_Or,3.2,, -+func,PyNumber_Positive,3.2,, -+func,PyNumber_Power,3.2,, -+func,PyNumber_Remainder,3.2,, -+func,PyNumber_Rshift,3.2,, -+func,PyNumber_Subtract,3.2,, -+func,PyNumber_ToBase,3.2,, -+func,PyNumber_TrueDivide,3.2,, -+func,PyNumber_Xor,3.2,, -+func,PyOS_AfterFork,3.2,on platforms with fork(), -+func,PyOS_AfterFork_Child,3.7,on platforms with fork(), -+func,PyOS_AfterFork_Parent,3.7,on platforms with fork(), -+func,PyOS_BeforeFork,3.7,on platforms with fork(), -+func,PyOS_CheckStack,3.7,on platforms with USE_STACKCHECK, -+func,PyOS_FSPath,3.6,, -+data,PyOS_InputHook,3.2,, -+func,PyOS_InterruptOccurred,3.2,, -+func,PyOS_double_to_string,3.2,, -+func,PyOS_getsig,3.2,, -+func,PyOS_mystricmp,3.2,, -+func,PyOS_mystrnicmp,3.2,, -+func,PyOS_setsig,3.2,, - type,PyOS_sighandler_t,3.2,, --function,PyOS_snprintf,3.2,, --function,PyOS_string_to_double,3.2,, --function,PyOS_strtol,3.2,, --function,PyOS_strtoul,3.2,, --function,PyOS_vsnprintf,3.2,, -+func,PyOS_snprintf,3.2,, -+func,PyOS_string_to_double,3.2,, -+func,PyOS_strtol,3.2,, -+func,PyOS_strtoul,3.2,, -+func,PyOS_vsnprintf,3.2,, - type,PyObject,3.2,,members - member,PyObject.ob_refcnt,3.2,, - member,PyObject.ob_type,3.2,, --function,PyObject_ASCII,3.2,, --function,PyObject_AsCharBuffer,3.2,, --function,PyObject_AsFileDescriptor,3.2,, --function,PyObject_AsReadBuffer,3.2,, --function,PyObject_AsWriteBuffer,3.2,, --function,PyObject_Bytes,3.2,, --function,PyObject_Call,3.2,, --function,PyObject_CallFunction,3.2,, --function,PyObject_CallFunctionObjArgs,3.2,, --function,PyObject_CallMethod,3.2,, --function,PyObject_CallMethodObjArgs,3.2,, 
--function,PyObject_CallNoArgs,3.10,, --function,PyObject_CallObject,3.2,, --function,PyObject_Calloc,3.7,, --function,PyObject_CheckBuffer,3.11,, --function,PyObject_CheckReadBuffer,3.2,, --function,PyObject_ClearWeakRefs,3.2,, --function,PyObject_CopyData,3.11,, --function,PyObject_DelItem,3.2,, --function,PyObject_DelItemString,3.2,, --function,PyObject_Dir,3.2,, --function,PyObject_Format,3.2,, --function,PyObject_Free,3.2,, --function,PyObject_GC_Del,3.2,, --function,PyObject_GC_IsFinalized,3.9,, --function,PyObject_GC_IsTracked,3.9,, --function,PyObject_GC_Track,3.2,, --function,PyObject_GC_UnTrack,3.2,, --function,PyObject_GenericGetAttr,3.2,, --function,PyObject_GenericGetDict,3.10,, --function,PyObject_GenericSetAttr,3.2,, --function,PyObject_GenericSetDict,3.7,, --function,PyObject_GetAIter,3.10,, --function,PyObject_GetAttr,3.2,, --function,PyObject_GetAttrString,3.2,, --function,PyObject_GetBuffer,3.11,, --function,PyObject_GetItem,3.2,, --function,PyObject_GetIter,3.2,, --function,PyObject_GetTypeData,3.12,, --function,PyObject_HasAttr,3.2,, --function,PyObject_HasAttrString,3.2,, --function,PyObject_Hash,3.2,, --function,PyObject_HashNotImplemented,3.2,, --function,PyObject_Init,3.2,, --function,PyObject_InitVar,3.2,, --function,PyObject_IsInstance,3.2,, --function,PyObject_IsSubclass,3.2,, --function,PyObject_IsTrue,3.2,, --function,PyObject_Length,3.2,, --function,PyObject_Malloc,3.2,, --function,PyObject_Not,3.2,, --function,PyObject_Realloc,3.2,, --function,PyObject_Repr,3.2,, --function,PyObject_RichCompare,3.2,, --function,PyObject_RichCompareBool,3.2,, --function,PyObject_SelfIter,3.2,, --function,PyObject_SetAttr,3.2,, --function,PyObject_SetAttrString,3.2,, --function,PyObject_SetItem,3.2,, --function,PyObject_Size,3.2,, --function,PyObject_Str,3.2,, --function,PyObject_Type,3.2,, --function,PyObject_Vectorcall,3.12,, --function,PyObject_VectorcallMethod,3.12,, --var,PyProperty_Type,3.2,, --var,PyRangeIter_Type,3.2,, --var,PyRange_Type,3.2,, --var,PyReversed_Type,3.2,, --function,PySeqIter_New,3.2,, --var,PySeqIter_Type,3.2,, --function,PySequence_Check,3.2,, --function,PySequence_Concat,3.2,, --function,PySequence_Contains,3.2,, --function,PySequence_Count,3.2,, --function,PySequence_DelItem,3.2,, --function,PySequence_DelSlice,3.2,, --function,PySequence_Fast,3.2,, --function,PySequence_GetItem,3.2,, --function,PySequence_GetSlice,3.2,, --function,PySequence_In,3.2,, --function,PySequence_InPlaceConcat,3.2,, --function,PySequence_InPlaceRepeat,3.2,, --function,PySequence_Index,3.2,, --function,PySequence_Length,3.2,, --function,PySequence_List,3.2,, --function,PySequence_Repeat,3.2,, --function,PySequence_SetItem,3.2,, --function,PySequence_SetSlice,3.2,, --function,PySequence_Size,3.2,, --function,PySequence_Tuple,3.2,, --var,PySetIter_Type,3.2,, --function,PySet_Add,3.2,, --function,PySet_Clear,3.2,, --function,PySet_Contains,3.2,, --function,PySet_Discard,3.2,, --function,PySet_New,3.2,, --function,PySet_Pop,3.2,, --function,PySet_Size,3.2,, --var,PySet_Type,3.2,, --function,PySlice_AdjustIndices,3.7,, --function,PySlice_GetIndices,3.2,, --function,PySlice_GetIndicesEx,3.2,, --function,PySlice_New,3.2,, --var,PySlice_Type,3.2,, --function,PySlice_Unpack,3.7,, --function,PyState_AddModule,3.3,, --function,PyState_FindModule,3.2,, --function,PyState_RemoveModule,3.3,, -+func,PyObject_ASCII,3.2,, -+func,PyObject_AsCharBuffer,3.2,, -+func,PyObject_AsFileDescriptor,3.2,, -+func,PyObject_AsReadBuffer,3.2,, -+func,PyObject_AsWriteBuffer,3.2,, 
-+func,PyObject_Bytes,3.2,, -+func,PyObject_Call,3.2,, -+func,PyObject_CallFunction,3.2,, -+func,PyObject_CallFunctionObjArgs,3.2,, -+func,PyObject_CallMethod,3.2,, -+func,PyObject_CallMethodObjArgs,3.2,, -+func,PyObject_CallNoArgs,3.10,, -+func,PyObject_CallObject,3.2,, -+func,PyObject_Calloc,3.7,, -+func,PyObject_CheckBuffer,3.11,, -+func,PyObject_CheckReadBuffer,3.2,, -+func,PyObject_ClearWeakRefs,3.2,, -+func,PyObject_CopyData,3.11,, -+func,PyObject_DelItem,3.2,, -+func,PyObject_DelItemString,3.2,, -+func,PyObject_Dir,3.2,, -+func,PyObject_Format,3.2,, -+func,PyObject_Free,3.2,, -+func,PyObject_GC_Del,3.2,, -+func,PyObject_GC_IsFinalized,3.9,, -+func,PyObject_GC_IsTracked,3.9,, -+func,PyObject_GC_Track,3.2,, -+func,PyObject_GC_UnTrack,3.2,, -+func,PyObject_GenericGetAttr,3.2,, -+func,PyObject_GenericGetDict,3.10,, -+func,PyObject_GenericSetAttr,3.2,, -+func,PyObject_GenericSetDict,3.7,, -+func,PyObject_GetAIter,3.10,, -+func,PyObject_GetAttr,3.2,, -+func,PyObject_GetAttrString,3.2,, -+func,PyObject_GetBuffer,3.11,, -+func,PyObject_GetItem,3.2,, -+func,PyObject_GetIter,3.2,, -+func,PyObject_GetTypeData,3.12,, -+func,PyObject_HasAttr,3.2,, -+func,PyObject_HasAttrString,3.2,, -+func,PyObject_Hash,3.2,, -+func,PyObject_HashNotImplemented,3.2,, -+func,PyObject_Init,3.2,, -+func,PyObject_InitVar,3.2,, -+func,PyObject_IsInstance,3.2,, -+func,PyObject_IsSubclass,3.2,, -+func,PyObject_IsTrue,3.2,, -+func,PyObject_Length,3.2,, -+func,PyObject_Malloc,3.2,, -+func,PyObject_Not,3.2,, -+func,PyObject_Realloc,3.2,, -+func,PyObject_Repr,3.2,, -+func,PyObject_RichCompare,3.2,, -+func,PyObject_RichCompareBool,3.2,, -+func,PyObject_SelfIter,3.2,, -+func,PyObject_SetAttr,3.2,, -+func,PyObject_SetAttrString,3.2,, -+func,PyObject_SetItem,3.2,, -+func,PyObject_Size,3.2,, -+func,PyObject_Str,3.2,, -+func,PyObject_Type,3.2,, -+func,PyObject_Vectorcall,3.12,, -+func,PyObject_VectorcallMethod,3.12,, -+data,PyProperty_Type,3.2,, -+data,PyRangeIter_Type,3.2,, -+data,PyRange_Type,3.2,, -+data,PyReversed_Type,3.2,, -+func,PySeqIter_New,3.2,, -+data,PySeqIter_Type,3.2,, -+func,PySequence_Check,3.2,, -+func,PySequence_Concat,3.2,, -+func,PySequence_Contains,3.2,, -+func,PySequence_Count,3.2,, -+func,PySequence_DelItem,3.2,, -+func,PySequence_DelSlice,3.2,, -+func,PySequence_Fast,3.2,, -+func,PySequence_GetItem,3.2,, -+func,PySequence_GetSlice,3.2,, -+func,PySequence_In,3.2,, -+func,PySequence_InPlaceConcat,3.2,, -+func,PySequence_InPlaceRepeat,3.2,, -+func,PySequence_Index,3.2,, -+func,PySequence_Length,3.2,, -+func,PySequence_List,3.2,, -+func,PySequence_Repeat,3.2,, -+func,PySequence_SetItem,3.2,, -+func,PySequence_SetSlice,3.2,, -+func,PySequence_Size,3.2,, -+func,PySequence_Tuple,3.2,, -+data,PySetIter_Type,3.2,, -+func,PySet_Add,3.2,, -+func,PySet_Clear,3.2,, -+func,PySet_Contains,3.2,, -+func,PySet_Discard,3.2,, -+func,PySet_New,3.2,, -+func,PySet_Pop,3.2,, -+func,PySet_Size,3.2,, -+data,PySet_Type,3.2,, -+func,PySlice_AdjustIndices,3.7,, -+func,PySlice_GetIndices,3.2,, -+func,PySlice_GetIndicesEx,3.2,, -+func,PySlice_New,3.2,, -+data,PySlice_Type,3.2,, -+func,PySlice_Unpack,3.7,, -+func,PyState_AddModule,3.3,, -+func,PyState_FindModule,3.2,, -+func,PyState_RemoveModule,3.3,, - type,PyStructSequence_Desc,3.2,,full-abi - type,PyStructSequence_Field,3.2,,full-abi --function,PyStructSequence_GetItem,3.2,, --function,PyStructSequence_New,3.2,, --function,PyStructSequence_NewType,3.2,, --function,PyStructSequence_SetItem,3.2,, --var,PyStructSequence_UnnamedField,3.11,, --var,PySuper_Type,3.2,, 
--function,PySys_AddWarnOption,3.2,, --function,PySys_AddWarnOptionUnicode,3.2,, --function,PySys_AddXOption,3.7,, --function,PySys_FormatStderr,3.2,, --function,PySys_FormatStdout,3.2,, --function,PySys_GetObject,3.2,, --function,PySys_GetXOptions,3.7,, --function,PySys_HasWarnOptions,3.2,, --function,PySys_ResetWarnOptions,3.2,, --function,PySys_SetArgv,3.2,, --function,PySys_SetArgvEx,3.2,, --function,PySys_SetObject,3.2,, --function,PySys_SetPath,3.2,, --function,PySys_WriteStderr,3.2,, --function,PySys_WriteStdout,3.2,, -+func,PyStructSequence_GetItem,3.2,, -+func,PyStructSequence_New,3.2,, -+func,PyStructSequence_NewType,3.2,, -+func,PyStructSequence_SetItem,3.2,, -+data,PyStructSequence_UnnamedField,3.11,, -+data,PySuper_Type,3.2,, -+func,PySys_AddWarnOption,3.2,, -+func,PySys_AddWarnOptionUnicode,3.2,, -+func,PySys_AddXOption,3.7,, -+func,PySys_FormatStderr,3.2,, -+func,PySys_FormatStdout,3.2,, -+func,PySys_GetObject,3.2,, -+func,PySys_GetXOptions,3.7,, -+func,PySys_HasWarnOptions,3.2,, -+func,PySys_ResetWarnOptions,3.2,, -+func,PySys_SetArgv,3.2,, -+func,PySys_SetArgvEx,3.2,, -+func,PySys_SetObject,3.2,, -+func,PySys_SetPath,3.2,, -+func,PySys_WriteStderr,3.2,, -+func,PySys_WriteStdout,3.2,, - type,PyThreadState,3.2,,opaque --function,PyThreadState_Clear,3.2,, --function,PyThreadState_Delete,3.2,, --function,PyThreadState_Get,3.2,, --function,PyThreadState_GetDict,3.2,, --function,PyThreadState_GetFrame,3.10,, --function,PyThreadState_GetID,3.10,, --function,PyThreadState_GetInterpreter,3.10,, --function,PyThreadState_New,3.2,, --function,PyThreadState_SetAsyncExc,3.2,, --function,PyThreadState_Swap,3.2,, --function,PyThread_GetInfo,3.3,, --function,PyThread_ReInitTLS,3.2,, --function,PyThread_acquire_lock,3.2,, --function,PyThread_acquire_lock_timed,3.2,, --function,PyThread_allocate_lock,3.2,, --function,PyThread_create_key,3.2,, --function,PyThread_delete_key,3.2,, --function,PyThread_delete_key_value,3.2,, --function,PyThread_exit_thread,3.2,, --function,PyThread_free_lock,3.2,, --function,PyThread_get_key_value,3.2,, --function,PyThread_get_stacksize,3.2,, --function,PyThread_get_thread_ident,3.2,, --function,PyThread_get_thread_native_id,3.2,on platforms with native thread IDs, --function,PyThread_init_thread,3.2,, --function,PyThread_release_lock,3.2,, --function,PyThread_set_key_value,3.2,, --function,PyThread_set_stacksize,3.2,, --function,PyThread_start_new_thread,3.2,, --function,PyThread_tss_alloc,3.7,, --function,PyThread_tss_create,3.7,, --function,PyThread_tss_delete,3.7,, --function,PyThread_tss_free,3.7,, --function,PyThread_tss_get,3.7,, --function,PyThread_tss_is_created,3.7,, --function,PyThread_tss_set,3.7,, --function,PyTraceBack_Here,3.2,, --function,PyTraceBack_Print,3.2,, --var,PyTraceBack_Type,3.2,, --var,PyTupleIter_Type,3.2,, --function,PyTuple_GetItem,3.2,, --function,PyTuple_GetSlice,3.2,, --function,PyTuple_New,3.2,, --function,PyTuple_Pack,3.2,, --function,PyTuple_SetItem,3.2,, --function,PyTuple_Size,3.2,, --var,PyTuple_Type,3.2,, -+func,PyThreadState_Clear,3.2,, -+func,PyThreadState_Delete,3.2,, -+func,PyThreadState_Get,3.2,, -+func,PyThreadState_GetDict,3.2,, -+func,PyThreadState_GetFrame,3.10,, -+func,PyThreadState_GetID,3.10,, -+func,PyThreadState_GetInterpreter,3.10,, -+func,PyThreadState_New,3.2,, -+func,PyThreadState_SetAsyncExc,3.2,, -+func,PyThreadState_Swap,3.2,, -+func,PyThread_GetInfo,3.3,, -+func,PyThread_ReInitTLS,3.2,, -+func,PyThread_acquire_lock,3.2,, -+func,PyThread_acquire_lock_timed,3.2,, -+func,PyThread_allocate_lock,3.2,, 
-+func,PyThread_create_key,3.2,, -+func,PyThread_delete_key,3.2,, -+func,PyThread_delete_key_value,3.2,, -+func,PyThread_exit_thread,3.2,, -+func,PyThread_free_lock,3.2,, -+func,PyThread_get_key_value,3.2,, -+func,PyThread_get_stacksize,3.2,, -+func,PyThread_get_thread_ident,3.2,, -+func,PyThread_get_thread_native_id,3.2,on platforms with native thread IDs, -+func,PyThread_init_thread,3.2,, -+func,PyThread_release_lock,3.2,, -+func,PyThread_set_key_value,3.2,, -+func,PyThread_set_stacksize,3.2,, -+func,PyThread_start_new_thread,3.2,, -+func,PyThread_tss_alloc,3.7,, -+func,PyThread_tss_create,3.7,, -+func,PyThread_tss_delete,3.7,, -+func,PyThread_tss_free,3.7,, -+func,PyThread_tss_get,3.7,, -+func,PyThread_tss_is_created,3.7,, -+func,PyThread_tss_set,3.7,, -+func,PyTraceBack_Here,3.2,, -+func,PyTraceBack_Print,3.2,, -+data,PyTraceBack_Type,3.2,, -+data,PyTupleIter_Type,3.2,, -+func,PyTuple_GetItem,3.2,, -+func,PyTuple_GetSlice,3.2,, -+func,PyTuple_New,3.2,, -+func,PyTuple_Pack,3.2,, -+func,PyTuple_SetItem,3.2,, -+func,PyTuple_Size,3.2,, -+data,PyTuple_Type,3.2,, - type,PyTypeObject,3.2,,opaque --function,PyType_ClearCache,3.2,, --function,PyType_FromMetaclass,3.12,, --function,PyType_FromModuleAndSpec,3.10,, --function,PyType_FromSpec,3.2,, --function,PyType_FromSpecWithBases,3.3,, --function,PyType_GenericAlloc,3.2,, --function,PyType_GenericNew,3.2,, --function,PyType_GetFlags,3.2,, --function,PyType_GetModule,3.10,, --function,PyType_GetModuleState,3.10,, --function,PyType_GetName,3.11,, --function,PyType_GetQualName,3.11,, --function,PyType_GetSlot,3.4,, --function,PyType_GetTypeDataSize,3.12,, --function,PyType_IsSubtype,3.2,, --function,PyType_Modified,3.2,, --function,PyType_Ready,3.2,, -+func,PyType_ClearCache,3.2,, -+func,PyType_FromMetaclass,3.12,, -+func,PyType_FromModuleAndSpec,3.10,, -+func,PyType_FromSpec,3.2,, -+func,PyType_FromSpecWithBases,3.3,, -+func,PyType_GenericAlloc,3.2,, -+func,PyType_GenericNew,3.2,, -+func,PyType_GetFlags,3.2,, -+func,PyType_GetModule,3.10,, -+func,PyType_GetModuleState,3.10,, -+func,PyType_GetName,3.11,, -+func,PyType_GetQualName,3.11,, -+func,PyType_GetSlot,3.4,, -+func,PyType_GetTypeDataSize,3.12,, -+func,PyType_IsSubtype,3.2,, -+func,PyType_Modified,3.2,, -+func,PyType_Ready,3.2,, - type,PyType_Slot,3.2,,full-abi - type,PyType_Spec,3.2,,full-abi --var,PyType_Type,3.2,, --function,PyUnicodeDecodeError_Create,3.2,, --function,PyUnicodeDecodeError_GetEncoding,3.2,, --function,PyUnicodeDecodeError_GetEnd,3.2,, --function,PyUnicodeDecodeError_GetObject,3.2,, --function,PyUnicodeDecodeError_GetReason,3.2,, --function,PyUnicodeDecodeError_GetStart,3.2,, --function,PyUnicodeDecodeError_SetEnd,3.2,, --function,PyUnicodeDecodeError_SetReason,3.2,, --function,PyUnicodeDecodeError_SetStart,3.2,, --function,PyUnicodeEncodeError_GetEncoding,3.2,, --function,PyUnicodeEncodeError_GetEnd,3.2,, --function,PyUnicodeEncodeError_GetObject,3.2,, --function,PyUnicodeEncodeError_GetReason,3.2,, --function,PyUnicodeEncodeError_GetStart,3.2,, --function,PyUnicodeEncodeError_SetEnd,3.2,, --function,PyUnicodeEncodeError_SetReason,3.2,, --function,PyUnicodeEncodeError_SetStart,3.2,, --var,PyUnicodeIter_Type,3.2,, --function,PyUnicodeTranslateError_GetEnd,3.2,, --function,PyUnicodeTranslateError_GetObject,3.2,, --function,PyUnicodeTranslateError_GetReason,3.2,, --function,PyUnicodeTranslateError_GetStart,3.2,, --function,PyUnicodeTranslateError_SetEnd,3.2,, --function,PyUnicodeTranslateError_SetReason,3.2,, --function,PyUnicodeTranslateError_SetStart,3.2,, 
--function,PyUnicode_Append,3.2,, --function,PyUnicode_AppendAndDel,3.2,, --function,PyUnicode_AsASCIIString,3.2,, --function,PyUnicode_AsCharmapString,3.2,, --function,PyUnicode_AsDecodedObject,3.2,, --function,PyUnicode_AsDecodedUnicode,3.2,, --function,PyUnicode_AsEncodedObject,3.2,, --function,PyUnicode_AsEncodedString,3.2,, --function,PyUnicode_AsEncodedUnicode,3.2,, --function,PyUnicode_AsLatin1String,3.2,, --function,PyUnicode_AsMBCSString,3.7,on Windows, --function,PyUnicode_AsRawUnicodeEscapeString,3.2,, --function,PyUnicode_AsUCS4,3.7,, --function,PyUnicode_AsUCS4Copy,3.7,, --function,PyUnicode_AsUTF16String,3.2,, --function,PyUnicode_AsUTF32String,3.2,, --function,PyUnicode_AsUTF8AndSize,3.10,, --function,PyUnicode_AsUTF8String,3.2,, --function,PyUnicode_AsUnicodeEscapeString,3.2,, --function,PyUnicode_AsWideChar,3.2,, --function,PyUnicode_AsWideCharString,3.7,, --function,PyUnicode_BuildEncodingMap,3.2,, --function,PyUnicode_Compare,3.2,, --function,PyUnicode_CompareWithASCIIString,3.2,, --function,PyUnicode_Concat,3.2,, --function,PyUnicode_Contains,3.2,, --function,PyUnicode_Count,3.2,, --function,PyUnicode_Decode,3.2,, --function,PyUnicode_DecodeASCII,3.2,, --function,PyUnicode_DecodeCharmap,3.2,, --function,PyUnicode_DecodeCodePageStateful,3.7,on Windows, --function,PyUnicode_DecodeFSDefault,3.2,, --function,PyUnicode_DecodeFSDefaultAndSize,3.2,, --function,PyUnicode_DecodeLatin1,3.2,, --function,PyUnicode_DecodeLocale,3.7,, --function,PyUnicode_DecodeLocaleAndSize,3.7,, --function,PyUnicode_DecodeMBCS,3.7,on Windows, --function,PyUnicode_DecodeMBCSStateful,3.7,on Windows, --function,PyUnicode_DecodeRawUnicodeEscape,3.2,, --function,PyUnicode_DecodeUTF16,3.2,, --function,PyUnicode_DecodeUTF16Stateful,3.2,, --function,PyUnicode_DecodeUTF32,3.2,, --function,PyUnicode_DecodeUTF32Stateful,3.2,, --function,PyUnicode_DecodeUTF7,3.2,, --function,PyUnicode_DecodeUTF7Stateful,3.2,, --function,PyUnicode_DecodeUTF8,3.2,, --function,PyUnicode_DecodeUTF8Stateful,3.2,, --function,PyUnicode_DecodeUnicodeEscape,3.2,, --function,PyUnicode_EncodeCodePage,3.7,on Windows, --function,PyUnicode_EncodeFSDefault,3.2,, --function,PyUnicode_EncodeLocale,3.7,, --function,PyUnicode_FSConverter,3.2,, --function,PyUnicode_FSDecoder,3.2,, --function,PyUnicode_Find,3.2,, --function,PyUnicode_FindChar,3.7,, --function,PyUnicode_Format,3.2,, --function,PyUnicode_FromEncodedObject,3.2,, --function,PyUnicode_FromFormat,3.2,, --function,PyUnicode_FromFormatV,3.2,, --function,PyUnicode_FromObject,3.2,, --function,PyUnicode_FromOrdinal,3.2,, --function,PyUnicode_FromString,3.2,, --function,PyUnicode_FromStringAndSize,3.2,, --function,PyUnicode_FromWideChar,3.2,, --function,PyUnicode_GetDefaultEncoding,3.2,, --function,PyUnicode_GetLength,3.7,, --function,PyUnicode_InternFromString,3.2,, --function,PyUnicode_InternInPlace,3.2,, --function,PyUnicode_IsIdentifier,3.2,, --function,PyUnicode_Join,3.2,, --function,PyUnicode_Partition,3.2,, --function,PyUnicode_RPartition,3.2,, --function,PyUnicode_RSplit,3.2,, --function,PyUnicode_ReadChar,3.7,, --function,PyUnicode_Replace,3.2,, --function,PyUnicode_Resize,3.2,, --function,PyUnicode_RichCompare,3.2,, --function,PyUnicode_Split,3.2,, --function,PyUnicode_Splitlines,3.2,, --function,PyUnicode_Substring,3.7,, --function,PyUnicode_Tailmatch,3.2,, --function,PyUnicode_Translate,3.2,, --var,PyUnicode_Type,3.2,, --function,PyUnicode_WriteChar,3.7,, -+data,PyType_Type,3.2,, -+func,PyUnicodeDecodeError_Create,3.2,, -+func,PyUnicodeDecodeError_GetEncoding,3.2,, 
-+func,PyUnicodeDecodeError_GetEnd,3.2,, -+func,PyUnicodeDecodeError_GetObject,3.2,, -+func,PyUnicodeDecodeError_GetReason,3.2,, -+func,PyUnicodeDecodeError_GetStart,3.2,, -+func,PyUnicodeDecodeError_SetEnd,3.2,, -+func,PyUnicodeDecodeError_SetReason,3.2,, -+func,PyUnicodeDecodeError_SetStart,3.2,, -+func,PyUnicodeEncodeError_GetEncoding,3.2,, -+func,PyUnicodeEncodeError_GetEnd,3.2,, -+func,PyUnicodeEncodeError_GetObject,3.2,, -+func,PyUnicodeEncodeError_GetReason,3.2,, -+func,PyUnicodeEncodeError_GetStart,3.2,, -+func,PyUnicodeEncodeError_SetEnd,3.2,, -+func,PyUnicodeEncodeError_SetReason,3.2,, -+func,PyUnicodeEncodeError_SetStart,3.2,, -+data,PyUnicodeIter_Type,3.2,, -+func,PyUnicodeTranslateError_GetEnd,3.2,, -+func,PyUnicodeTranslateError_GetObject,3.2,, -+func,PyUnicodeTranslateError_GetReason,3.2,, -+func,PyUnicodeTranslateError_GetStart,3.2,, -+func,PyUnicodeTranslateError_SetEnd,3.2,, -+func,PyUnicodeTranslateError_SetReason,3.2,, -+func,PyUnicodeTranslateError_SetStart,3.2,, -+func,PyUnicode_Append,3.2,, -+func,PyUnicode_AppendAndDel,3.2,, -+func,PyUnicode_AsASCIIString,3.2,, -+func,PyUnicode_AsCharmapString,3.2,, -+func,PyUnicode_AsDecodedObject,3.2,, -+func,PyUnicode_AsDecodedUnicode,3.2,, -+func,PyUnicode_AsEncodedObject,3.2,, -+func,PyUnicode_AsEncodedString,3.2,, -+func,PyUnicode_AsEncodedUnicode,3.2,, -+func,PyUnicode_AsLatin1String,3.2,, -+func,PyUnicode_AsMBCSString,3.7,on Windows, -+func,PyUnicode_AsRawUnicodeEscapeString,3.2,, -+func,PyUnicode_AsUCS4,3.7,, -+func,PyUnicode_AsUCS4Copy,3.7,, -+func,PyUnicode_AsUTF16String,3.2,, -+func,PyUnicode_AsUTF32String,3.2,, -+func,PyUnicode_AsUTF8AndSize,3.10,, -+func,PyUnicode_AsUTF8String,3.2,, -+func,PyUnicode_AsUnicodeEscapeString,3.2,, -+func,PyUnicode_AsWideChar,3.2,, -+func,PyUnicode_AsWideCharString,3.7,, -+func,PyUnicode_BuildEncodingMap,3.2,, -+func,PyUnicode_Compare,3.2,, -+func,PyUnicode_CompareWithASCIIString,3.2,, -+func,PyUnicode_Concat,3.2,, -+func,PyUnicode_Contains,3.2,, -+func,PyUnicode_Count,3.2,, -+func,PyUnicode_Decode,3.2,, -+func,PyUnicode_DecodeASCII,3.2,, -+func,PyUnicode_DecodeCharmap,3.2,, -+func,PyUnicode_DecodeCodePageStateful,3.7,on Windows, -+func,PyUnicode_DecodeFSDefault,3.2,, -+func,PyUnicode_DecodeFSDefaultAndSize,3.2,, -+func,PyUnicode_DecodeLatin1,3.2,, -+func,PyUnicode_DecodeLocale,3.7,, -+func,PyUnicode_DecodeLocaleAndSize,3.7,, -+func,PyUnicode_DecodeMBCS,3.7,on Windows, -+func,PyUnicode_DecodeMBCSStateful,3.7,on Windows, -+func,PyUnicode_DecodeRawUnicodeEscape,3.2,, -+func,PyUnicode_DecodeUTF16,3.2,, -+func,PyUnicode_DecodeUTF16Stateful,3.2,, -+func,PyUnicode_DecodeUTF32,3.2,, -+func,PyUnicode_DecodeUTF32Stateful,3.2,, -+func,PyUnicode_DecodeUTF7,3.2,, -+func,PyUnicode_DecodeUTF7Stateful,3.2,, -+func,PyUnicode_DecodeUTF8,3.2,, -+func,PyUnicode_DecodeUTF8Stateful,3.2,, -+func,PyUnicode_DecodeUnicodeEscape,3.2,, -+func,PyUnicode_EncodeCodePage,3.7,on Windows, -+func,PyUnicode_EncodeFSDefault,3.2,, -+func,PyUnicode_EncodeLocale,3.7,, -+func,PyUnicode_FSConverter,3.2,, -+func,PyUnicode_FSDecoder,3.2,, -+func,PyUnicode_Find,3.2,, -+func,PyUnicode_FindChar,3.7,, -+func,PyUnicode_Format,3.2,, -+func,PyUnicode_FromEncodedObject,3.2,, -+func,PyUnicode_FromFormat,3.2,, -+func,PyUnicode_FromFormatV,3.2,, -+func,PyUnicode_FromObject,3.2,, -+func,PyUnicode_FromOrdinal,3.2,, -+func,PyUnicode_FromString,3.2,, -+func,PyUnicode_FromStringAndSize,3.2,, -+func,PyUnicode_FromWideChar,3.2,, -+func,PyUnicode_GetDefaultEncoding,3.2,, -+func,PyUnicode_GetLength,3.7,, -+func,PyUnicode_InternFromString,3.2,, 
-+func,PyUnicode_InternInPlace,3.2,, -+func,PyUnicode_IsIdentifier,3.2,, -+func,PyUnicode_Join,3.2,, -+func,PyUnicode_Partition,3.2,, -+func,PyUnicode_RPartition,3.2,, -+func,PyUnicode_RSplit,3.2,, -+func,PyUnicode_ReadChar,3.7,, -+func,PyUnicode_Replace,3.2,, -+func,PyUnicode_Resize,3.2,, -+func,PyUnicode_RichCompare,3.2,, -+func,PyUnicode_Split,3.2,, -+func,PyUnicode_Splitlines,3.2,, -+func,PyUnicode_Substring,3.7,, -+func,PyUnicode_Tailmatch,3.2,, -+func,PyUnicode_Translate,3.2,, -+data,PyUnicode_Type,3.2,, -+func,PyUnicode_WriteChar,3.7,, - type,PyVarObject,3.2,,members - member,PyVarObject.ob_base,3.2,, - member,PyVarObject.ob_size,3.2,, --function,PyVectorcall_Call,3.12,, --function,PyVectorcall_NARGS,3.12,, -+func,PyVectorcall_Call,3.12,, -+func,PyVectorcall_NARGS,3.12,, - type,PyWeakReference,3.2,,opaque --function,PyWeakref_GetObject,3.2,, --function,PyWeakref_NewProxy,3.2,, --function,PyWeakref_NewRef,3.2,, --var,PyWrapperDescr_Type,3.2,, --function,PyWrapper_New,3.2,, --var,PyZip_Type,3.2,, --function,Py_AddPendingCall,3.2,, --function,Py_AtExit,3.2,, -+func,PyWeakref_GetObject,3.2,, -+func,PyWeakref_NewProxy,3.2,, -+func,PyWeakref_NewRef,3.2,, -+data,PyWrapperDescr_Type,3.2,, -+func,PyWrapper_New,3.2,, -+data,PyZip_Type,3.2,, -+func,Py_AddPendingCall,3.2,, -+func,Py_AtExit,3.2,, - macro,Py_BEGIN_ALLOW_THREADS,3.2,, - macro,Py_BLOCK_THREADS,3.2,, --function,Py_BuildValue,3.2,, --function,Py_BytesMain,3.8,, --function,Py_CompileString,3.2,, --function,Py_DecRef,3.2,, --function,Py_DecodeLocale,3.7,, -+func,Py_BuildValue,3.2,, -+func,Py_BytesMain,3.8,, -+func,Py_CompileString,3.2,, -+func,Py_DecRef,3.2,, -+func,Py_DecodeLocale,3.7,, - macro,Py_END_ALLOW_THREADS,3.2,, --function,Py_EncodeLocale,3.7,, --function,Py_EndInterpreter,3.2,, --function,Py_EnterRecursiveCall,3.9,, --function,Py_Exit,3.2,, --function,Py_FatalError,3.2,, --var,Py_FileSystemDefaultEncodeErrors,3.10,, --var,Py_FileSystemDefaultEncoding,3.2,, --function,Py_Finalize,3.2,, --function,Py_FinalizeEx,3.6,, --function,Py_GenericAlias,3.9,, --var,Py_GenericAliasType,3.9,, --function,Py_GetBuildInfo,3.2,, --function,Py_GetCompiler,3.2,, --function,Py_GetCopyright,3.2,, --function,Py_GetExecPrefix,3.2,, --function,Py_GetPath,3.2,, --function,Py_GetPlatform,3.2,, --function,Py_GetPrefix,3.2,, --function,Py_GetProgramFullPath,3.2,, --function,Py_GetProgramName,3.2,, --function,Py_GetPythonHome,3.2,, --function,Py_GetRecursionLimit,3.2,, --function,Py_GetVersion,3.2,, --var,Py_HasFileSystemDefaultEncoding,3.2,, --function,Py_IncRef,3.2,, --function,Py_Initialize,3.2,, --function,Py_InitializeEx,3.2,, --function,Py_Is,3.10,, --function,Py_IsFalse,3.10,, --function,Py_IsInitialized,3.2,, --function,Py_IsNone,3.10,, --function,Py_IsTrue,3.10,, --function,Py_LeaveRecursiveCall,3.9,, --function,Py_Main,3.2,, --function,Py_MakePendingCalls,3.2,, --function,Py_NewInterpreter,3.2,, --function,Py_NewRef,3.10,, --function,Py_ReprEnter,3.2,, --function,Py_ReprLeave,3.2,, --function,Py_SetPath,3.7,, --function,Py_SetProgramName,3.2,, --function,Py_SetPythonHome,3.2,, --function,Py_SetRecursionLimit,3.2,, -+func,Py_EncodeLocale,3.7,, -+func,Py_EndInterpreter,3.2,, -+func,Py_EnterRecursiveCall,3.9,, -+func,Py_Exit,3.2,, -+func,Py_FatalError,3.2,, -+data,Py_FileSystemDefaultEncodeErrors,3.10,, -+data,Py_FileSystemDefaultEncoding,3.2,, -+func,Py_Finalize,3.2,, -+func,Py_FinalizeEx,3.6,, -+func,Py_GenericAlias,3.9,, -+data,Py_GenericAliasType,3.9,, -+func,Py_GetBuildInfo,3.2,, -+func,Py_GetCompiler,3.2,, -+func,Py_GetCopyright,3.2,, 
-+func,Py_GetExecPrefix,3.2,, -+func,Py_GetPath,3.2,, -+func,Py_GetPlatform,3.2,, -+func,Py_GetPrefix,3.2,, -+func,Py_GetProgramFullPath,3.2,, -+func,Py_GetProgramName,3.2,, -+func,Py_GetPythonHome,3.2,, -+func,Py_GetRecursionLimit,3.2,, -+func,Py_GetVersion,3.2,, -+data,Py_HasFileSystemDefaultEncoding,3.2,, -+func,Py_IncRef,3.2,, -+func,Py_Initialize,3.2,, -+func,Py_InitializeEx,3.2,, -+func,Py_Is,3.10,, -+func,Py_IsFalse,3.10,, -+func,Py_IsInitialized,3.2,, -+func,Py_IsNone,3.10,, -+func,Py_IsTrue,3.10,, -+func,Py_LeaveRecursiveCall,3.9,, -+func,Py_Main,3.2,, -+func,Py_MakePendingCalls,3.2,, -+func,Py_NewInterpreter,3.2,, -+func,Py_NewRef,3.10,, -+func,Py_ReprEnter,3.2,, -+func,Py_ReprLeave,3.2,, -+func,Py_SetPath,3.7,, -+func,Py_SetProgramName,3.2,, -+func,Py_SetPythonHome,3.2,, -+func,Py_SetRecursionLimit,3.2,, - type,Py_UCS4,3.2,, - macro,Py_UNBLOCK_THREADS,3.2,, --var,Py_UTF8Mode,3.8,, --function,Py_VaBuildValue,3.2,, --var,Py_Version,3.11,, --function,Py_XNewRef,3.10,, -+data,Py_UTF8Mode,3.8,, -+func,Py_VaBuildValue,3.2,, -+data,Py_Version,3.11,, -+func,Py_XNewRef,3.10,, - type,Py_buffer,3.11,,full-abi - type,Py_intptr_t,3.2,, - type,Py_ssize_t,3.2,, ---- /dev/null -+++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst -@@ -0,0 +1,46 @@ -+Pending Removal in Python 3.14 -+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+ -+* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules -+ (:pep:`699`; :gh:`101193`). -+ -+* Creating :c:data:`immutable types ` with mutable -+ bases (:gh:`95388`). -+ -+* Functions to configure Python's initialization, deprecated in Python 3.11: -+ -+ * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead. -+ * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead. -+ * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead. -+ * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead. -+ -+ The :c:func:`Py_InitializeFromConfig` API should be used with -+ :c:type:`PyConfig` instead. -+ -+* Global configuration variables: -+ -+ * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` instead. -+ * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` instead. -+ * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` instead. -+ * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` instead. -+ * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` instead. -+ * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` instead. -+ * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` instead. -+ * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` instead. -+ * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` instead. -+ * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` instead. -+ * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` instead. -+ * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` instead. -+ * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` instead. -+ * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` -+ and :c:member:`PyConfig.hash_seed` instead. -+ * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` instead. -+ * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead. -+ * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` instead. 
-+ * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead. -+ * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead. -+ * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` instead. -+ * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` instead. (see :c:func:`Py_PreInitialize`) -+ -+ The :c:func:`Py_InitializeFromConfig` API should be used with -+ :c:type:`PyConfig` instead. ---- /dev/null -+++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst -@@ -0,0 +1,20 @@ -+Pending Removal in Python 3.15 -+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+ -+* The bundled copy of ``libmpdecimal``. -+* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` instead. -+* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`!PyWeakref_GetRef` instead. -+* :c:func:`PyWeakref_GetObject`: use :c:func:`!PyWeakref_GetRef` instead. -+* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead. -+* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead. -+* Python initialization functions: -+ -+ * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and -+ :data:`!warnings.filters` instead. -+ * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead. -+ * :c:func:`Py_GetPath`: get :data:`sys.path` instead. -+ * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead. -+ * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead. -+ * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead. -+ * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or -+ the :envvar:`PYTHONHOME` environment variable instead. ---- /dev/null -+++ b/Doc/deprecations/c-api-pending-removal-in-future.rst -@@ -0,0 +1,31 @@ -+Pending Removal in Future Versions -+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+ -+The following APIs are deprecated and will be removed, -+although there is currently no date scheduled for their removal. -+ -+* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8. -+* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` instead. -+* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` instead. -+* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` instead. -+* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` instead. -+* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` instead. -+* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` instead. -+* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` instead. -+* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` instead. -+* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` instead. -+* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` instead. -+* :c:func:`PyUnicode_READY`: unneeded since Python 3.12 -+* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` instead. -+* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` instead. -+* :c:member:`!PyBytesObject.ob_shash` member: -+ call :c:func:`PyObject_Hash` instead. -+* :c:member:`!PyDictObject.ma_version_tag` member. -+* Thread Local Storage (TLS) API: -+ -+ * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` instead. -+ * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` instead. -+ * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` instead. 
-+ * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` instead. -+ * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` instead. -+ * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7. ---- /dev/null -+++ b/Doc/deprecations/index.rst -@@ -0,0 +1,21 @@ -+Deprecations -+============ -+ -+.. include:: pending-removal-in-3.13.rst -+ -+.. include:: pending-removal-in-3.14.rst -+ -+.. include:: pending-removal-in-3.15.rst -+ -+.. include:: pending-removal-in-3.16.rst -+ -+.. include:: pending-removal-in-future.rst -+ -+C API Deprecations -+------------------ -+ -+.. include:: c-api-pending-removal-in-3.14.rst -+ -+.. include:: c-api-pending-removal-in-3.15.rst -+ -+.. include:: c-api-pending-removal-in-future.rst ---- /dev/null -+++ b/Doc/deprecations/pending-removal-in-3.13.rst -@@ -0,0 +1,52 @@ -+Pending Removal in Python 3.13 -+------------------------------ -+ -+Modules (see :pep:`594`): -+ -+* :mod:`aifc` -+* :mod:`audioop` -+* :mod:`cgi` -+* :mod:`cgitb` -+* :mod:`chunk` -+* :mod:`crypt` -+* :mod:`imghdr` -+* :mod:`mailcap` -+* :mod:`msilib` -+* :mod:`nis` -+* :mod:`nntplib` -+* :mod:`ossaudiodev` -+* :mod:`pipes` -+* :mod:`sndhdr` -+* :mod:`spwd` -+* :mod:`sunau` -+* :mod:`telnetlib` -+* :mod:`uu` -+* :mod:`xdrlib` -+ -+Other modules: -+ -+* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`) -+ -+APIs: -+ -+* :class:`!configparser.LegacyInterpolation` (:gh:`90765`) -+* ``locale.resetlocale()`` (:gh:`90817`) -+* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`) -+* :func:`!unittest.findTestCases` (:gh:`50096`) -+* :func:`!unittest.getTestCaseNames` (:gh:`50096`) -+* :func:`!unittest.makeSuite` (:gh:`50096`) -+* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`) -+* :class:`!webbrowser.MacOSX` (:gh:`86421`) -+* :class:`classmethod` descriptor chaining (:gh:`89519`) -+* :mod:`importlib.resources` deprecated methods: -+ -+ * ``contents()`` -+ * ``is_resource()`` -+ * ``open_binary()`` -+ * ``open_text()`` -+ * ``path()`` -+ * ``read_binary()`` -+ * ``read_text()`` -+ -+ Use :func:`importlib.resources.files` instead. Refer to `importlib-resources: Migrating from Legacy -+ `_ (:gh:`106531`) ---- /dev/null -+++ b/Doc/deprecations/pending-removal-in-3.14.rst -@@ -0,0 +1,118 @@ -+Pending Removal in Python 3.14 -+------------------------------ -+ -+* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters -+ of :class:`!argparse.BooleanOptionalAction` are deprecated -+ and will be removed in 3.14. -+ (Contributed by Nikita Sobolev in :gh:`92248`.) -+ -+* :mod:`ast`: The following features have been deprecated in documentation -+ since Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at -+ runtime when they are accessed or used, and will be removed in Python 3.14: -+ -+ * :class:`!ast.Num` -+ * :class:`!ast.Str` -+ * :class:`!ast.Bytes` -+ * :class:`!ast.NameConstant` -+ * :class:`!ast.Ellipsis` -+ -+ Use :class:`ast.Constant` instead. -+ (Contributed by Serhiy Storchaka in :gh:`90953`.) -+ -+* :mod:`asyncio`: -+ -+ * The child watcher classes :class:`~asyncio.MultiLoopChildWatcher`, -+ :class:`~asyncio.FastChildWatcher`, :class:`~asyncio.AbstractChildWatcher` -+ and :class:`~asyncio.SafeChildWatcher` are deprecated and -+ will be removed in Python 3.14. -+ (Contributed by Kumar Aditya in :gh:`94597`.) 
-+ -+ * :func:`asyncio.set_child_watcher`, :func:`asyncio.get_child_watcher`, -+ :meth:`asyncio.AbstractEventLoopPolicy.set_child_watcher` and -+ :meth:`asyncio.AbstractEventLoopPolicy.get_child_watcher` are deprecated -+ and will be removed in Python 3.14. -+ (Contributed by Kumar Aditya in :gh:`94597`.) -+ -+ * The :meth:`~asyncio.get_event_loop` method of the -+ default event loop policy now emits a :exc:`DeprecationWarning` if there -+ is no current event loop set and it decides to create one. -+ (Contributed by Serhiy Storchaka and Guido van Rossum in :gh:`100160`.) -+ -+* :mod:`collections.abc`: Deprecated :class:`~collections.abc.ByteString`. -+ Prefer :class:`!Sequence` or :class:`~collections.abc.Buffer`. -+ For use in typing, prefer a union, like ``bytes | bytearray``, -+ or :class:`collections.abc.Buffer`. -+ (Contributed by Shantanu Jain in :gh:`91896`.) -+ -+* :mod:`email`: Deprecated the *isdst* parameter in :func:`email.utils.localtime`. -+ (Contributed by Alan Williams in :gh:`72346`.) -+ -+* :mod:`importlib`: ``__package__`` and ``__cached__`` will cease to be set or -+ taken into consideration by the import system (:gh:`97879`). -+ -+* :mod:`importlib.abc` deprecated classes: -+ -+ * :class:`!importlib.abc.ResourceReader` -+ * :class:`!importlib.abc.Traversable` -+ * :class:`!importlib.abc.TraversableResources` -+ -+ Use :mod:`importlib.resources.abc` classes instead: -+ -+ * :class:`importlib.resources.abc.Traversable` -+ * :class:`importlib.resources.abc.TraversableResources` -+ -+ (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) -+ -+* :mod:`itertools` had undocumented, inefficient, historically buggy, -+ and inconsistent support for copy, deepcopy, and pickle operations. -+ This will be removed in 3.14 for a significant reduction in code -+ volume and maintenance burden. -+ (Contributed by Raymond Hettinger in :gh:`101588`.) -+ -+* :mod:`multiprocessing`: The default start method will change to a safer one on -+ Linux, BSDs, and other non-macOS POSIX platforms where ``'fork'`` is currently -+ the default (:gh:`84559`). Adding a runtime warning about this was deemed too -+ disruptive as the majority of code is not expected to care. Use the -+ :func:`~multiprocessing.get_context` or -+ :func:`~multiprocessing.set_start_method` APIs to explicitly specify when -+ your code *requires* ``'fork'``. See :ref:`multiprocessing-start-methods`. -+ -+* :mod:`pathlib`: :meth:`~pathlib.PurePath.is_relative_to` and -+ :meth:`~pathlib.PurePath.relative_to`: passing additional arguments is -+ deprecated. -+ -+* :mod:`pkgutil`: :func:`~pkgutil.find_loader` and :func:`~pkgutil.get_loader` -+ now raise :exc:`DeprecationWarning`; -+ use :func:`importlib.util.find_spec` instead. -+ (Contributed by Nikita Sobolev in :gh:`97850`.) -+ -+* :mod:`pty`: -+ -+ * ``master_open()``: use :func:`pty.openpty`. -+ * ``slave_open()``: use :func:`pty.openpty`. -+ -+* :mod:`sqlite3`: -+ -+ * :data:`~sqlite3.version` and :data:`~sqlite3.version_info`. -+ -+ * :meth:`~sqlite3.Cursor.execute` and :meth:`~sqlite3.Cursor.executemany` -+ if :ref:`named placeholders ` are used and -+ *parameters* is a sequence instead of a :class:`dict`. -+ -+ * date and datetime adapter, date and timestamp converter: -+ see the :mod:`sqlite3` documentation for suggested replacement recipes. -+ -+* :class:`types.CodeType`: Accessing :attr:`~codeobject.co_lnotab` was -+ deprecated in :pep:`626` -+ since 3.10 and was planned to be removed in 3.12, -+ but it only got a proper :exc:`DeprecationWarning` in 3.12. 
-+ May be removed in 3.14. -+ (Contributed by Nikita Sobolev in :gh:`101866`.) -+ -+* :mod:`typing`: :class:`~typing.ByteString`, deprecated since Python 3.9, -+ now causes a :exc:`DeprecationWarning` to be emitted when it is used. -+ -+* :mod:`urllib`: -+ :class:`!urllib.parse.Quoter` is deprecated: it was not intended to be a -+ public API. -+ (Contributed by Gregory P. Smith in :gh:`88168`.) ---- /dev/null -+++ b/Doc/deprecations/pending-removal-in-3.15.rst -@@ -0,0 +1,57 @@ -+Pending Removal in Python 3.15 -+------------------------------ -+ -+* :class:`http.server.CGIHTTPRequestHandler` will be removed along with its -+ related ``--cgi`` flag to ``python -m http.server``. It was obsolete and -+ rarely used. No direct replacement exists. *Anything* is better than CGI -+ to interface a web server with a request handler. -+ -+* :class:`locale`: :func:`locale.getdefaultlocale` was deprecated in Python 3.11 -+ and originally planned for removal in Python 3.13 (:gh:`90817`), -+ but removal has been postponed to Python 3.15. -+ Use :func:`locale.setlocale`, :func:`locale.getencoding` and -+ :func:`locale.getlocale` instead. -+ (Contributed by Hugo van Kemenade in :gh:`111187`.) -+ -+* :mod:`pathlib`: -+ :meth:`pathlib.PurePath.is_reserved` is deprecated and scheduled for -+ removal in Python 3.15. From Python 3.13 onwards, use ``os.path.isreserved`` -+ to detect reserved paths on Windows. -+ -+* :mod:`platform`: -+ :func:`~platform.java_ver` is deprecated and will be removed in 3.15. -+ It was largely untested, had a confusing API, -+ and was only useful for Jython support. -+ (Contributed by Nikita Sobolev in :gh:`116349`.) -+ -+* :mod:`threading`: -+ Passing any arguments to :func:`threading.RLock` is now deprecated. -+ C version allows any numbers of args and kwargs, -+ but they are just ignored. Python version does not allow any arguments. -+ All arguments will be removed from :func:`threading.RLock` in Python 3.15. -+ (Contributed by Nikita Sobolev in :gh:`102029`.) -+ -+* :class:`typing.NamedTuple`: -+ -+ * The undocumented keyword argument syntax for creating :class:`!NamedTuple` classes -+ (``NT = NamedTuple("NT", x=int)``) is deprecated, and will be disallowed in -+ 3.15. Use the class-based syntax or the functional syntax instead. -+ -+ * When using the functional syntax to create a :class:`!NamedTuple` class, failing to -+ pass a value to the *fields* parameter (``NT = NamedTuple("NT")``) is -+ deprecated. Passing ``None`` to the *fields* parameter -+ (``NT = NamedTuple("NT", None)``) is also deprecated. Both will be -+ disallowed in Python 3.15. To create a :class:`!NamedTuple` class with 0 fields, use -+ ``class NT(NamedTuple): pass`` or ``NT = NamedTuple("NT", [])``. -+ -+* :class:`typing.TypedDict`: When using the functional syntax to create a -+ :class:`!TypedDict` class, failing to pass a value to the *fields* parameter (``TD = -+ TypedDict("TD")``) is deprecated. Passing ``None`` to the *fields* parameter -+ (``TD = TypedDict("TD", None)``) is also deprecated. Both will be disallowed -+ in Python 3.15. To create a :class:`!TypedDict` class with 0 fields, use ``class -+ TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. -+ -+* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` -+ methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes. -+ They will be removed in Python 3.15. -+ (Contributed by Victor Stinner in :gh:`105096`.) 
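As a rough illustration of the :class:`!NamedTuple` and :class:`!TypedDict` spellings recommended in the entries above (the class names ``Point``, ``Movie``, ``EmptyNT`` and ``EmptyTD`` are hypothetical, and Python 3.12 semantics are assumed)::

    from typing import NamedTuple, TypedDict

    # Deprecated forms that Python 3.15 will disallow:
    #   NT = NamedTuple("NT", x=int)     # keyword-argument syntax
    #   NT = NamedTuple("NT")            # no value for *fields*
    #   TD = TypedDict("TD", None)       # None for *fields*

    # Class-based syntax:
    class Point(NamedTuple):
        x: int
        y: int

    class Movie(TypedDict):
        title: str
        year: int

    # Functional syntax, including the zero-field spellings:
    Point2 = NamedTuple("Point2", [("x", int), ("y", int)])
    EmptyNT = NamedTuple("EmptyNT", [])
    EmptyTD = TypedDict("EmptyTD", {})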
---- /dev/null -+++ b/Doc/deprecations/pending-removal-in-3.16.rst -@@ -0,0 +1,13 @@ -+Pending Removal in Python 3.16 -+------------------------------ -+ -+* :mod:`array`: -+ :class:`array.array` ``'u'`` type (:c:type:`wchar_t`): -+ use the ``'w'`` type instead (``Py_UCS4``). -+ -+* :mod:`builtins`: -+ ``~bool``, bitwise inversion on bool. -+ -+* :mod:`symtable`: -+ Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest. -+ (Contributed by Bénédikt Tran in :gh:`119698`.) ---- /dev/null -+++ b/Doc/deprecations/pending-removal-in-future.rst -@@ -0,0 +1,152 @@ -+Pending Removal in Future Versions -+---------------------------------- -+ -+The following APIs will be removed in the future, -+although there is currently no date scheduled for their removal. -+ -+* :mod:`argparse`: Nesting argument groups and nesting mutually exclusive -+ groups are deprecated. -+ -+* :mod:`array`'s ``'u'`` format code (:gh:`57281`) -+ -+* :mod:`builtins`: -+ -+ * ``bool(NotImplemented)``. -+ * Generators: ``throw(type, exc, tb)`` and ``athrow(type, exc, tb)`` -+ signature is deprecated: use ``throw(exc)`` and ``athrow(exc)`` instead, -+ the single argument signature. -+ * Currently Python accepts numeric literals immediately followed by keywords, -+ for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing and -+ ambiguous expressions like ``[0x1for x in y]`` (which can be interpreted as -+ ``[0x1 for x in y]`` or ``[0x1f or x in y]``). A syntax warning is raised -+ if the numeric literal is immediately followed by one of keywords -+ :keyword:`and`, :keyword:`else`, :keyword:`for`, :keyword:`if`, -+ :keyword:`in`, :keyword:`is` and :keyword:`or`. In a future release it -+ will be changed to a syntax error. (:gh:`87999`) -+ * Support for ``__index__()`` and ``__int__()`` method returning non-int type: -+ these methods will be required to return an instance of a strict subclass of -+ :class:`int`. -+ * Support for ``__float__()`` method returning a strict subclass of -+ :class:`float`: these methods will be required to return an instance of -+ :class:`float`. -+ * Support for ``__complex__()`` method returning a strict subclass of -+ :class:`complex`: these methods will be required to return an instance of -+ :class:`complex`. -+ * Delegation of ``int()`` to ``__trunc__()`` method. -+ * Passing a complex number as the *real* or *imag* argument in the -+ :func:`complex` constructor is now deprecated; it should only be passed -+ as a single positional argument. -+ (Contributed by Serhiy Storchaka in :gh:`109218`.) -+ -+* :mod:`calendar`: ``calendar.January`` and ``calendar.February`` constants are -+ deprecated and replaced by :data:`calendar.JANUARY` and -+ :data:`calendar.FEBRUARY`. -+ (Contributed by Prince Roshan in :gh:`103636`.) -+ -+* :attr:`codeobject.co_lnotab`: use the :meth:`codeobject.co_lines` method -+ instead. -+ -+* :mod:`datetime`: -+ -+ * :meth:`~datetime.datetime.utcnow`: -+ use ``datetime.datetime.now(tz=datetime.UTC)``. -+ * :meth:`~datetime.datetime.utcfromtimestamp`: -+ use ``datetime.datetime.fromtimestamp(timestamp, tz=datetime.UTC)``. -+ -+* :mod:`gettext`: Plural value must be an integer. -+ -+* :mod:`importlib`: -+ -+ * ``load_module()`` method: use ``exec_module()`` instead. -+ * :func:`~importlib.util.cache_from_source` *debug_override* parameter is -+ deprecated: use the *optimization* parameter instead. -+ -+* :mod:`importlib.metadata`: -+ -+ * ``EntryPoints`` tuple interface. -+ * Implicit ``None`` on return values. 
-+ -+* :mod:`mailbox`: Use of StringIO input and text mode is deprecated, use -+ BytesIO and binary mode instead. -+ -+* :mod:`os`: Calling :func:`os.register_at_fork` in multi-threaded process. -+ -+* :class:`!pydoc.ErrorDuringImport`: A tuple value for *exc_info* parameter is -+ deprecated, use an exception instance. -+ -+* :mod:`re`: More strict rules are now applied for numerical group references -+ and group names in regular expressions. Only sequence of ASCII digits is now -+ accepted as a numerical reference. The group name in bytes patterns and -+ replacement strings can now only contain ASCII letters and digits and -+ underscore. -+ (Contributed by Serhiy Storchaka in :gh:`91760`.) -+ -+* :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules. -+ -+* :mod:`shutil`: :func:`~shutil.rmtree`'s *onerror* parameter is deprecated in -+ Python 3.12; use the *onexc* parameter instead. -+ -+* :mod:`ssl` options and protocols: -+ -+ * :class:`ssl.SSLContext` without protocol argument is deprecated. -+ * :class:`ssl.SSLContext`: :meth:`~ssl.SSLContext.set_npn_protocols` and -+ :meth:`!selected_npn_protocol` are deprecated: use ALPN -+ instead. -+ * ``ssl.OP_NO_SSL*`` options -+ * ``ssl.OP_NO_TLS*`` options -+ * ``ssl.PROTOCOL_SSLv3`` -+ * ``ssl.PROTOCOL_TLS`` -+ * ``ssl.PROTOCOL_TLSv1`` -+ * ``ssl.PROTOCOL_TLSv1_1`` -+ * ``ssl.PROTOCOL_TLSv1_2`` -+ * ``ssl.TLSVersion.SSLv3`` -+ * ``ssl.TLSVersion.TLSv1`` -+ * ``ssl.TLSVersion.TLSv1_1`` -+ -+* :func:`sysconfig.is_python_build` *check_home* parameter is deprecated and -+ ignored. -+ -+* :mod:`threading` methods: -+ -+ * :meth:`!threading.Condition.notifyAll`: use :meth:`~threading.Condition.notify_all`. -+ * :meth:`!threading.Event.isSet`: use :meth:`~threading.Event.is_set`. -+ * :meth:`!threading.Thread.isDaemon`, :meth:`threading.Thread.setDaemon`: -+ use :attr:`threading.Thread.daemon` attribute. -+ * :meth:`!threading.Thread.getName`, :meth:`threading.Thread.setName`: -+ use :attr:`threading.Thread.name` attribute. -+ * :meth:`!threading.currentThread`: use :meth:`threading.current_thread`. -+ * :meth:`!threading.activeCount`: use :meth:`threading.active_count`. -+ -+* :class:`typing.Text` (:gh:`92332`). -+ -+* :class:`unittest.IsolatedAsyncioTestCase`: it is deprecated to return a value -+ that is not ``None`` from a test case. -+ -+* :mod:`urllib.parse` deprecated functions: :func:`~urllib.parse.urlparse` instead -+ -+ * ``splitattr()`` -+ * ``splithost()`` -+ * ``splitnport()`` -+ * ``splitpasswd()`` -+ * ``splitport()`` -+ * ``splitquery()`` -+ * ``splittag()`` -+ * ``splittype()`` -+ * ``splituser()`` -+ * ``splitvalue()`` -+ * ``to_bytes()`` -+ -+* :mod:`urllib.request`: :class:`~urllib.request.URLopener` and -+ :class:`~urllib.request.FancyURLopener` style of invoking requests is -+ deprecated. Use newer :func:`~urllib.request.urlopen` functions and methods. -+ -+* :mod:`wsgiref`: ``SimpleHandler.stdout.write()`` should not do partial -+ writes. -+ -+* :mod:`xml.etree.ElementTree`: Testing the truth value of an -+ :class:`~xml.etree.ElementTree.Element` is deprecated. In a future release it -+ will always return ``True``. Prefer explicit ``len(elem)`` or -+ ``elem is not None`` tests instead. -+ -+* :meth:`zipimport.zipimporter.load_module` is deprecated: -+ use :meth:`~zipimport.zipimporter.exec_module` instead. 
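For the :mod:`datetime` entries in the list above, a minimal sketch of the recommended time-zone-aware replacements (assuming Python 3.11+ for :data:`!datetime.UTC`; the timestamp value is hypothetical)::

    from datetime import datetime, UTC

    # Instead of the deprecated datetime.utcnow():
    now = datetime.now(tz=UTC)

    # Instead of the deprecated datetime.utcfromtimestamp(ts):
    ts = 1_700_000_000  # hypothetical POSIX timestamp
    then = datetime.fromtimestamp(ts, tz=UTC)

    print(now.tzinfo, then.tzinfo)  # both results carry the UTC time zone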
-diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst -index c2bc5f699a1..f10dba7b97b 100644 ---- a/Doc/extending/newtypes_tutorial.rst -+++ b/Doc/extending/newtypes_tutorial.rst -@@ -449,7 +449,7 @@ - though we can make sure the members are initialized to non-``NULL`` values, the - members can be set to ``NULL`` if the attributes are deleted. - --We define a single method, :meth:`!Custom.name()`, that outputs the objects name as the -+We define a single method, :meth:`!Custom.name`, that outputs the objects name as the - concatenation of the first and last names. :: - - static PyObject * -diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst -index c8beb64e39b..ebb6d5ed128 100644 ---- a/Doc/faq/design.rst -+++ b/Doc/faq/design.rst -@@ -70,7 +70,7 @@ - Python behaves like many popular languages including C and Java. - - Many numbers that can be written easily in decimal notation cannot be expressed --exactly in binary floating-point. For example, after:: -+exactly in binary floating point. For example, after:: - - >>> x = 1.2 - -@@ -87,7 +87,7 @@ - The typical precision of 53 bits provides Python floats with 15--16 - decimal digits of accuracy. - --For a fuller explanation, please see the :ref:`floating point arithmetic -+For a fuller explanation, please see the :ref:`floating-point arithmetic - ` chapter in the Python tutorial. - - -diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst -index ac10a04d50d..45fd7eb1140 100644 ---- a/Doc/faq/library.rst -+++ b/Doc/faq/library.rst -@@ -825,12 +825,12 @@ - import random - random.random() - --This returns a random floating point number in the range [0, 1). -+This returns a random floating-point number in the range [0, 1). - - There are also many other specialized generators in this module, such as: - - * ``randrange(a, b)`` chooses an integer in the range [a, b). --* ``uniform(a, b)`` chooses a floating point number in the range [a, b). -+* ``uniform(a, b)`` chooses a floating-point number in the range [a, b). - * ``normalvariate(mean, sdev)`` samples the normal (Gaussian) distribution. - - Some higher-level functions operate on sequences directly, such as: -diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst -index f43f69b8a1e..ddfb25d5526 100644 ---- a/Doc/faq/programming.rst -+++ b/Doc/faq/programming.rst -@@ -869,7 +869,7 @@ - -------------------------------------- - - For integers, use the built-in :func:`int` type constructor, e.g. ``int('144') --== 144``. Similarly, :func:`float` converts to floating-point, -+== 144``. Similarly, :func:`float` converts to a floating-point number, - e.g. ``float('144') == 144.0``. - - By default, these interpret the number as decimal, so that ``int('0144') == -@@ -1013,7 +1013,7 @@ - For simple input parsing, the easiest approach is usually to split the line into - whitespace-delimited words using the :meth:`~str.split` method of string objects - and then convert decimal strings to numeric values using :func:`int` or --:func:`float`. :meth:`!split()` supports an optional "sep" parameter which is useful -+:func:`float`. :meth:`!split` supports an optional "sep" parameter which is useful - if the line uses something other than whitespace as a separator. - - For more complicated input parsing, regular expressions are more powerful -@@ -1741,11 +1741,31 @@ - is textually replaced with ``_classname__spam``, where ``classname`` is the - current class name with any leading underscores stripped. 
- --This doesn't guarantee privacy: an outside user can still deliberately access --the "_classname__spam" attribute, and private values are visible in the object's --``__dict__``. Many Python programmers never bother to use private variable --names at all. -+The identifier can be used unchanged within the class, but to access it outside -+the class, the mangled name must be used: - -+.. code-block:: python -+ -+ class A: -+ def __one(self): -+ return 1 -+ def two(self): -+ return 2 * self.__one() -+ -+ class B(A): -+ def three(self): -+ return 3 * self._A__one() -+ -+ four = 4 * A()._A__one() -+ -+In particular, this does not guarantee privacy since an outside user can still -+deliberately access the private attribute; many Python programmers never bother -+to use private variable names at all. -+ -+.. seealso:: -+ -+ The :ref:`private name mangling specifications ` -+ for details and special cases. - - My class defines __del__ but it is not called when I delete the object. - ----------------------------------------------------------------------- -diff --git a/Doc/glossary.rst b/Doc/glossary.rst -index d1745bf5ccd..c85370fec84 100644 ---- a/Doc/glossary.rst -+++ b/Doc/glossary.rst -@@ -591,6 +591,14 @@ - :ref:`idle` is a basic editor and interpreter environment - which ships with the standard distribution of Python. - -+ immortal -+ *Immortal objects* are a CPython implementation detail introduced -+ in :pep:`683`. -+ -+ If an object is immortal, its :term:`reference count` is never modified, -+ and therefore it is never deallocated while the interpreter is running. -+ For example, :const:`True` and :const:`None` are immortal in CPython. -+ - immutable - An object with a fixed value. Immutable objects include numbers, strings and - tuples. Such an object cannot be altered. A new object has to -diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst -index 330402d1835..8be1be3450f 100644 ---- a/Doc/howto/descriptor.rst -+++ b/Doc/howto/descriptor.rst -@@ -513,7 +513,7 @@ - - Descriptors are a powerful, general purpose protocol. They are the mechanism - behind properties, methods, static methods, class methods, and --:func:`super()`. They are used throughout Python itself. Descriptors -+:func:`super`. They are used throughout Python itself. Descriptors - simplify the underlying C code and offer a flexible set of new tools for - everyday Python programs. - -@@ -787,7 +787,7 @@ - --------------------- - - The logic for super's dotted lookup is in the :meth:`__getattribute__` method for --object returned by :class:`super()`. -+object returned by :func:`super`. - - A dotted lookup such as ``super(A, obj).m`` searches ``obj.__class__.__mro__`` - for the base class ``B`` immediately following ``A`` and then returns -@@ -803,7 +803,7 @@ - Summary of invocation logic - --------------------------- - --The mechanism for descriptors is embedded in the :meth:`__getattribute__()` -+The mechanism for descriptors is embedded in the :meth:`__getattribute__` - methods for :class:`object`, :class:`type`, and :func:`super`. - - The important points to remember are: -diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst -index ffdafb749c7..b575e00bc7c 100644 ---- a/Doc/howto/enum.rst -+++ b/Doc/howto/enum.rst -@@ -7,7 +7,7 @@ - .. currentmodule:: enum - - An :class:`Enum` is a set of symbolic names bound to unique values. 
They are --similar to global variables, but they offer a more useful :func:`repr()`, -+similar to global variables, but they offer a more useful :func:`repr`, - grouping, type-safety, and a few other features. - - They are most useful when you have a variable that can take one of a limited -@@ -165,7 +165,7 @@ - answer SO questions - - In cases where the actual values of the members do not matter, you can save --yourself some work and use :func:`auto()` for the values:: -+yourself some work and use :func:`auto` for the values:: - - >>> from enum import auto - >>> class Weekday(Flag): -@@ -1129,6 +1129,14 @@ - >>> (Color.RED | Color.GREEN).name - 'RED|GREEN' - -+ >>> class Perm(IntFlag): -+ ... R = 4 -+ ... W = 2 -+ ... X = 1 -+ ... -+ >>> (Perm.R & Perm.W).name is None # effectively Perm(0) -+ True -+ - - multi-bit flags, aka aliases, can be returned from operations:: - - >>> Color.RED | Color.BLUE -diff --git a/Doc/howto/instrumentation.rst b/Doc/howto/instrumentation.rst -index 9c99fcecce1..6e03ef20a21 100644 ---- a/Doc/howto/instrumentation.rst -+++ b/Doc/howto/instrumentation.rst -@@ -307,7 +307,7 @@ - .. object:: gc__start(int generation) - - Fires when the Python interpreter starts a garbage collection cycle. -- ``arg0`` is the generation to scan, like :func:`gc.collect()`. -+ ``arg0`` is the generation to scan, like :func:`gc.collect`. - - .. object:: gc__done(long collected) - -diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst -index e35855deedb..a636e06bda8 100644 ---- a/Doc/howto/isolating-extensions.rst -+++ b/Doc/howto/isolating-extensions.rst -@@ -339,7 +339,7 @@ - - Define a traverse function using ``Py_tp_traverse``, which - visits the type (e.g. using ``Py_VISIT(Py_TYPE(self))``). - --Please refer to the the documentation of -+Please refer to the documentation of - :c:macro:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse` - for additional considerations. - -diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst -index 06a1ec18b0a..c7b463d1303 100644 ---- a/Doc/howto/logging-cookbook.rst -+++ b/Doc/howto/logging-cookbook.rst -@@ -4022,7 +4022,7 @@ - which writes to ``sys.stderr`` makes multiple writes, each of which results in a - separate logged line (for example, the last three lines above). To get around - this problem, you need to buffer things and only output log lines when newlines --are seen. Let's use a slghtly better implementation of ``LoggerWriter``: -+are seen. Let's use a slightly better implementation of ``LoggerWriter``: - - .. code-block:: python - -diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst -index 877cb24328c..5a392f94da4 100644 ---- a/Doc/howto/logging.rst -+++ b/Doc/howto/logging.rst -@@ -381,8 +381,52 @@ - The flow of log event information in loggers and handlers is illustrated in the - following diagram. - --.. image:: logging_flow.png -- :class: invert-in-dark-mode -+.. only:: not html -+ -+ .. image:: logging_flow.* -+ -+.. raw:: html -+ :file: logging_flow.svg -+ -+.. raw:: html -+ -+ - - Loggers - ^^^^^^^ ---- /dev/null -+++ b/Doc/howto/logging_flow.svg -@@ -0,0 +1,327 @@ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ Logger flow -+ -+ -+ -+ -+ Create -+ LogRecord -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ Logging call in user -+ code, e.g. -+ -+ -+ logger.info(...) -+ -+ -+ -+ -+ -+ -+ -+ -+ Stop -+ -+ -+ -+ -+ -+ Does a filter attached -+ to logger reject the -+ record? 
-+ -+ -+ -+ -+ -+ -+ -+ -+ -+ Pass record to -+ handlers of -+ current logger -+ -+ -+ -+ -+ -+ Is propagate true for -+ current logger? -+ -+ -+ -+ -+ -+ Is there a parent -+ logger? -+ -+ -+ -+ -+ -+ Set current -+ logger to parent -+ -+ -+ -+ -+ -+ At least one handler -+ in hierarchy? -+ -+ -+ -+ -+ -+ Use -+ lastResort -+ handler -+ -+ -+ -+ -+ -+ Handler enabled for -+ level of record? -+ -+ -+ -+ -+ -+ Does a filter attached -+ to handler reject the -+ record? -+ -+ -+ -+ -+ -+ Stop -+ -+ -+ -+ -+ -+ Emit (includes formatting) -+ -+ -+ -+ Handler flow -+ -+ -+ -+ -+ Logger enabled for -+ level of call? -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ No -+ -+ -+ Yes -+ -+ -+ Yes -+ -+ -+ No -+ -+ -+ No -+ -+ -+ Yes -+ -+ -+ Yes -+ -+ -+ No -+ -+ -+ No -+ -+ -+ Yes -+ -+ -+ -+ -+ -+ No -+ -+ -+ -+ -+ -+ -+ -+ -+ Yes -+ -+ -+ No -+ -+ -+ -+ -+ -+ Yes -+ -+ -+ Record passed -+ to handler -+ -+ -+ -+ -+ -+ -+ -+ --- /dev/null +++ b/Doc/includes/wasm-ios-notavail.rst @@ -0,0 +1,8 @@ @@ -4137,526 +23,6 @@ index e680e1f9b43..c1b79d2a4a0 100644 - ``wasm32-emscripten`` and ``wasm32-wasi``. See + This module does not work or is not available on WebAssembly. See :ref:`wasm-availability` for more information. -diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst -index 6232e173d95..647ff9da04d 100644 ---- a/Doc/library/__main__.rst -+++ b/Doc/library/__main__.rst -@@ -251,9 +251,9 @@ - >>> asyncio.__main__.__name__ - 'asyncio.__main__' - --This won't work for ``__main__.py`` files in the root directory of a .zip file --though. Hence, for consistency, minimal ``__main__.py`` like the :mod:`venv` --one mentioned below are preferred. -+This won't work for ``__main__.py`` files in the root directory of a -+``.zip`` file though. Hence, for consistency, a minimal ``__main__.py`` -+without a ``__name__`` check is preferred. - - .. seealso:: - -diff --git a/Doc/library/array.rst b/Doc/library/array.rst -index beaa8cdadda..b4e656a7a83 100644 ---- a/Doc/library/array.rst -+++ b/Doc/library/array.rst -@@ -9,7 +9,7 @@ - -------------- - - This module defines an object type which can compactly represent an array of --basic values: characters, integers, floating point numbers. Arrays are sequence -+basic values: characters, integers, floating-point numbers. Arrays are sequence - types and behave very much like lists, except that the type of objects stored in - them is constrained. The type is specified at object creation time by using a - :dfn:`type code`, which is a single character. The following type codes are -@@ -253,7 +253,7 @@ - array with the same type and value using :func:`eval`, so long as the - :class:`~array.array` class has been imported using ``from array import array``. - Variables ``inf`` and ``nan`` must also be defined if it contains --corresponding floating point values. -+corresponding floating-point values. - Examples:: - - array('l') -diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst -index 1c0c808e273..46e5e8f63da 100644 ---- a/Doc/library/ast.rst -+++ b/Doc/library/ast.rst -@@ -881,11 +881,15 @@ - .. class:: AnnAssign(target, annotation, value, simple) - - An assignment with a type annotation. ``target`` is a single node and can -- be a :class:`Name`, a :class:`Attribute` or a :class:`Subscript`. -+ be a :class:`Name`, an :class:`Attribute` or a :class:`Subscript`. 
- ``annotation`` is the annotation, such as a :class:`Constant` or :class:`Name` -- node. ``value`` is a single optional node. ``simple`` is a boolean integer -- set to True for a :class:`Name` node in ``target`` that do not appear in -- between parenthesis and are hence pure names and not expressions. -+ node. ``value`` is a single optional node. -+ -+ ``simple`` is always either 0 (indicating a "complex" target) or 1 -+ (indicating a "simple" target). A "simple" target consists solely of a -+ :class:`Name` node that does not appear between parentheses; all other -+ targets are considered complex. Only simple targets appear in -+ the :attr:`__annotations__` dictionary of modules and classes. - - .. doctest:: - -@@ -2000,7 +2004,7 @@ - YieldFrom(value) - - A ``yield`` or ``yield from`` expression. Because these are expressions, they -- must be wrapped in a :class:`Expr` node if the value sent back is not used. -+ must be wrapped in an :class:`Expr` node if the value sent back is not used. - - .. doctest:: - -@@ -2167,7 +2171,7 @@ - If ``type_comments=True`` is given, the parser is modified to check - and return type comments as specified by :pep:`484` and :pep:`526`. - This is equivalent to adding :data:`ast.PyCF_TYPE_COMMENTS` to the -- flags passed to :func:`compile()`. This will report syntax errors -+ flags passed to :func:`compile`. This will report syntax errors - for misplaced type comments. Without this flag, type comments will - be ignored, and the ``type_comment`` field on selected AST nodes - will always be ``None``. In addition, the locations of ``# type: -diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst -index ba0ee1b6c2c..bdcea54f6a0 100644 ---- a/Doc/library/asyncio-eventloop.rst -+++ b/Doc/library/asyncio-eventloop.rst -@@ -126,7 +126,7 @@ - - Run the event loop until :meth:`stop` is called. - -- If :meth:`stop` is called before :meth:`run_forever()` is called, -+ If :meth:`stop` is called before :meth:`run_forever` is called, - the loop will poll the I/O selector once with a timeout of zero, - run all callbacks scheduled in response to I/O events (and - those that were already scheduled), and then exit. -@@ -165,7 +165,7 @@ - .. coroutinemethod:: loop.shutdown_asyncgens() - - Schedule all currently open :term:`asynchronous generator` objects to -- close with an :meth:`~agen.aclose()` call. After calling this method, -+ close with an :meth:`~agen.aclose` call. After calling this method, - the event loop will issue a warning if a new asynchronous generator - is iterated. This should be used to reliably finalize all scheduled - asynchronous generators. -@@ -1139,6 +1139,14 @@ - - Asynchronous version of :meth:`socket.getnameinfo`. - -+.. note:: -+ Both *getaddrinfo* and *getnameinfo* internally utilize their synchronous -+ versions through the loop's default thread pool executor. -+ When this executor is saturated, these methods may experience delays, -+ which higher-level networking libraries may report as increased timeouts. -+ To mitigate this, consider using a custom executor for other user tasks, -+ or setting a default executor with a larger number of workers. -+ - .. versionchanged:: 3.7 - Both *getaddrinfo* and *getnameinfo* methods were always documented - to return a coroutine, but prior to Python 3.7 they were, in fact, -@@ -1238,6 +1246,9 @@ - - The *executor* argument should be an :class:`concurrent.futures.Executor` - instance. The default executor is used if *executor* is ``None``. 
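A minimal sketch of the executor behaviour described here, including a larger default :class:`!ThreadPoolExecutor` as suggested in the *getaddrinfo*/*getnameinfo* note above (``blocking_io`` and the worker count are hypothetical)::

    import asyncio
    import concurrent.futures

    def blocking_io():
        # Stand-in for a blocking call that should not run on the event loop.
        return sum(range(10**6))

    async def main():
        loop = asyncio.get_running_loop()

        # executor=None selects the loop's default executor: a lazily
        # created ThreadPoolExecutor unless one has been set explicitly.
        result = await loop.run_in_executor(None, blocking_io)

        # Install a larger default executor so blocking user tasks do not
        # starve internal users such as getaddrinfo()/getnameinfo().
        loop.set_default_executor(
            concurrent.futures.ThreadPoolExecutor(max_workers=32)
        )
        result = await loop.run_in_executor(None, blocking_io)
        return result

    asyncio.run(main())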
-+ The default executor can be set by :meth:`loop.set_default_executor`, -+ otherwise, a :class:`concurrent.futures.ThreadPoolExecutor` will be -+ lazy-initialized and used by :func:`run_in_executor` if needed. - - Example:: - -@@ -1375,7 +1386,7 @@ - - This method should not be overloaded in subclassed - event loops. For custom exception handling, use -- the :meth:`set_exception_handler()` method. -+ the :meth:`set_exception_handler` method. - - Enabling debug mode - ^^^^^^^^^^^^^^^^^^^ -@@ -1458,7 +1469,7 @@ - * *stdin* can be any of these: - - * a file-like object -- * an existing file descriptor (a positive integer), for example those created with :meth:`os.pipe()` -+ * an existing file descriptor (a positive integer), for example those created with :meth:`os.pipe` - * the :const:`subprocess.PIPE` constant (default) which will create a new - pipe and connect it, - * the value ``None`` which will make the subprocess inherit the file -diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst -index 893ae5518f7..9dce0731411 100644 ---- a/Doc/library/asyncio-future.rst -+++ b/Doc/library/asyncio-future.rst -@@ -120,20 +120,20 @@ - a :exc:`CancelledError` exception. - - If the Future's result isn't yet available, this method raises -- a :exc:`InvalidStateError` exception. -+ an :exc:`InvalidStateError` exception. - - .. method:: set_result(result) - - Mark the Future as *done* and set its result. - -- Raises a :exc:`InvalidStateError` error if the Future is -+ Raises an :exc:`InvalidStateError` error if the Future is - already *done*. - - .. method:: set_exception(exception) - - Mark the Future as *done* and set an exception. - -- Raises a :exc:`InvalidStateError` error if the Future is -+ Raises an :exc:`InvalidStateError` error if the Future is - already *done*. - - .. method:: done() -diff --git a/Doc/library/asyncio-llapi-index.rst b/Doc/library/asyncio-llapi-index.rst -index 67136ba69ec..3e21054aa4f 100644 ---- a/Doc/library/asyncio-llapi-index.rst -+++ b/Doc/library/asyncio-llapi-index.rst -@@ -56,10 +56,10 @@ - * - :meth:`loop.close` - - Close the event loop. - -- * - :meth:`loop.is_running()` -+ * - :meth:`loop.is_running` - - Return ``True`` if the event loop is running. - -- * - :meth:`loop.is_closed()` -+ * - :meth:`loop.is_closed` - - Return ``True`` if the event loop is closed. - - * - ``await`` :meth:`loop.shutdown_asyncgens` -diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst -index d86fbc21351..63afc411d96 100644 ---- a/Doc/library/asyncio-queue.rst -+++ b/Doc/library/asyncio-queue.rst -@@ -55,7 +55,7 @@ - Return ``True`` if there are :attr:`maxsize` items in the queue. - - If the queue was initialized with ``maxsize=0`` (the default), -- then :meth:`full()` never returns ``True``. -+ then :meth:`full` never returns ``True``. - - .. coroutinemethod:: get() - -diff --git a/Doc/library/asyncio-runner.rst b/Doc/library/asyncio-runner.rst -index b68b2570ef0..e2cff48ee41 100644 ---- a/Doc/library/asyncio-runner.rst -+++ b/Doc/library/asyncio-runner.rst -@@ -89,7 +89,7 @@ - current one. By default :func:`asyncio.new_event_loop` is used and set as - current event loop with :func:`asyncio.set_event_loop` if *loop_factory* is ``None``. 
- -- Basically, :func:`asyncio.run()` example can be rewritten with the runner usage:: -+ Basically, :func:`asyncio.run` example can be rewritten with the runner usage:: - - async def main(): - await asyncio.sleep(1) -diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst -index 6c046ebec96..48cd2f247ad 100644 ---- a/Doc/library/asyncio-task.rst -+++ b/Doc/library/asyncio-task.rst -@@ -1104,7 +1104,7 @@ - a :exc:`CancelledError` exception. - - If the Task's result isn't yet available, this method raises -- a :exc:`InvalidStateError` exception. -+ an :exc:`InvalidStateError` exception. - - .. method:: exception() - -diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst -index 184f981c102..1fb575d77f3 100644 ---- a/Doc/library/asyncio.rst -+++ b/Doc/library/asyncio.rst -@@ -56,8 +56,12 @@ - * :ref:`bridge ` callback-based libraries and code - with async/await syntax. - -+.. include:: ../includes/wasm-notavail.rst -+ - .. _asyncio-cli: - -+.. rubric:: asyncio REPL -+ - You can experiment with an ``asyncio`` concurrent context in the REPL: - - .. code-block:: pycon -@@ -70,7 +74,10 @@ - >>> await asyncio.sleep(10, result='hello') - 'hello' - --.. include:: ../includes/wasm-notavail.rst -+.. audit-event:: cpython.run_stdin "" "" -+ -+.. versionchanged:: 3.12.5 (also 3.11.10, 3.10.15, 3.9.20, and 3.8.20) -+ Emits audit events. - - .. We use the "rubric" directive here to avoid creating - the "Reference" subsection in the TOC. -diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst -index ea27436f67f..0adbd305b46 100644 ---- a/Doc/library/collections.abc.rst -+++ b/Doc/library/collections.abc.rst -@@ -216,6 +216,9 @@ - - ABC for classes that provide the :meth:`~object.__call__` method. - -+ See :ref:`annotating-callables` for details on how to use -+ :class:`!Callable` in type annotations. -+ - .. class:: Iterable - - ABC for classes that provide the :meth:`~container.__iter__` method. -@@ -253,6 +256,9 @@ - :meth:`~generator.send`, - :meth:`~generator.throw` and :meth:`~generator.close` methods. - -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`!Generator` in type annotations. -+ - .. versionadded:: 3.5 - - .. class:: Sequence -@@ -331,6 +337,11 @@ - Using ``isinstance(gencoro, Coroutine)`` for them will return ``False``. - Use :func:`inspect.isawaitable` to detect them. - -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`!Coroutine` in type annotations. -+ The variance and order of type parameters correspond to those of -+ :class:`Generator`. -+ - .. versionadded:: 3.5 - - .. class:: AsyncIterable -@@ -352,6 +363,9 @@ - ABC for :term:`asynchronous generator` classes that implement the protocol - defined in :pep:`525` and :pep:`492`. - -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`!AsyncGenerator` in type annotations. -+ - .. versionadded:: 3.6 - - .. class:: Buffer -diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst -index fedf1914145..fe9a35ecfb0 100644 ---- a/Doc/library/collections.rst -+++ b/Doc/library/collections.rst -@@ -99,7 +99,7 @@ - :func:`super` function. A reference to ``d.parents`` is equivalent to: - ``ChainMap(*d.maps[1:])``. 
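A small sketch of :class:`!ChainMap` lookup, writes and ``parents`` behaviour (the ``defaults``/``overrides`` mappings are hypothetical)::

    from collections import ChainMap

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}

    cm = ChainMap(overrides, defaults)
    cm['user']        # 'admin' -- the first mapping wins on lookup
    cm['color']       # 'red'   -- later mappings are searched in turn
    cm.parents        # same as ChainMap(*cm.maps[1:]), i.e. just defaults

    cm['color'] = 'blue'   # writes and deletions go to the first mapping
    overrides              # {'user': 'admin', 'color': 'blue'}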
- -- Note, the iteration order of a :class:`ChainMap()` is determined by -+ Note, the iteration order of a :class:`ChainMap` is determined by - scanning the mappings last to first:: - - >>> baseline = {'music': 'bach', 'art': 'rembrandt'} -diff --git a/Doc/library/colorsys.rst b/Doc/library/colorsys.rst -index 125d62b1740..ffebf4e40dd 100644 ---- a/Doc/library/colorsys.rst -+++ b/Doc/library/colorsys.rst -@@ -14,7 +14,7 @@ - between colors expressed in the RGB (Red Green Blue) color space used in - computer monitors and three other coordinate systems: YIQ, HLS (Hue Lightness - Saturation) and HSV (Hue Saturation Value). Coordinates in all of these color --spaces are floating point values. In the YIQ space, the Y coordinate is between -+spaces are floating-point values. In the YIQ space, the Y coordinate is between - 0 and 1, but the I and Q coordinates can be positive or negative. In all other - spaces, the coordinates are all between 0 and 1. - -diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst -index f2c6433408e..1cf40fa6424 100644 ---- a/Doc/library/compileall.rst -+++ b/Doc/library/compileall.rst -@@ -90,7 +90,7 @@ - .. option:: -j N - - Use *N* workers to compile the files within the given directory. -- If ``0`` is used, then the result of :func:`os.cpu_count()` -+ If ``0`` is used, then the result of :func:`os.cpu_count` - will be used. - - .. option:: --invalidation-mode [timestamp|checked-hash|unchecked-hash] -diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst -index 560260e8a4b..5f04cbc42bf 100644 ---- a/Doc/library/configparser.rst -+++ b/Doc/library/configparser.rst -@@ -147,23 +147,28 @@ - It is possible to read several configurations into a single - :class:`ConfigParser`, where the most recently added configuration has the - highest priority. Any conflicting keys are taken from the more recent --configuration while the previously existing keys are retained. -+configuration while the previously existing keys are retained. The example -+below reads in an ``override.ini`` file, which will override any conflicting -+keys from the ``example.ini`` file. -+ -+.. code-block:: ini -+ -+ [DEFAULT] -+ ServerAliveInterval = -1 - - .. doctest:: - -- >>> another_config = configparser.ConfigParser() -- >>> another_config.read('example.ini') -- ['example.ini'] -- >>> another_config['topsecret.server.example']['Port'] -- '50022' -- >>> another_config.read_string("[topsecret.server.example]\nPort=48484") -- >>> another_config['topsecret.server.example']['Port'] -- '48484' -- >>> another_config.read_dict({"topsecret.server.example": {"Port": 21212}}) -- >>> another_config['topsecret.server.example']['Port'] -- '21212' -- >>> another_config['topsecret.server.example']['ForwardX11'] -- 'no' -+ >>> config_override = configparser.ConfigParser() -+ >>> config_override['DEFAULT'] = {'ServerAliveInterval': '-1'} -+ >>> with open('override.ini', 'w') as configfile: -+ ... config_override.write(configfile) -+ ... -+ >>> config_override = configparser.ConfigParser() -+ >>> config_override.read(['example.ini', 'override.ini']) -+ ['example.ini', 'override.ini'] -+ >>> print(config_override.get('DEFAULT', 'ServerAliveInterval')) -+ -1 -+ - - This behaviour is equivalent to a :meth:`ConfigParser.read` call with several - files passed to the *filenames* parameter. 
-@@ -955,9 +960,34 @@ - When *converters* is given, it should be a dictionary where each key - represents the name of a type converter and each value is a callable - implementing the conversion from string to the desired datatype. Every -- converter gets its own corresponding :meth:`!get*()` method on the parser -+ converter gets its own corresponding :meth:`!get*` method on the parser - object and section proxies. - -+ It is possible to read several configurations into a single -+ :class:`ConfigParser`, where the most recently added configuration has the -+ highest priority. Any conflicting keys are taken from the more recent -+ configuration while the previously existing keys are retained. The example -+ below reads in an ``override.ini`` file, which will override any conflicting -+ keys from the ``example.ini`` file. -+ -+ .. code-block:: ini -+ -+ [DEFAULT] -+ ServerAliveInterval = -1 -+ -+ .. doctest:: -+ -+ >>> config_override = configparser.ConfigParser() -+ >>> config_override['DEFAULT'] = {'ServerAliveInterval': '-1'} -+ >>> with open('override.ini', 'w') as configfile: -+ ... config_override.write(configfile) -+ ... -+ >>> config_override = configparser.ConfigParser() -+ >>> config_override.read(['example.ini', 'override.ini']) -+ ['example.ini', 'override.ini'] -+ >>> print(config_override.get('DEFAULT', 'ServerAliveInterval')) -+ -1 -+ - .. versionchanged:: 3.1 - The default *dict_type* is :class:`collections.OrderedDict`. - -@@ -970,7 +1000,7 @@ - The *converters* argument was added. - - .. versionchanged:: 3.7 -- The *defaults* argument is read with :meth:`read_dict()`, -+ The *defaults* argument is read with :meth:`read_dict`, - providing consistent behavior across the parser: non-string - keys and values are implicitly converted to strings. - -@@ -1123,7 +1153,7 @@ - .. method:: getfloat(section, option, *, raw=False, vars=None[, fallback]) - - A convenience method which coerces the *option* in the specified *section* -- to a floating point number. See :meth:`get` for explanation of *raw*, -+ to a floating-point number. See :meth:`get` for explanation of *raw*, - *vars* and *fallback*. - - -diff --git a/Doc/library/constants.rst b/Doc/library/constants.rst -index 93a7244f87d..3eceecc4e0a 100644 ---- a/Doc/library/constants.rst -+++ b/Doc/library/constants.rst -@@ -79,6 +79,8 @@ - :exc:`SyntaxError`), so they can be considered "true" constants. - - -+.. _site-consts: -+ - Constants added by the :mod:`site` module - ----------------------------------------- - -@@ -94,6 +96,13 @@ - (i.e. EOF) to exit", and when called, raise :exc:`SystemExit` with the - specified exit code. - -+.. data:: help -+ :noindex: -+ -+ Object that when printed, prints the message "Type help() for interactive -+ help, or help(object) for help about object.", and when called, -+ acts as described :func:`elsewhere `. -+ - .. data:: copyright - credits - -diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst -index 27cf99446e5..f5b349441bc 100644 ---- a/Doc/library/contextlib.rst -+++ b/Doc/library/contextlib.rst -@@ -322,7 +322,7 @@ - - .. versionchanged:: 3.12 - ``suppress`` now supports suppressing exceptions raised as -- part of an :exc:`BaseExceptionGroup`. -+ part of a :exc:`BaseExceptionGroup`. - - .. 
function:: redirect_stdout(new_target) - -diff --git a/Doc/library/contextvars.rst b/Doc/library/contextvars.rst -index 8ae386b489f..b2261ea5127 100644 ---- a/Doc/library/contextvars.rst -+++ b/Doc/library/contextvars.rst -@@ -15,7 +15,7 @@ - manage the current context in asynchronous frameworks. - - Context managers that have state should use Context Variables --instead of :func:`threading.local()` to prevent their state from -+instead of :func:`threading.local` to prevent their state from - bleeding to other code unexpectedly, when used in concurrent code. - - See also :pep:`567` for additional details. -@@ -146,7 +146,7 @@ - - Every thread will have a different top-level :class:`~contextvars.Context` - object. This means that a :class:`ContextVar` object behaves in a similar -- fashion to :func:`threading.local()` when values are assigned in different -+ fashion to :func:`threading.local` when values are assigned in different - threads. - - Context implements the :class:`collections.abc.Mapping` interface. -diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst -index e01bd9277b1..130f9b9cab4 100644 ---- a/Doc/library/ctypes.rst -+++ b/Doc/library/ctypes.rst -@@ -107,7 +107,7 @@ - - Note that win32 system dlls like ``kernel32`` and ``user32`` often export ANSI - as well as UNICODE versions of a function. The UNICODE version is exported with --an ``W`` appended to the name, while the ANSI version is exported with an ``A`` -+a ``W`` appended to the name, while the ANSI version is exported with an ``A`` - appended to the name. The win32 ``GetModuleHandle`` function, which returns a - *module handle* for a given module name, has the following C prototype, and a - macro is used to expose one of them as ``GetModuleHandle`` depending on whether diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index 2ebda3d3396..91ea6150fb1 100644 --- a/Doc/library/curses.rst @@ -4670,722 +36,36 @@ index 2ebda3d3396..91ea6150fb1 100644 .. note:: Whenever the documentation mentions a *character* it can be specified -diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst -index e4a9cd4ebcb..87b532fb4f8 100644 ---- a/Doc/library/dataclasses.rst -+++ b/Doc/library/dataclasses.rst -@@ -124,7 +124,7 @@ - - *unsafe_hash*: If ``False`` (the default), a :meth:`~object.__hash__` method - is generated according to how *eq* and *frozen* are set. - -- :meth:`!__hash__` is used by built-in :meth:`hash()`, and when objects are -+ :meth:`!__hash__` is used by built-in :meth:`hash`, and when objects are - added to hashed collections such as dictionaries and sets. Having a - :meth:`!__hash__` implies that instances of the class are immutable. - Mutability is a complicated property that depends on the programmer's -@@ -185,10 +185,21 @@ - - *slots*: If true (the default is ``False``), :attr:`~object.__slots__` attribute - will be generated and new class will be returned instead of the original one. - If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` -- is raised. Calling no-arg :func:`super` in dataclasses using ``slots=True`` will result in -- the following exception being raised: -- ``TypeError: super(type, obj): obj must be an instance or subtype of type``. -- The two-arg :func:`super` is a valid workaround. See :gh:`90562` for full details. -+ is raised. -+ -+ .. 
warning:: -+ Calling no-arg :func:`super` in dataclasses using ``slots=True`` -+ will result in the following exception being raised: -+ ``TypeError: super(type, obj): obj must be an instance or subtype of type``. -+ The two-arg :func:`super` is a valid workaround. -+ See :gh:`90562` for full details. -+ -+ .. warning:: -+ Passing parameters to a base class :meth:`~object.__init_subclass__` -+ when using ``slots=True`` will result in a :exc:`TypeError`. -+ Either use ``__init_subclass__`` with no parameters -+ or use default values as a workaround. -+ See :gh:`91126` for full details. - - .. versionadded:: 3.10 - -@@ -204,7 +215,8 @@ - - - *weakref_slot*: If true (the default is ``False``), add a slot - named "__weakref__", which is required to make an instance -- weakref-able. It is an error to specify ``weakref_slot=True`` -+ :func:`weakref-able `. -+ It is an error to specify ``weakref_slot=True`` - without also specifying ``slots=True``. - - .. versionadded:: 3.11 -diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst -index 4ba9d6df890..e3ddd8ca82e 100644 ---- a/Doc/library/datetime.rst -+++ b/Doc/library/datetime.rst -@@ -48,7 +48,7 @@ - ----------------------- +diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst +index 500e831908f..7f7d650bf5d 100644 +--- a/Doc/library/dbm.rst ++++ b/Doc/library/dbm.rst +@@ -14,6 +14,7 @@ + is a `third party interface `_ to + the Oracle Berkeley DB. - Date and time objects may be categorized as "aware" or "naive" depending on --whether or not they include timezone information. -+whether or not they include time zone information. ++.. include:: ../includes/wasm-ios-notavail.rst - With sufficient knowledge of applicable algorithmic and political time - adjustments, such as time zone and daylight saving time information, -@@ -58,7 +58,7 @@ + .. exception:: error - A **naive** object does not contain enough information to unambiguously locate - itself relative to other date/time objects. Whether a naive object represents --Coordinated Universal Time (UTC), local time, or time in some other timezone is -+Coordinated Universal Time (UTC), local time, or time in some other time zone is - purely up to the program, just like it is up to the program whether a - particular number represents metres, miles, or mass. Naive objects are easy to - understand and to work with, at the cost of ignoring some aspects of reality. -@@ -70,9 +70,9 @@ - time, the time zone name, and whether daylight saving time is in effect. +@@ -398,4 +399,3 @@ + .. method:: dumbdbm.close() - Only one concrete :class:`tzinfo` class, the :class:`timezone` class, is --supplied by the :mod:`!datetime` module. The :class:`timezone` class can --represent simple timezones with fixed offsets from UTC, such as UTC itself or --North American EST and EDT timezones. Supporting timezones at deeper levels of -+supplied by the :mod:`!datetime` module. The :class:`!timezone` class can -+represent simple time zones with fixed offsets from UTC, such as UTC itself or -+North American EST and EDT time zones. Supporting time zones at deeper levels of - detail is up to the application. The rules for time adjustment across the - world are more political than rational, change frequently, and there is no - standard suitable for every application aside from UTC. -@@ -95,7 +95,7 @@ - - .. attribute:: UTC - -- Alias for the UTC timezone singleton :attr:`datetime.timezone.utc`. -+ Alias for the UTC time zone singleton :attr:`datetime.timezone.utc`. - - .. 
versionadded:: 3.11 - -@@ -283,17 +283,23 @@ - Note that, because of normalization, ``timedelta.max`` is greater than ``-timedelta.min``. - ``-timedelta.max`` is not representable as a :class:`timedelta` object. - -+ - Instance attributes (read-only): - --+------------------+--------------------------------------------+ --| Attribute | Value | --+==================+============================================+ --| ``days`` | Between -999999999 and 999999999 inclusive | --+------------------+--------------------------------------------+ --| ``seconds`` | Between 0 and 86399 inclusive | --+------------------+--------------------------------------------+ --| ``microseconds`` | Between 0 and 999999 inclusive | --+------------------+--------------------------------------------+ -+.. attribute:: timedelta.days -+ -+ Between -999,999,999 and 999,999,999 inclusive. -+ -+ -+.. attribute:: timedelta.seconds -+ -+ Between 0 and 86,399 inclusive. -+ -+ -+.. attribute:: timedelta.microseconds -+ -+ Between 0 and 999,999 inclusive. -+ - - Supported operations: - -@@ -850,7 +856,7 @@ - - .. classmethod:: datetime.today() - -- Return the current local datetime, with :attr:`.tzinfo` ``None``. -+ Return the current local date and time, with :attr:`.tzinfo` ``None``. - - Equivalent to:: - -@@ -1034,7 +1040,7 @@ - .. versionadded:: 3.7 - .. versionchanged:: 3.11 - Previously, this method only supported formats that could be emitted by -- :meth:`date.isoformat()` or :meth:`datetime.isoformat()`. -+ :meth:`date.isoformat` or :meth:`datetime.isoformat`. - - - .. classmethod:: datetime.fromisocalendar(year, week, day) -@@ -1051,7 +1057,7 @@ - Return a :class:`.datetime` corresponding to *date_string*, parsed according to - *format*. - -- If *format* does not contain microseconds or timezone information, this is equivalent to:: -+ If *format* does not contain microseconds or time zone information, this is equivalent to:: - - datetime(*(time.strptime(date_string, format)[0:6])) - -@@ -1267,22 +1273,22 @@ - - If provided, *tz* must be an instance of a :class:`tzinfo` subclass, and its - :meth:`utcoffset` and :meth:`dst` methods must not return ``None``. If *self* -- is naive, it is presumed to represent time in the system timezone. -+ is naive, it is presumed to represent time in the system time zone. - - If called without arguments (or with ``tz=None``) the system local -- timezone is assumed for the target timezone. The ``.tzinfo`` attribute of the converted -+ time zone is assumed for the target time zone. The ``.tzinfo`` attribute of the converted - datetime instance will be set to an instance of :class:`timezone` - with the zone name and offset obtained from the OS. - - If ``self.tzinfo`` is *tz*, ``self.astimezone(tz)`` is equal to *self*: no - adjustment of date or time data is performed. Else the result is local -- time in the timezone *tz*, representing the same UTC time as *self*: after -+ time in the time zone *tz*, representing the same UTC time as *self*: after - ``astz = dt.astimezone(tz)``, ``astz - astz.utcoffset()`` will have - the same date and time data as ``dt - dt.utcoffset()``. - -- If you merely want to attach a time zone object *tz* to a datetime *dt* without -+ If you merely want to attach a :class:`timezone` object *tz* to a datetime *dt* without - adjustment of date and time data, use ``dt.replace(tzinfo=tz)``. 
If you -- merely want to remove the time zone object from an aware datetime *dt* without -+ merely want to remove the :class:`!timezone` object from an aware datetime *dt* without - conversion of date and time data, use ``dt.replace(tzinfo=None)``. - - Note that the default :meth:`tzinfo.fromutc` method can be overridden in a -@@ -1292,7 +1298,7 @@ - def astimezone(self, tz): - if self.tzinfo is tz: - return self -- # Convert self to UTC, and attach the new time zone object. -+ # Convert self to UTC, and attach the new timezone object. - utc = (self - self.utcoffset()).replace(tzinfo=tz) - # Convert from UTC to tz's local time. - return tz.fromutc(utc) -@@ -1406,7 +1412,7 @@ - - There is no method to obtain the POSIX timestamp directly from a - naive :class:`.datetime` instance representing UTC time. If your -- application uses this convention and your system timezone is not -+ application uses this convention and your system time zone is not - set to UTC, you can obtain the POSIX timestamp by supplying - ``tzinfo=timezone.utc``:: - -@@ -1817,7 +1823,7 @@ - .. versionadded:: 3.7 - .. versionchanged:: 3.11 - Previously, this method only supported formats that could be emitted by -- :meth:`time.isoformat()`. -+ :meth:`time.isoformat`. - - - Instance methods: -@@ -1974,7 +1980,7 @@ - supply implementations of the standard :class:`tzinfo` methods needed by the - :class:`.datetime` methods you use. The :mod:`!datetime` module provides - :class:`timezone`, a simple concrete subclass of :class:`tzinfo` which can -- represent timezones with fixed offset from UTC such as UTC itself or North -+ represent time zones with fixed offset from UTC such as UTC itself or North - American EST and EDT. - - Special requirement for pickling: A :class:`tzinfo` subclass must have an -@@ -2099,14 +2105,14 @@ - method, ``dt.tzinfo`` is the same object as *self*. :class:`tzinfo` methods can - rely on this, unless user code calls :class:`tzinfo` methods directly. The - intent is that the :class:`tzinfo` methods interpret *dt* as being in local --time, and not need worry about objects in other timezones. -+time, and not need worry about objects in other time zones. - - There is one more :class:`tzinfo` method that a subclass may wish to override: - - - .. method:: tzinfo.fromutc(dt) - -- This is called from the default :class:`datetime.astimezone()` -+ This is called from the default :meth:`datetime.astimezone` - implementation. When called from that, ``dt.tzinfo`` is *self*, and *dt*'s - date and time data are to be viewed as expressing a UTC time. The purpose - of :meth:`fromutc` is to adjust the date and time data, returning an -@@ -2216,12 +2222,12 @@ - :mod:`zoneinfo` - The :mod:`!datetime` module has a basic :class:`timezone` class (for - handling arbitrary fixed offsets from UTC) and its :attr:`timezone.utc` -- attribute (a UTC timezone instance). -+ attribute (a UTC :class:`!timezone` instance). - -- ``zoneinfo`` brings the *IANA timezone database* (also known as the Olson -+ ``zoneinfo`` brings the *IANA time zone database* (also known as the Olson - database) to Python, and its usage is recommended. - -- `IANA timezone database `_ -+ `IANA time zone database `_ - The Time Zone Database (often called tz, tzdata or zoneinfo) contains code - and data that represent the history of local time for many representative - locations around the globe. 
It is updated periodically to reflect changes -@@ -2235,10 +2241,10 @@ - ------------------------- - - The :class:`timezone` class is a subclass of :class:`tzinfo`, each --instance of which represents a timezone defined by a fixed offset from -+instance of which represents a time zone defined by a fixed offset from - UTC. - --Objects of this class cannot be used to represent timezone information in the -+Objects of this class cannot be used to represent time zone information in the - locations where different offsets are used in different days of the year or - where historical changes have been made to civil time. - -@@ -2299,7 +2305,7 @@ - - .. attribute:: timezone.utc - -- The UTC timezone, ``timezone(timedelta(0))``. -+ The UTC time zone, ``timezone(timedelta(0))``. - - - .. index:: -@@ -2508,7 +2514,7 @@ - - datetime(*(time.strptime(date_string, format)[0:6])) - --except when the format includes sub-second components or timezone offset -+except when the format includes sub-second components or time zone offset - information, which are supported in ``datetime.strptime`` but are discarded by - ``time.strptime``. - -diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst -index 500e831908f..7f7d650bf5d 100644 ---- a/Doc/library/dbm.rst -+++ b/Doc/library/dbm.rst -@@ -14,6 +14,7 @@ - is a `third party interface `_ to - the Oracle Berkeley DB. - -+.. include:: ../includes/wasm-ios-notavail.rst - - .. exception:: error - -@@ -398,4 +399,3 @@ - .. method:: dumbdbm.close() - - Close the database. -- -diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst -index 9aa4254ab80..c5544f6216f 100644 ---- a/Doc/library/decimal.rst -+++ b/Doc/library/decimal.rst -@@ -1,4 +1,4 @@ --:mod:`!decimal` --- Decimal fixed point and floating point arithmetic -+:mod:`!decimal` --- Decimal fixed-point and floating-point arithmetic - ===================================================================== - - .. module:: decimal -@@ -31,7 +31,7 @@ - -------------- - - The :mod:`decimal` module provides support for fast correctly rounded --decimal floating point arithmetic. It offers several advantages over the -+decimal floating-point arithmetic. It offers several advantages over the - :class:`float` datatype: - - * Decimal "is based on a floating-point model which was designed with people -@@ -207,7 +207,7 @@ - .. versionchanged:: 3.3 - - Decimals interact well with much of the rest of Python. Here is a small decimal --floating point flying circus: -+floating-point flying circus: - - .. doctest:: - :options: +NORMALIZE_WHITESPACE -@@ -373,7 +373,7 @@ - digits, and an integer exponent. For example, ``Decimal((0, (1, 4, 1, 4), -3))`` - returns ``Decimal('1.414')``. - -- If *value* is a :class:`float`, the binary floating point value is losslessly -+ If *value* is a :class:`float`, the binary floating-point value is losslessly - converted to its exact decimal equivalent. This conversion can often require - 53 or more digits of precision. For example, ``Decimal(float('1.1'))`` - converts to -@@ -403,7 +403,7 @@ - Underscores are allowed for grouping, as with integral and floating-point - literals in code. - -- Decimal floating point objects share many properties with the other built-in -+ Decimal floating-point objects share many properties with the other built-in - numeric types such as :class:`float` and :class:`int`. All of the usual math - operations and special methods apply. 
Likewise, decimal objects can be - copied, pickled, printed, used as dictionary keys, used as set elements, -@@ -445,7 +445,7 @@ - Mixed-type comparisons between :class:`Decimal` instances and other - numeric types are now fully supported. - -- In addition to the standard numeric properties, decimal floating point -+ In addition to the standard numeric properties, decimal floating-point - objects also have a number of specialized methods: - - -@@ -897,6 +897,48 @@ - :const:`Rounded`. If given, applies *rounding*; otherwise, uses the - rounding method in either the supplied *context* or the current context. - -+ Decimal numbers can be rounded using the :func:`.round` function: -+ -+ .. describe:: round(number) -+ .. describe:: round(number, ndigits) -+ -+ If *ndigits* is not given or ``None``, -+ returns the nearest :class:`int` to *number*, -+ rounding ties to even, and ignoring the rounding mode of the -+ :class:`Decimal` context. Raises :exc:`OverflowError` if *number* is an -+ infinity or :exc:`ValueError` if it is a (quiet or signaling) NaN. -+ -+ If *ndigits* is an :class:`int`, the context's rounding mode is respected -+ and a :class:`Decimal` representing *number* rounded to the nearest -+ multiple of ``Decimal('1E-ndigits')`` is returned; in this case, -+ ``round(number, ndigits)`` is equivalent to -+ ``self.quantize(Decimal('1E-ndigits'))``. Returns ``Decimal('NaN')`` if -+ *number* is a quiet NaN. Raises :class:`InvalidOperation` if *number* -+ is an infinity, a signaling NaN, or if the length of the coefficient after -+ the quantize operation would be greater than the current context's -+ precision. In other words, for the non-corner cases: -+ -+ * if *ndigits* is positive, return *number* rounded to *ndigits* decimal -+ places; -+ * if *ndigits* is zero, return *number* rounded to the nearest integer; -+ * if *ndigits* is negative, return *number* rounded to the nearest -+ multiple of ``10**abs(ndigits)``. -+ -+ For example:: -+ -+ >>> from decimal import Decimal, getcontext, ROUND_DOWN -+ >>> getcontext().rounding = ROUND_DOWN -+ >>> round(Decimal('3.75')) # context rounding ignored -+ 4 -+ >>> round(Decimal('3.5')) # round-ties-to-even -+ 4 -+ >>> round(Decimal('3.75'), 0) # uses the context rounding -+ Decimal('3') -+ >>> round(Decimal('3.75'), 1) -+ Decimal('3.7') -+ >>> round(Decimal('3.75'), -1) -+ Decimal('0E+1') -+ - - .. _logical_operands_label: - -@@ -1699,7 +1741,7 @@ - - .. _decimal-notes: - --Floating Point Notes -+Floating-Point Notes - -------------------- - - -@@ -1712,7 +1754,7 @@ - - The effects of round-off error can be amplified by the addition or subtraction - of nearly offsetting quantities resulting in loss of significance. Knuth --provides two instructive examples where rounded floating point arithmetic with -+provides two instructive examples where rounded floating-point arithmetic with - insufficient precision causes the breakdown of the associative and distributive - properties of addition: - -@@ -1802,7 +1844,7 @@ - In addition to the two signed zeros which are distinct yet equal, there are - various representations of zero with differing precisions yet equivalent in - value. This takes a bit of getting used to. For an eye accustomed to --normalized floating point representations, it is not immediately obvious that -+normalized floating-point representations, it is not immediately obvious that - the following calculation returns a value equal to zero: - - >>> 1 / Decimal('Infinity') -@@ -2129,7 +2171,7 @@ - - Q. 
Is there a way to convert a regular float to a :class:`Decimal`? - --A. Yes, any binary floating point number can be exactly expressed as a -+A. Yes, any binary floating-point number can be exactly expressed as a - Decimal though an exact conversion may take more precision than intuition would - suggest: - -@@ -2183,7 +2225,7 @@ - A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of - the decimal module integrate the high speed `libmpdec - `_ library for --arbitrary precision correctly rounded decimal floating point arithmetic [#]_. -+arbitrary precision correctly rounded decimal floating-point arithmetic [#]_. - ``libmpdec`` uses `Karatsuba multiplication - `_ - for medium-sized numbers and the `Number Theoretic Transform -diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst -index e3920587099..82b4aa28857 100644 ---- a/Doc/library/dis.rst -+++ b/Doc/library/dis.rst -@@ -995,11 +995,15 @@ - .. opcode:: BUILD_TUPLE (count) - - Creates a tuple consuming *count* items from the stack, and pushes the -- resulting tuple onto the stack.:: -+ resulting tuple onto the stack:: - -- assert count > 0 -- STACK, values = STACK[:-count], STACK[-count:] -- STACK.append(tuple(values)) -+ if count == 0: -+ value = () -+ else: -+ value = tuple(STACK[-count:]) -+ STACK = STACK[:-count] -+ -+ STACK.append(value) - - - .. opcode:: BUILD_LIST (count) -@@ -1128,7 +1132,10 @@ - .. opcode:: COMPARE_OP (opname) - - Performs a Boolean operation. The operation name can be found in -- ``cmp_op[opname]``. -+ ``cmp_op[opname >> 4]``. -+ -+ .. versionchanged:: 3.12 -+ The cmp_op index is now stored in the four-highest bits of oparg instead of the four-lowest bits of oparg. - - - .. opcode:: IS_OP (invert) -@@ -1455,7 +1462,7 @@ - - end = STACK.pop() - start = STACK.pop() -- STACK.append(slice(start, stop)) -+ STACK.append(slice(start, end)) - - if it is 3, implements:: - -@@ -1592,7 +1599,7 @@ - | ``INTRINSIC_STOPITERATION_ERROR`` | Extracts the return value from a | - | | ``StopIteration`` exception. | - +-----------------------------------+-----------------------------------+ -- | ``INTRINSIC_ASYNC_GEN_WRAP`` | Wraps an aync generator value | -+ | ``INTRINSIC_ASYNC_GEN_WRAP`` | Wraps an async generator value | - +-----------------------------------+-----------------------------------+ - | ``INTRINSIC_UNARY_POSITIVE`` | Performs the unary ``+`` | - | | operation | -diff --git a/Doc/library/email.compat32-message.rst b/Doc/library/email.compat32-message.rst -index c4c322a82e1..4285c436e8d 100644 ---- a/Doc/library/email.compat32-message.rst -+++ b/Doc/library/email.compat32-message.rst -@@ -7,6 +7,7 @@ - :synopsis: The base class representing email messages in a fashion - backward compatible with Python 3.2 - :noindex: -+ :no-index: - - - The :class:`Message` class is very similar to the -@@ -104,7 +105,7 @@ - - .. method:: __str__() - -- Equivalent to :meth:`.as_string()`. Allows ``str(msg)`` to produce a -+ Equivalent to :meth:`.as_string`. Allows ``str(msg)`` to produce a - string containing the formatted message. - - -@@ -142,7 +143,7 @@ - - .. method:: __bytes__() - -- Equivalent to :meth:`.as_bytes()`. Allows ``bytes(msg)`` to produce a -+ Equivalent to :meth:`.as_bytes`. Allows ``bytes(msg)`` to produce a - bytes object containing the formatted message. - - .. 
versionadded:: 3.4 -diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst -index 33ab4265116..f8f43d82a3d 100644 ---- a/Doc/library/email.errors.rst -+++ b/Doc/library/email.errors.rst -@@ -58,6 +58,13 @@ - :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. - :class:`~email.mime.image.MIMEImage`). - -+ -+.. exception:: HeaderWriteError() -+ -+ Raised when an error occurs when the :mod:`~email.generator` outputs -+ headers. -+ -+ - .. exception:: MessageDefect() - - This is the base class for all defects found when parsing email messages. -diff --git a/Doc/library/email.header.rst b/Doc/library/email.header.rst -index 6e230d5faf1..219fad0d2f6 100644 ---- a/Doc/library/email.header.rst -+++ b/Doc/library/email.header.rst -@@ -77,7 +77,7 @@ - The maximum line length can be specified explicitly via *maxlinelen*. For - splitting the first line to a shorter value (to account for the field header - which isn't included in *s*, e.g. :mailheader:`Subject`) pass in the name of the -- field in *header_name*. The default *maxlinelen* is 76, and the default value -+ field in *header_name*. The default *maxlinelen* is 78, and the default value - for *header_name* is ``None``, meaning it is not taken into account for the - first line of a long, split header. - -diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst -index e9cce1af186..71d6e321f38 100644 ---- a/Doc/library/email.message.rst -+++ b/Doc/library/email.message.rst -@@ -124,7 +124,7 @@ - - .. method:: __bytes__() - -- Equivalent to :meth:`.as_bytes()`. Allows ``bytes(msg)`` to produce a -+ Equivalent to :meth:`.as_bytes`. Allows ``bytes(msg)`` to produce a - bytes object containing the serialized message. - - -diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst -index 83feedf7283..51d65dc5ba9 100644 ---- a/Doc/library/email.policy.rst -+++ b/Doc/library/email.policy.rst -@@ -229,6 +229,24 @@ - - .. versionadded:: 3.6 - -+ -+ .. attribute:: verify_generated_headers -+ -+ If ``True`` (the default), the generator will raise -+ :exc:`~email.errors.HeaderWriteError` instead of writing a header -+ that is improperly folded or delimited, such that it would -+ be parsed as multiple headers or joined with adjacent data. -+ Such headers can be generated by custom header classes or bugs -+ in the ``email`` module. -+ -+ As it's a security feature, this defaults to ``True`` even in the -+ :class:`~email.policy.Compat32` policy. -+ For backwards compatible, but unsafe, behavior, it must be set to -+ ``False`` explicitly. -+ -+ .. versionadded:: 3.12.5 -+ -+ - The following :class:`Policy` method is intended to be called by code using - the email library to create policy instances with custom settings: - -diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst -index 092bfa81462..1cb744b545d 100644 ---- a/Doc/library/email.utils.rst -+++ b/Doc/library/email.utils.rst -@@ -58,13 +58,18 @@ - begins with angle brackets, they are stripped off. - - --.. function:: parseaddr(address) -+.. function:: parseaddr(address, *, strict=True) - - Parse address -- which should be the value of some address-containing field such - as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and - *email address* parts. Returns a tuple of that information, unless the parse - fails, in which case a 2-tuple of ``('', '')`` is returned. - -+ If *strict* is true, use a strict parser which rejects malformed inputs. -+ -+ .. 
versionchanged:: 3.12.6 -+ Add *strict* optional parameter and reject malformed inputs by default. -+ - - .. function:: formataddr(pair, charset='utf-8') - -@@ -82,12 +87,15 @@ - Added the *charset* option. - - --.. function:: getaddresses(fieldvalues) -+.. function:: getaddresses(fieldvalues, *, strict=True) - - This method returns a list of 2-tuples of the form returned by ``parseaddr()``. - *fieldvalues* is a sequence of header field values as might be returned by -- :meth:`Message.get_all `. Here's a simple -- example that gets all the recipients of a message:: -+ :meth:`Message.get_all `. -+ -+ If *strict* is true, use a strict parser which rejects malformed inputs. -+ -+ Here's a simple example that gets all the recipients of a message:: - - from email.utils import getaddresses - -@@ -97,6 +105,9 @@ - resent_ccs = msg.get_all('resent-cc', []) - all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) - -+ .. versionchanged:: 3.12.6 -+ Add *strict* optional parameter and reject malformed inputs by default. -+ - - .. function:: parsedate(date) - -@@ -148,7 +159,7 @@ - - Fri, 09 Nov 2001 01:08:47 -0000 - -- Optional *timeval* if given is a floating point time value as accepted by -+ Optional *timeval* if given is a floating-point time value as accepted by - :func:`time.gmtime` and :func:`time.localtime`, otherwise the current time is - used. - -diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst -index 3726028492a..518a2940edc 100644 ---- a/Doc/library/ensurepip.rst -+++ b/Doc/library/ensurepip.rst -@@ -38,7 +38,7 @@ - :pep:`453`: Explicit bootstrapping of pip in Python installations - The original rationale and specification for this module. + Close the database. +- +diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst +index 3726028492a..518a2940edc 100644 +--- a/Doc/library/ensurepip.rst ++++ b/Doc/library/ensurepip.rst +@@ -38,7 +38,7 @@ + :pep:`453`: Explicit bootstrapping of pip in Python installations + The original rationale and specification for this module. -.. include:: ../includes/wasm-notavail.rst +.. include:: ../includes/wasm-ios-notavail.rst Command line interface ---------------------- -diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst -index 10acff619f9..6e2872b9c70 100644 ---- a/Doc/library/enum.rst -+++ b/Doc/library/enum.rst -@@ -517,7 +517,7 @@ - - ``Flag`` is the same as :class:`Enum`, but its members support the bitwise - operators ``&`` (*AND*), ``|`` (*OR*), ``^`` (*XOR*), and ``~`` (*INVERT*); -- the results of those operators are members of the enumeration. -+ the results of those operations are (aliases of) members of the enumeration. - - .. method:: __contains__(self, value) - -@@ -560,6 +560,8 @@ - >>> len(white) - 3 - -+ .. versionadded:: 3.11 -+ - .. method:: __bool__(self): - - Returns *True* if any members in flag, *False* otherwise:: -diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst -index de46518e673..537547f6c9c 100644 ---- a/Doc/library/exceptions.rst -+++ b/Doc/library/exceptions.rst -@@ -412,8 +412,8 @@ - represented. This cannot occur for integers (which would rather raise - :exc:`MemoryError` than give up). However, for historical reasons, - OverflowError is sometimes raised for integers that are outside a required -- range. Because of the lack of standardization of floating point exception -- handling in C, most floating point operations are not checked. -+ range. 
Because of the lack of standardization of floating-point exception -+ handling in C, most floating-point operations are not checked. - - - .. exception:: RecursionError diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst index d23a105cd5b..1faef54c116 100644 --- a/Doc/library/fcntl.rst @@ -5399,190 +79,6 @@ index d23a105cd5b..1faef54c116 100644 All functions in this module take a file descriptor *fd* as their first argument. This can be an integer file descriptor, such as returned by -diff --git a/Doc/library/fileinput.rst b/Doc/library/fileinput.rst -index 94a4139f64c..8f32b11e565 100644 ---- a/Doc/library/fileinput.rst -+++ b/Doc/library/fileinput.rst -@@ -47,7 +47,7 @@ - a file may not have one. - - You can control how files are opened by providing an opening hook via the --*openhook* parameter to :func:`fileinput.input` or :class:`FileInput()`. The -+*openhook* parameter to :func:`fileinput.input` or :func:`FileInput`. The - hook must be a function that takes two arguments, *filename* and *mode*, and - returns an accordingly opened file-like object. If *encoding* and/or *errors* - are specified, they will be passed to the hook as additional keyword arguments. -diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst -index 42569ec8e65..11591cb348d 100644 ---- a/Doc/library/fractions.rst -+++ b/Doc/library/fractions.rst -@@ -31,7 +31,7 @@ - :class:`Fraction` instance with the same value. The next two versions accept - either a :class:`float` or a :class:`decimal.Decimal` instance, and return a - :class:`Fraction` instance with exactly the same value. Note that due to the -- usual issues with binary floating-point (see :ref:`tut-fp-issues`), the -+ usual issues with binary floating point (see :ref:`tut-fp-issues`), the - argument to ``Fraction(1.1)`` is not exactly equal to 11/10, and so - ``Fraction(1.1)`` does *not* return ``Fraction(11, 10)`` as one might expect. - (But see the documentation for the :meth:`limit_denominator` method below.) -@@ -87,7 +87,7 @@ - - .. versionchanged:: 3.9 - The :func:`math.gcd` function is now used to normalize the *numerator* -- and *denominator*. :func:`math.gcd` always return a :class:`int` type. -+ and *denominator*. :func:`math.gcd` always returns an :class:`int` type. - Previously, the GCD type depended on *numerator* and *denominator*. - - .. versionchanged:: 3.11 -diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst -index 8c39dc00f5d..bb153220672 100644 ---- a/Doc/library/ftplib.rst -+++ b/Doc/library/ftplib.rst -@@ -243,7 +243,7 @@ - Retrieve a file in binary transfer mode. - - :param str cmd: -- An appropriate ``STOR`` command: :samp:`"STOR {filename}"`. -+ An appropriate ``RETR`` command: :samp:`"RETR {filename}"`. - - :param callback: - A single parameter callable that is called -diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst -index 6901c021d7b..51b6a2f2941 100644 ---- a/Doc/library/functions.rst -+++ b/Doc/library/functions.rst -@@ -57,7 +57,7 @@ - .. function:: abs(x) - - Return the absolute value of a number. The argument may be an -- integer, a floating point number, or an object implementing -+ integer, a floating-point number, or an object implementing - :meth:`~object.__abs__`. - If the argument is a complex number, its magnitude is returned. - -@@ -161,7 +161,7 @@ - This function drops you into the debugger at the call site. Specifically, - it calls :func:`sys.breakpointhook`, passing ``args`` and ``kws`` straight - through. 
By default, ``sys.breakpointhook()`` calls -- :func:`pdb.set_trace()` expecting no arguments. In this case, it is -+ :func:`pdb.set_trace` expecting no arguments. In this case, it is - purely a convenience function so you don't have to explicitly import - :mod:`pdb` or type as much code to enter the debugger. However, - :func:`sys.breakpointhook` can be set to some other function and -@@ -538,7 +538,7 @@ - Take two (non-complex) numbers as arguments and return a pair of numbers - consisting of their quotient and remainder when using integer division. With - mixed operand types, the rules for binary arithmetic operators apply. For -- integers, the result is the same as ``(a // b, a % b)``. For floating point -+ integers, the result is the same as ``(a // b, a % b)``. For floating-point - numbers the result is ``(q, a % b)``, where *q* is usually ``math.floor(a / - b)`` but may be 1 less than that. In any case ``q * b + a % b`` is very - close to *a*, if ``a % b`` is non-zero it has the same sign as *b*, and ``0 -@@ -714,7 +714,7 @@ - single: NaN - single: Infinity - -- Return a floating point number constructed from a number or a string. -+ Return a floating-point number constructed from a number or a string. - - Examples: - -@@ -755,8 +755,8 @@ - Case is not significant, so, for example, "inf", "Inf", "INFINITY", and - "iNfINity" are all acceptable spellings for positive infinity. - -- Otherwise, if the argument is an integer or a floating point number, a -- floating point number with the same value (within Python's floating point -+ Otherwise, if the argument is an integer or a floating-point number, a -+ floating-point number with the same value (within Python's floating-point - precision) is returned. If the argument is outside the range of a Python - float, an :exc:`OverflowError` will be raised. - -@@ -983,7 +983,7 @@ - ``int(x)`` returns ``x.__int__()``. If the argument defines :meth:`~object.__index__`, - it returns ``x.__index__()``. If the argument defines :meth:`~object.__trunc__`, - it returns ``x.__trunc__()``. -- For floating point numbers, this truncates towards zero. -+ For floating-point numbers, this truncates towards zero. - - If the argument is not a number or if *base* is given, then it must be a string, - :class:`bytes`, or :class:`bytearray` instance representing an integer -@@ -1267,7 +1267,7 @@ - (which on *some* Unix systems, means that *all* writes append to the end of - the file regardless of the current seek position). In text mode, if - *encoding* is not specified the encoding used is platform-dependent: -- :func:`locale.getencoding()` is called to get the current locale encoding. -+ :func:`locale.getencoding` is called to get the current locale encoding. - (For reading and writing raw bytes use binary mode and leave - *encoding* unspecified.) The available modes are: - -@@ -1440,7 +1440,7 @@ - (where :func:`open` is declared), :mod:`os`, :mod:`os.path`, :mod:`tempfile`, - and :mod:`shutil`. - -- .. audit-event:: open file,mode,flags open -+ .. audit-event:: open path,mode,flags open - - The ``mode`` and ``flags`` arguments may have been modified or inferred from - the original call. -@@ -1496,7 +1496,9 @@ - returns ``100``, but ``pow(10, -2)`` returns ``0.01``. For a negative base of - type :class:`int` or :class:`float` and a non-integral exponent, a complex - result is delivered. For example, ``pow(-9, 0.5)`` returns a value close -- to ``3j``. -+ to ``3j``. 
Whereas, for a negative base of type :class:`int` or :class:`float` -+ with an integral exponent, a float result is delivered. For example, -+ ``pow(-9, 2.0)`` returns ``81.0``. - - For :class:`int` operands *base* and *exp*, if *mod* is present, *mod* must - also be of integer type and *mod* must be nonzero. If *mod* is present and -@@ -1857,7 +1859,7 @@ - - For some use cases, there are good alternatives to :func:`sum`. - The preferred, fast way to concatenate a sequence of strings is by calling -- ``''.join(sequence)``. To add floating point values with extended precision, -+ ``''.join(sequence)``. To add floating-point values with extended precision, - see :func:`math.fsum`\. To concatenate a series of iterables, consider using - :func:`itertools.chain`. - -@@ -1883,10 +1885,10 @@ - ``D -> B -> C -> A -> object`` and the value of *type* is ``B``, - then :func:`super` searches ``C -> A -> object``. - -- The :attr:`~class.__mro__` attribute of the *object_or_type* lists the method -- resolution search order used by both :func:`getattr` and :func:`super`. The -- attribute is dynamic and can change whenever the inheritance hierarchy is -- updated. -+ The :attr:`~class.__mro__` attribute of the class corresponding to -+ *object_or_type* lists the method resolution search order used by both -+ :func:`getattr` and :func:`super`. The attribute is dynamic and can change -+ whenever the inheritance hierarchy is updated. - - If the second argument is omitted, the super object returned is unbound. If - the second argument is an object, ``isinstance(obj, type)`` must be true. If -diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst -index 655e05f4ce2..6b6e599842d 100644 ---- a/Doc/library/functools.rst -+++ b/Doc/library/functools.rst -@@ -34,7 +34,7 @@ - Returns the same as ``lru_cache(maxsize=None)``, creating a thin - wrapper around a dictionary lookup for the function arguments. Because it - never needs to evict old values, this is smaller and faster than -- :func:`lru_cache()` with a size limit. -+ :func:`lru_cache` with a size limit. - - For example:: - -diff --git a/Doc/library/getpass.rst b/Doc/library/getpass.rst -index b364b1fe031..5c0de1889e5 100644 ---- a/Doc/library/getpass.rst -+++ b/Doc/library/getpass.rst -@@ -49,4 +49,4 @@ - systems which support the :mod:`pwd` module, otherwise, an exception is - raised. - -- In general, this function should be preferred over :func:`os.getlogin()`. -+ In general, this function should be preferred over :func:`os.getlogin`. diff --git a/Doc/library/grp.rst b/Doc/library/grp.rst index 57a77d51a02..f1157e189a3 100644 --- a/Doc/library/grp.rst @@ -5596,69 +92,10 @@ index 57a77d51a02..f1157e189a3 100644 Group database entries are reported as a tuple-like object, whose attributes correspond to the members of the ``group`` structure (Attribute field below, see -diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst -index a2fff0f9fcb..30088666716 100644 ---- a/Doc/library/gzip.rst -+++ b/Doc/library/gzip.rst -@@ -194,7 +194,9 @@ - .. versionchanged:: 3.11 - Speed is improved by compressing all data at once instead of in a - streamed fashion. Calls with *mtime* set to ``0`` are delegated to -- :func:`zlib.compress` for better speed. -+ :func:`zlib.compress` for better speed. In this situation the -+ output may contain a gzip header "OS" byte value other than 255 -+ "unknown" as supplied by the underlying zlib implementation. - - .. 
function:: decompress(data) - -diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst -index fcc314a8d88..d2e43f03f94 100644 ---- a/Doc/library/http.server.rst -+++ b/Doc/library/http.server.rst -@@ -263,7 +263,7 @@ - - Adds a blank line - (indicating the end of the HTTP headers in the response) -- to the headers buffer and calls :meth:`flush_headers()`. -+ to the headers buffer and calls :meth:`flush_headers`. - - .. versionchanged:: 3.2 - The buffered headers are written to the output stream. -@@ -378,7 +378,7 @@ - - If the request was mapped to a file, it is opened. Any :exc:`OSError` - exception in opening the requested file is mapped to a ``404``, -- ``'File not found'`` error. If there was a ``'If-Modified-Since'`` -+ ``'File not found'`` error. If there was an ``'If-Modified-Since'`` - header in the request, and the file was not modified after this time, - a ``304``, ``'Not Modified'`` response is sent. Otherwise, the content - type is guessed by calling the :meth:`guess_type` method, which in turn -diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst -index 5ea8044e1ec..54995ddbfbc 100644 ---- a/Doc/library/importlib.resources.abc.rst -+++ b/Doc/library/importlib.resources.abc.rst -@@ -22,7 +22,7 @@ - something like a data file that lives next to the ``__init__.py`` - file of the package. The purpose of this class is to help abstract - out the accessing of such data files so that it does not matter if -- the package and its data file(s) are stored in a e.g. zip file -+ the package and its data file(s) are stored e.g. in a zip file - versus on the file system. - - For any of methods of this class, a *resource* argument is diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst -index d92bb2f8e5c..c7faf8ba218 100644 +index b100e6c8e85..c7faf8ba218 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst -@@ -657,7 +657,7 @@ - something like a data file that lives next to the ``__init__.py`` - file of the package. The purpose of this class is to help abstract - out the accessing of such data files so that it does not matter if -- the package and its data file(s) are stored in a e.g. zip file -+ the package and its data file(s) are stored e.g. in a zip file - versus on the file system. - - For any of methods of this class, a *resource* argument is @@ -1241,6 +1241,69 @@ and how the module's :attr:`__file__` is populated. @@ -5729,507 +166,6 @@ index d92bb2f8e5c..c7faf8ba218 100644 :mod:`importlib.util` -- Utility code for importers --------------------------------------------------- -diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst -index 7d1aab8e299..dbf7d6868b0 100644 ---- a/Doc/library/inspect.rst -+++ b/Doc/library/inspect.rst -@@ -42,220 +42,233 @@ - .. this function name is too big to fit in the ascii-art table below - .. 
|coroutine-origin-link| replace:: :func:`sys.set_coroutine_origin_tracking_depth` - --+-----------+-------------------+---------------------------+ --| Type | Attribute | Description | --+===========+===================+===========================+ --| class | __doc__ | documentation string | --+-----------+-------------------+---------------------------+ --| | __name__ | name with which this | --| | | class was defined | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | __module__ | name of module in which | --| | | this class was defined | --+-----------+-------------------+---------------------------+ --| | __type_params__ | A tuple containing the | --| | | :ref:`type parameters | --| | | ` of | --| | | a generic class | --+-----------+-------------------+---------------------------+ --| method | __doc__ | documentation string | --+-----------+-------------------+---------------------------+ --| | __name__ | name with which this | --| | | method was defined | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | __func__ | function object | --| | | containing implementation | --| | | of method | --+-----------+-------------------+---------------------------+ --| | __self__ | instance to which this | --| | | method is bound, or | --| | | ``None`` | --+-----------+-------------------+---------------------------+ --| | __module__ | name of module in which | --| | | this method was defined | --+-----------+-------------------+---------------------------+ --| function | __doc__ | documentation string | --+-----------+-------------------+---------------------------+ --| | __name__ | name with which this | --| | | function was defined | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | __code__ | code object containing | --| | | compiled function | --| | | :term:`bytecode` | --+-----------+-------------------+---------------------------+ --| | __defaults__ | tuple of any default | --| | | values for positional or | --| | | keyword parameters | --+-----------+-------------------+---------------------------+ --| | __kwdefaults__ | mapping of any default | --| | | values for keyword-only | --| | | parameters | --+-----------+-------------------+---------------------------+ --| | __globals__ | global namespace in which | --| | | this function was defined | --+-----------+-------------------+---------------------------+ --| | __builtins__ | builtins namespace | --+-----------+-------------------+---------------------------+ --| | __annotations__ | mapping of parameters | --| | | names to annotations; | --| | | ``"return"`` key is | --| | | reserved for return | --| | | annotations. 
| --+-----------+-------------------+---------------------------+ --| | __type_params__ | A tuple containing the | --| | | :ref:`type parameters | --| | | ` of | --| | | a generic function | --+-----------+-------------------+---------------------------+ --| | __module__ | name of module in which | --| | | this function was defined | --+-----------+-------------------+---------------------------+ --| traceback | tb_frame | frame object at this | --| | | level | --+-----------+-------------------+---------------------------+ --| | tb_lasti | index of last attempted | --| | | instruction in bytecode | --+-----------+-------------------+---------------------------+ --| | tb_lineno | current line number in | --| | | Python source code | --+-----------+-------------------+---------------------------+ --| | tb_next | next inner traceback | --| | | object (called by this | --| | | level) | --+-----------+-------------------+---------------------------+ --| frame | f_back | next outer frame object | --| | | (this frame's caller) | --+-----------+-------------------+---------------------------+ --| | f_builtins | builtins namespace seen | --| | | by this frame | --+-----------+-------------------+---------------------------+ --| | f_code | code object being | --| | | executed in this frame | --+-----------+-------------------+---------------------------+ --| | f_globals | global namespace seen by | --| | | this frame | --+-----------+-------------------+---------------------------+ --| | f_lasti | index of last attempted | --| | | instruction in bytecode | --+-----------+-------------------+---------------------------+ --| | f_lineno | current line number in | --| | | Python source code | --+-----------+-------------------+---------------------------+ --| | f_locals | local namespace seen by | --| | | this frame | --+-----------+-------------------+---------------------------+ --| | f_trace | tracing function for this | --| | | frame, or ``None`` | --+-----------+-------------------+---------------------------+ --| code | co_argcount | number of arguments (not | --| | | including keyword only | --| | | arguments, \* or \*\* | --| | | args) | --+-----------+-------------------+---------------------------+ --| | co_code | string of raw compiled | --| | | bytecode | --+-----------+-------------------+---------------------------+ --| | co_cellvars | tuple of names of cell | --| | | variables (referenced by | --| | | containing scopes) | --+-----------+-------------------+---------------------------+ --| | co_consts | tuple of constants used | --| | | in the bytecode | --+-----------+-------------------+---------------------------+ --| | co_filename | name of file in which | --| | | this code object was | --| | | created | --+-----------+-------------------+---------------------------+ --| | co_firstlineno | number of first line in | --| | | Python source code | --+-----------+-------------------+---------------------------+ --| | co_flags | bitmap of ``CO_*`` flags, | --| | | read more :ref:`here | --| | | `| --+-----------+-------------------+---------------------------+ --| | co_lnotab | encoded mapping of line | --| | | numbers to bytecode | --| | | indices | --+-----------+-------------------+---------------------------+ --| | co_freevars | tuple of names of free | --| | | variables (referenced via | --| | | a function's closure) | --+-----------+-------------------+---------------------------+ --| | co_posonlyargcount| number of positional only | --| | | arguments | 
--+-----------+-------------------+---------------------------+ --| | co_kwonlyargcount | number of keyword only | --| | | arguments (not including | --| | | \*\* arg) | --+-----------+-------------------+---------------------------+ --| | co_name | name with which this code | --| | | object was defined | --+-----------+-------------------+---------------------------+ --| | co_qualname | fully qualified name with | --| | | which this code object | --| | | was defined | --+-----------+-------------------+---------------------------+ --| | co_names | tuple of names other | --| | | than arguments and | --| | | function locals | --+-----------+-------------------+---------------------------+ --| | co_nlocals | number of local variables | --+-----------+-------------------+---------------------------+ --| | co_stacksize | virtual machine stack | --| | | space required | --+-----------+-------------------+---------------------------+ --| | co_varnames | tuple of names of | --| | | arguments and local | --| | | variables | --+-----------+-------------------+---------------------------+ --| generator | __name__ | name | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | gi_frame | frame | --+-----------+-------------------+---------------------------+ --| | gi_running | is the generator running? | --+-----------+-------------------+---------------------------+ --| | gi_code | code | --+-----------+-------------------+---------------------------+ --| | gi_yieldfrom | object being iterated by | --| | | ``yield from``, or | --| | | ``None`` | --+-----------+-------------------+---------------------------+ --| coroutine | __name__ | name | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | cr_await | object being awaited on, | --| | | or ``None`` | --+-----------+-------------------+---------------------------+ --| | cr_frame | frame | --+-----------+-------------------+---------------------------+ --| | cr_running | is the coroutine running? | --+-----------+-------------------+---------------------------+ --| | cr_code | code | --+-----------+-------------------+---------------------------+ --| | cr_origin | where coroutine was | --| | | created, or ``None``. 
See | --| | | |coroutine-origin-link| | --+-----------+-------------------+---------------------------+ --| builtin | __doc__ | documentation string | --+-----------+-------------------+---------------------------+ --| | __name__ | original name of this | --| | | function or method | --+-----------+-------------------+---------------------------+ --| | __qualname__ | qualified name | --+-----------+-------------------+---------------------------+ --| | __self__ | instance to which a | --| | | method is bound, or | --| | | ``None`` | --+-----------+-------------------+---------------------------+ -++-----------------+-------------------+---------------------------+ -+| Type | Attribute | Description | -++=================+===================+===========================+ -+| class | __doc__ | documentation string | -++-----------------+-------------------+---------------------------+ -+| | __name__ | name with which this | -+| | | class was defined | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | __module__ | name of module in which | -+| | | this class was defined | -++-----------------+-------------------+---------------------------+ -+| | __type_params__ | A tuple containing the | -+| | | :ref:`type parameters | -+| | | ` of | -+| | | a generic class | -++-----------------+-------------------+---------------------------+ -+| method | __doc__ | documentation string | -++-----------------+-------------------+---------------------------+ -+| | __name__ | name with which this | -+| | | method was defined | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | __func__ | function object | -+| | | containing implementation | -+| | | of method | -++-----------------+-------------------+---------------------------+ -+| | __self__ | instance to which this | -+| | | method is bound, or | -+| | | ``None`` | -++-----------------+-------------------+---------------------------+ -+| | __module__ | name of module in which | -+| | | this method was defined | -++-----------------+-------------------+---------------------------+ -+| function | __doc__ | documentation string | -++-----------------+-------------------+---------------------------+ -+| | __name__ | name with which this | -+| | | function was defined | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | __code__ | code object containing | -+| | | compiled function | -+| | | :term:`bytecode` | -++-----------------+-------------------+---------------------------+ -+| | __defaults__ | tuple of any default | -+| | | values for positional or | -+| | | keyword parameters | -++-----------------+-------------------+---------------------------+ -+| | __kwdefaults__ | mapping of any default | -+| | | values for keyword-only | -+| | | parameters | -++-----------------+-------------------+---------------------------+ -+| | __globals__ | global namespace in which | -+| | | this function was defined | -++-----------------+-------------------+---------------------------+ -+| | __builtins__ | builtins namespace | -++-----------------+-------------------+---------------------------+ -+| | __annotations__ | mapping of parameters | -+| | | names to annotations; | -+| | | 
``"return"`` key is | -+| | | reserved for return | -+| | | annotations. | -++-----------------+-------------------+---------------------------+ -+| | __type_params__ | A tuple containing the | -+| | | :ref:`type parameters | -+| | | ` of | -+| | | a generic function | -++-----------------+-------------------+---------------------------+ -+| | __module__ | name of module in which | -+| | | this function was defined | -++-----------------+-------------------+---------------------------+ -+| traceback | tb_frame | frame object at this | -+| | | level | -++-----------------+-------------------+---------------------------+ -+| | tb_lasti | index of last attempted | -+| | | instruction in bytecode | -++-----------------+-------------------+---------------------------+ -+| | tb_lineno | current line number in | -+| | | Python source code | -++-----------------+-------------------+---------------------------+ -+| | tb_next | next inner traceback | -+| | | object (called by this | -+| | | level) | -++-----------------+-------------------+---------------------------+ -+| frame | f_back | next outer frame object | -+| | | (this frame's caller) | -++-----------------+-------------------+---------------------------+ -+| | f_builtins | builtins namespace seen | -+| | | by this frame | -++-----------------+-------------------+---------------------------+ -+| | f_code | code object being | -+| | | executed in this frame | -++-----------------+-------------------+---------------------------+ -+| | f_globals | global namespace seen by | -+| | | this frame | -++-----------------+-------------------+---------------------------+ -+| | f_lasti | index of last attempted | -+| | | instruction in bytecode | -++-----------------+-------------------+---------------------------+ -+| | f_lineno | current line number in | -+| | | Python source code | -++-----------------+-------------------+---------------------------+ -+| | f_locals | local namespace seen by | -+| | | this frame | -++-----------------+-------------------+---------------------------+ -+| | f_trace | tracing function for this | -+| | | frame, or ``None`` | -++-----------------+-------------------+---------------------------+ -+| code | co_argcount | number of arguments (not | -+| | | including keyword only | -+| | | arguments, \* or \*\* | -+| | | args) | -++-----------------+-------------------+---------------------------+ -+| | co_code | string of raw compiled | -+| | | bytecode | -++-----------------+-------------------+---------------------------+ -+| | co_cellvars | tuple of names of cell | -+| | | variables (referenced by | -+| | | containing scopes) | -++-----------------+-------------------+---------------------------+ -+| | co_consts | tuple of constants used | -+| | | in the bytecode | -++-----------------+-------------------+---------------------------+ -+| | co_filename | name of file in which | -+| | | this code object was | -+| | | created | -++-----------------+-------------------+---------------------------+ -+| | co_firstlineno | number of first line in | -+| | | Python source code | -++-----------------+-------------------+---------------------------+ -+| | co_flags | bitmap of ``CO_*`` flags, | -+| | | read more :ref:`here | -+| | | `| -++-----------------+-------------------+---------------------------+ -+| | co_lnotab | encoded mapping of line | -+| | | numbers to bytecode | -+| | | indices | -++-----------------+-------------------+---------------------------+ -+| | co_freevars | tuple of names of free | -+| | | variables 
(referenced via | -+| | | a function's closure) | -++-----------------+-------------------+---------------------------+ -+| | co_posonlyargcount| number of positional only | -+| | | arguments | -++-----------------+-------------------+---------------------------+ -+| | co_kwonlyargcount | number of keyword only | -+| | | arguments (not including | -+| | | \*\* arg) | -++-----------------+-------------------+---------------------------+ -+| | co_name | name with which this code | -+| | | object was defined | -++-----------------+-------------------+---------------------------+ -+| | co_qualname | fully qualified name with | -+| | | which this code object | -+| | | was defined | -++-----------------+-------------------+---------------------------+ -+| | co_names | tuple of names other | -+| | | than arguments and | -+| | | function locals | -++-----------------+-------------------+---------------------------+ -+| | co_nlocals | number of local variables | -++-----------------+-------------------+---------------------------+ -+| | co_stacksize | virtual machine stack | -+| | | space required | -++-----------------+-------------------+---------------------------+ -+| | co_varnames | tuple of names of | -+| | | arguments and local | -+| | | variables | -++-----------------+-------------------+---------------------------+ -+| generator | __name__ | name | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | gi_frame | frame | -++-----------------+-------------------+---------------------------+ -+| | gi_running | is the generator running? | -++-----------------+-------------------+---------------------------+ -+| | gi_code | code | -++-----------------+-------------------+---------------------------+ -+| | gi_yieldfrom | object being iterated by | -+| | | ``yield from``, or | -+| | | ``None`` | -++-----------------+-------------------+---------------------------+ -+| async generator | __name__ | name | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | ag_await | object being awaited on, | -+| | | or ``None`` | -++-----------------+-------------------+---------------------------+ -+| | ag_frame | frame | -++-----------------+-------------------+---------------------------+ -+| | ag_running | is the generator running? | -++-----------------+-------------------+---------------------------+ -+| | ag_code | code | -++-----------------+-------------------+---------------------------+ -+| coroutine | __name__ | name | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | cr_await | object being awaited on, | -+| | | or ``None`` | -++-----------------+-------------------+---------------------------+ -+| | cr_frame | frame | -++-----------------+-------------------+---------------------------+ -+| | cr_running | is the coroutine running? | -++-----------------+-------------------+---------------------------+ -+| | cr_code | code | -++-----------------+-------------------+---------------------------+ -+| | cr_origin | where coroutine was | -+| | | created, or ``None``. 
See | -+| | | |coroutine-origin-link| | -++-----------------+-------------------+---------------------------+ -+| builtin | __doc__ | documentation string | -++-----------------+-------------------+---------------------------+ -+| | __name__ | original name of this | -+| | | function or method | -++-----------------+-------------------+---------------------------+ -+| | __qualname__ | qualified name | -++-----------------+-------------------+---------------------------+ -+| | __self__ | instance to which a | -+| | | method is bound, or | -+| | | ``None`` | -++-----------------+-------------------+---------------------------+ - - .. versionchanged:: 3.5 - -@@ -437,7 +450,7 @@ - - .. versionchanged:: 3.8 - Functions wrapped in :func:`functools.partial` now return ``True`` if the -- wrapped function is a :term:`asynchronous generator` function. -+ wrapped function is an :term:`asynchronous generator` function. - - - .. function:: isasyncgen(object) -@@ -896,7 +909,7 @@ - - .. attribute:: Parameter.kind.description - -- Describes a enum value of :attr:`Parameter.kind`. -+ Describes an enum value of :attr:`Parameter.kind`. - - .. versionadded:: 3.8 - -@@ -1191,7 +1204,7 @@ - This function handles several details for you: - - * If ``eval_str`` is true, values of type ``str`` will -- be un-stringized using :func:`eval()`. This is intended -+ be un-stringized using :func:`eval`. This is intended - for use with stringized annotations - (``from __future__ import annotations``). - * If ``obj`` doesn't have an annotations dict, returns an -@@ -1205,16 +1218,16 @@ - * Always, always, always returns a freshly created dict. - - ``eval_str`` controls whether or not values of type ``str`` are replaced -- with the result of calling :func:`eval()` on those values: -+ with the result of calling :func:`eval` on those values: - -- * If eval_str is true, :func:`eval()` is called on values of type ``str``. -- (Note that ``get_annotations`` doesn't catch exceptions; if :func:`eval()` -+ * If eval_str is true, :func:`eval` is called on values of type ``str``. -+ (Note that ``get_annotations`` doesn't catch exceptions; if :func:`eval` - raises an exception, it will unwind the stack past the ``get_annotations`` - call.) - * If eval_str is false (the default), values of type ``str`` are unchanged. - -- ``globals`` and ``locals`` are passed in to :func:`eval()`; see the documentation -- for :func:`eval()` for more information. If ``globals`` or ``locals`` -+ ``globals`` and ``locals`` are passed in to :func:`eval`; see the documentation -+ for :func:`eval` for more information. If ``globals`` or ``locals`` - is ``None``, this function may replace that value with a context-specific - default, contingent on ``type(obj)``: - diff --git a/Doc/library/intro.rst b/Doc/library/intro.rst index 5a4c9b8b16a..ffc8939d211 100644 --- a/Doc/library/intro.rst @@ -6288,336 +224,8 @@ index 5a4c9b8b16a..ffc8939d211 100644 + + As a result, Python library that involve console manipulation (such as + :mod:`curses` and :mod:`readline`) are not available on iOS. -diff --git a/Doc/library/io.rst b/Doc/library/io.rst -index 748c49968f5..f793d7a7ef9 100644 ---- a/Doc/library/io.rst -+++ b/Doc/library/io.rst -@@ -55,7 +55,7 @@ - encoding and decoding of data is made transparently as well as optional - translation of platform-specific newline characters. 
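Illustrative sketch (editor's example, not part of the upstream patch; the ``scale`` function is invented): the ``eval_str`` behaviour of :func:`inspect.get_annotations` described in the inspect changes above works like this::

    from __future__ import annotations

    import inspect

    def scale(x: int, factor: float) -> float:
        return x * factor

    # With ``from __future__ import annotations`` the annotations are stored
    # as strings; eval_str=True evaluates them back into real objects.
    inspect.get_annotations(scale)
    # {'x': 'int', 'factor': 'float', 'return': 'float'}
    inspect.get_annotations(scale, eval_str=True)
    # {'x': <class 'int'>, 'factor': <class 'float'>, 'return': <class 'float'>}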
- --The easiest way to create a text stream is with :meth:`open()`, optionally -+The easiest way to create a text stream is with :meth:`open`, optionally - specifying an encoding:: - - f = open("myfile.txt", "r", encoding="utf-8") -@@ -77,7 +77,7 @@ - category of streams can be used for all kinds of non-text data, and also when - manual control over the handling of text data is desired. - --The easiest way to create a binary stream is with :meth:`open()` with ``'b'`` in -+The easiest way to create a binary stream is with :meth:`open` with ``'b'`` in - the mode string:: - - f = open("myfile.jpg", "rb") -@@ -950,7 +950,7 @@ - :class:`TextIOBase`. - - *encoding* gives the name of the encoding that the stream will be decoded or -- encoded with. It defaults to :func:`locale.getencoding()`. -+ encoded with. It defaults to :func:`locale.getencoding`. - ``encoding="locale"`` can be used to specify the current locale's encoding - explicitly. See :ref:`io-text-encoding` for more information. - -@@ -1182,7 +1182,7 @@ - is raised. Note this doesn't prohibit a different thread from entering the - buffered object. - --The above implicitly extends to text files, since the :func:`open()` function -+The above implicitly extends to text files, since the :func:`open` function - will wrap a buffered object inside a :class:`TextIOWrapper`. This includes --standard streams and therefore affects the built-in :func:`print()` function as -+standard streams and therefore affects the built-in :func:`print` function as - well. -diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst -index d359451b397..d780969ca4d 100644 ---- a/Doc/library/ipaddress.rst -+++ b/Doc/library/ipaddress.rst -@@ -983,7 +983,7 @@ - .. function:: collapse_addresses(addresses) - - Return an iterator of the collapsed :class:`IPv4Network` or -- :class:`IPv6Network` objects. *addresses* is an iterator of -+ :class:`IPv6Network` objects. *addresses* is an :term:`iterable` of - :class:`IPv4Network` or :class:`IPv6Network` objects. A :exc:`TypeError` is - raised if *addresses* contains mixed version objects. - -@@ -1003,7 +1003,7 @@ - - doesn't make sense. There are some times however, where you may wish to - have :mod:`ipaddress` sort these anyway. If you need to do this, you can use -- this function as the *key* argument to :func:`sorted()`. -+ this function as the *key* argument to :func:`sorted`. - - *obj* is either a network or address object. - -diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst -index 21bb3f1f840..3fab46c3c0a 100644 ---- a/Doc/library/itertools.rst -+++ b/Doc/library/itertools.rst -@@ -329,7 +329,7 @@ - yield n - n += step - -- When counting with floating point numbers, better accuracy can sometimes be -+ When counting with floating-point numbers, better accuracy can sometimes be - achieved by substituting multiplicative code such as: ``(start + step * i - for i in count())``. - -diff --git a/Doc/library/json.rst b/Doc/library/json.rst -index a1aba65cecf..892972d297c 100644 ---- a/Doc/library/json.rst -+++ b/Doc/library/json.rst -@@ -230,28 +230,28 @@ - - *object_hook* is an optional function that will be called with the result of - any object literal decoded (a :class:`dict`). The return value of -- *object_hook* will be used instead of the :class:`dict`. This feature can be used -- to implement custom decoders (e.g. `JSON-RPC `_ -- class hinting). -+ *object_hook* will be used instead of the :class:`dict`. This feature can -+ be used to implement custom decoders (e.g. 
`JSON-RPC -+ `_ class hinting). - - *object_pairs_hook* is an optional function that will be called with the - result of any object literal decoded with an ordered list of pairs. The - return value of *object_pairs_hook* will be used instead of the -- :class:`dict`. This feature can be used to implement custom decoders. -- If *object_hook* is also defined, the *object_pairs_hook* takes priority. -+ :class:`dict`. This feature can be used to implement custom decoders. If -+ *object_hook* is also defined, the *object_pairs_hook* takes priority. - - .. versionchanged:: 3.1 - Added support for *object_pairs_hook*. - -- *parse_float*, if specified, will be called with the string of every JSON -- float to be decoded. By default, this is equivalent to ``float(num_str)``. -- This can be used to use another datatype or parser for JSON floats -- (e.g. :class:`decimal.Decimal`). -+ *parse_float* is an optional function that will be called with the string of -+ every JSON float to be decoded. By default, this is equivalent to -+ ``float(num_str)``. This can be used to use another datatype or parser for -+ JSON floats (e.g. :class:`decimal.Decimal`). - -- *parse_int*, if specified, will be called with the string of every JSON int -- to be decoded. By default, this is equivalent to ``int(num_str)``. This can -- be used to use another datatype or parser for JSON integers -- (e.g. :class:`float`). -+ *parse_int* is an optional function that will be called with the string of -+ every JSON int to be decoded. By default, this is equivalent to -+ ``int(num_str)``. This can be used to use another datatype or parser for -+ JSON integers (e.g. :class:`float`). - - .. versionchanged:: 3.11 - The default *parse_int* of :func:`int` now limits the maximum length of -@@ -259,10 +259,9 @@ - conversion length limitation ` to help avoid denial - of service attacks. - -- *parse_constant*, if specified, will be called with one of the following -- strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. -- This can be used to raise an exception if invalid JSON numbers -- are encountered. -+ *parse_constant* is an optional function that will be called with one of the -+ following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be -+ used to raise an exception if invalid JSON numbers are encountered. - - .. versionchanged:: 3.1 - *parse_constant* doesn't get called on 'null', 'true', 'false' anymore. -@@ -334,34 +333,33 @@ - It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as their - corresponding ``float`` values, which is outside the JSON spec. - -- *object_hook*, if specified, will be called with the result of every JSON -- object decoded and its return value will be used in place of the given -- :class:`dict`. This can be used to provide custom deserializations (e.g. to -- support `JSON-RPC `_ class hinting). -+ *object_hook* is an optional function that will be called with the result of -+ every JSON object decoded and its return value will be used in place of the -+ given :class:`dict`. This can be used to provide custom deserializations -+ (e.g. to support `JSON-RPC `_ class hinting). - -- *object_pairs_hook*, if specified will be called with the result of every -- JSON object decoded with an ordered list of pairs. The return value of -- *object_pairs_hook* will be used instead of the :class:`dict`. This -- feature can be used to implement custom decoders. If *object_hook* is also -- defined, the *object_pairs_hook* takes priority. 
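Illustrative sketch (editor's example, not part of the upstream patch; the JSON payloads are invented): the ``parse_float`` and ``object_pairs_hook`` parameters described above can be exercised like this::

    import json
    from decimal import Decimal

    # parse_float is called with the raw string of every JSON float, so an
    # exact Decimal('9.95') is built instead of an inexact binary float.
    json.loads('{"price": 9.95}', parse_float=Decimal)
    # {'price': Decimal('9.95')}

    # object_pairs_hook receives the decoded (key, value) pairs in order.
    json.loads('{"b": 1, "a": 2}', object_pairs_hook=list)
    # [('b', 1), ('a', 2)]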
-+ *object_pairs_hook* is an optional function that will be called with the -+ result of every JSON object decoded with an ordered list of pairs. The -+ return value of *object_pairs_hook* will be used instead of the -+ :class:`dict`. This feature can be used to implement custom decoders. If -+ *object_hook* is also defined, the *object_pairs_hook* takes priority. - - .. versionchanged:: 3.1 - Added support for *object_pairs_hook*. - -- *parse_float*, if specified, will be called with the string of every JSON -- float to be decoded. By default, this is equivalent to ``float(num_str)``. -- This can be used to use another datatype or parser for JSON floats -- (e.g. :class:`decimal.Decimal`). -+ *parse_float* is an optional function that will be called with the string of -+ every JSON float to be decoded. By default, this is equivalent to -+ ``float(num_str)``. This can be used to use another datatype or parser for -+ JSON floats (e.g. :class:`decimal.Decimal`). - -- *parse_int*, if specified, will be called with the string of every JSON int -- to be decoded. By default, this is equivalent to ``int(num_str)``. This can -- be used to use another datatype or parser for JSON integers -- (e.g. :class:`float`). -+ *parse_int* is an optional function that will be called with the string of -+ every JSON int to be decoded. By default, this is equivalent to -+ ``int(num_str)``. This can be used to use another datatype or parser for -+ JSON integers (e.g. :class:`float`). - -- *parse_constant*, if specified, will be called with one of the following -- strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. -- This can be used to raise an exception if invalid JSON numbers -- are encountered. -+ *parse_constant* is an optional function that will be called with one of the -+ following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be -+ used to raise an exception if invalid JSON numbers are encountered. - - If *strict* is false (``True`` is the default), then control characters - will be allowed inside strings. Control characters in this context are -diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst -index 10c376397cf..60975bf9177 100644 ---- a/Doc/library/locale.rst -+++ b/Doc/library/locale.rst -@@ -434,7 +434,7 @@ - .. function:: format_string(format, val, grouping=False, monetary=False) - - Formats a number *val* according to the current :const:`LC_NUMERIC` setting. -- The format follows the conventions of the ``%`` operator. For floating point -+ The format follows the conventions of the ``%`` operator. For floating-point - values, the decimal point is modified if appropriate. If *grouping* is ``True``, - also takes the grouping into account. - -@@ -465,7 +465,7 @@ - - .. function:: str(float) - -- Formats a floating point number using the same format as the built-in function -+ Formats a floating-point number using the same format as the built-in function - ``str(float)``, but takes the decimal point into account. - - -diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst -index 23aac191f05..50ffff60250 100644 ---- a/Doc/library/logging.config.rst -+++ b/Doc/library/logging.config.rst -@@ -69,7 +69,7 @@ - dictConfigClass(config).configure() - - For example, a subclass of :class:`DictConfigurator` could call -- ``DictConfigurator.__init__()`` in its own :meth:`__init__()`, then -+ ``DictConfigurator.__init__()`` in its own :meth:`__init__`, then - set up custom prefixes which would be usable in the subsequent - :meth:`configure` call. 
:attr:`dictConfigClass` would be bound to - this new subclass, and then :func:`dictConfig` could be called exactly as -@@ -752,9 +752,12 @@ - - If the ``queue`` key is present, the corresponding value can be one of the following: - --* An actual instance of :class:`queue.Queue` or a subclass thereof. This is of course -- only possible if you are constructing or modifying the configuration dictionary in -- code. -+* An object implementing the :class:`queue.Queue` public API. For instance, -+ this may be an actual instance of :class:`queue.Queue` or a subclass thereof, -+ or a proxy obtained by :meth:`multiprocessing.managers.SyncManager.Queue`. -+ -+ This is of course only possible if you are constructing or modifying -+ the configuration dictionary in code. - - * A string that resolves to a callable which, when called with no arguments, returns - the :class:`queue.Queue` instance to use. That callable could be a -diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst -index 1e4e728395b..6eb8dec44eb 100644 ---- a/Doc/library/mailbox.rst -+++ b/Doc/library/mailbox.rst -@@ -1278,7 +1278,7 @@ - - .. method:: get_visible() - -- Return an :class:`Message` instance whose headers are the message's -+ Return a :class:`Message` instance whose headers are the message's - visible headers and whose body is empty. - - -diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst -index ce549b73fe5..a85d9206810 100644 ---- a/Doc/library/marshal.rst -+++ b/Doc/library/marshal.rst -@@ -38,8 +38,8 @@ - - Not all Python object types are supported; in general, only objects whose value - is independent from a particular invocation of Python can be written and read by --this module. The following types are supported: booleans, integers, floating --point numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, -+this module. The following types are supported: booleans, integers, floating-point -+numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, - frozensets, dictionaries, and code objects, where it should be understood that - tuples, lists, sets, frozensets and dictionaries are only supported as long as - the values contained therein are themselves supported. The -@@ -121,7 +121,7 @@ - - Indicates the format that the module uses. Version 0 is the historical - format, version 1 shares interned strings and version 2 uses a binary format -- for floating point numbers. -+ for floating-point numbers. - Version 3 adds support for object instancing and recursion. - The current version is 4. - -diff --git a/Doc/library/math.rst b/Doc/library/math.rst -index b6a7d98a295..40742fdafea 100644 ---- a/Doc/library/math.rst -+++ b/Doc/library/math.rst -@@ -107,7 +107,7 @@ - - .. function:: fsum(iterable) - -- Return an accurate floating point sum of values in the iterable. Avoids -+ Return an accurate floating-point sum of values in the iterable. Avoids - loss of precision by tracking multiple intermediate partial sums. - - The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the -@@ -117,7 +117,7 @@ - least significant bit. - - For further discussion and two alternative approaches, see the `ASPN cookbook -- recipes for accurate floating point summation -+ recipes for accurate floating-point summation - `_\. - - -@@ -288,7 +288,7 @@ - If the result of the remainder operation is zero, that zero will have - the same sign as *x*. 
- -- On platforms using IEEE 754 binary floating-point, the result of this -+ On platforms using IEEE 754 binary floating point, the result of this - operation is always exactly representable: no rounding error is introduced. - - .. versionadded:: 3.7 -diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst -index 930b4793189..1522285b4ef 100644 ---- a/Doc/library/mimetypes.rst -+++ b/Doc/library/mimetypes.rst -@@ -272,3 +272,13 @@ - types, else to the list of non-standard types. - - .. versionadded:: 3.2 -+ -+ -+ .. method:: MimeTypes.add_type(type, ext, strict=True) -+ -+ Add a mapping from the MIME type *type* to the extension *ext*. When the -+ extension is already known, the new type will replace the old one. When the type -+ is already known the extension will be added to the list of known extensions. -+ -+ When *strict* is ``True`` (the default), the mapping will be added to the -+ official MIME types, otherwise to the non-standard ones. diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst -index d6474ef975b..cd9ace02f6d 100644 +index f2a9ada85e2..cd9ace02f6d 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -8,7 +8,7 @@ @@ -6629,218 +237,8 @@ index d6474ef975b..cd9ace02f6d 100644 Introduction ------------ -@@ -254,6 +254,7 @@ - p.join() - - Queues are thread and process safe. -+ Any object put into a :mod:`~multiprocessing` queue will be serialized. - - **Pipes** - -@@ -281,6 +282,8 @@ - of corruption from processes using different ends of the pipe at the same - time. - -+ The :meth:`~Connection.send` method serializes the the object and -+ :meth:`~Connection.recv` re-creates the object. - - Synchronization between processes - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -@@ -502,7 +505,7 @@ - The constructor should always be called with keyword arguments. *group* - should always be ``None``; it exists solely for compatibility with - :class:`threading.Thread`. *target* is the callable object to be invoked by -- the :meth:`run()` method. It defaults to ``None``, meaning nothing is -+ the :meth:`run` method. It defaults to ``None``, meaning nothing is - called. *name* is the process name (see :attr:`name` for more details). - *args* is the argument tuple for the target invocation. *kwargs* is a - dictionary of keyword arguments for the target invocation. If provided, -@@ -639,7 +642,7 @@ - - You can use this value if you want to wait on several events at - once using :func:`multiprocessing.connection.wait`. Otherwise -- calling :meth:`join()` is simpler. -+ calling :meth:`join` is simpler. - - On Windows, this is an OS handle usable with the ``WaitForSingleObject`` - and ``WaitForMultipleObjects`` family of API calls. On POSIX, this is -@@ -666,7 +669,7 @@ - - .. method:: kill() - -- Same as :meth:`terminate()` but using the ``SIGKILL`` signal on POSIX. -+ Same as :meth:`terminate` but using the ``SIGKILL`` signal on POSIX. - - .. versionadded:: 3.7 - -@@ -709,7 +712,7 @@ - - .. exception:: BufferTooShort - -- Exception raised by :meth:`Connection.recv_bytes_into()` when the supplied -+ Exception raised by :meth:`Connection.recv_bytes_into` when the supplied - buffer object is too small for the message read. - - If ``e`` is an instance of :exc:`BufferTooShort` then ``e.args[0]`` will give -@@ -745,6 +748,11 @@ - semaphore used to count the number of unfinished tasks may eventually overflow, - raising an exception. 
- -+One difference from other Python queue implementations, is that :mod:`multiprocessing` -+queues serializes all objects that are put into them using :mod:`pickle`. -+The object return by the get method is a re-created object that does not share memory -+with the original object. -+ - Note that one can also create a shared queue by using a manager object -- see - :ref:`multiprocessing-managers`. - -@@ -811,6 +819,8 @@ - used for receiving messages and ``conn2`` can only be used for sending - messages. - -+ The :meth:`~multiprocessing.Connection.send` method serializes the the object using -+ :mod:`pickle` and the :meth:`~multiprocessing.Connection.recv` re-creates the object. - - .. class:: Queue([maxsize]) - -@@ -837,6 +847,8 @@ - Return ``True`` if the queue is empty, ``False`` otherwise. Because of - multithreading/multiprocessing semantics, this is not reliable. - -+ May raise an :exc:`OSError` on closed queues. (not guaranteed) -+ - .. method:: full() - - Return ``True`` if the queue is full, ``False`` otherwise. Because of -@@ -940,6 +952,8 @@ - - Return ``True`` if the queue is empty, ``False`` otherwise. - -+ Always raises an :exc:`OSError` if the SimpleQueue is closed. -+ - .. method:: get() - - Remove and return an item from the queue. -@@ -1452,17 +1466,6 @@ - On macOS, ``sem_timedwait`` is unsupported, so calling ``acquire()`` with - a timeout will emulate that function's behavior using a sleeping loop. - --.. note:: -- -- If the SIGINT signal generated by :kbd:`Ctrl-C` arrives while the main thread is -- blocked by a call to :meth:`BoundedSemaphore.acquire`, :meth:`Lock.acquire`, -- :meth:`RLock.acquire`, :meth:`Semaphore.acquire`, :meth:`Condition.acquire` -- or :meth:`Condition.wait` then the call will be immediately interrupted and -- :exc:`KeyboardInterrupt` will be raised. -- -- This differs from the behaviour of :mod:`threading` where SIGINT will be -- ignored while the equivalent blocking calls are in progress. -- - .. note:: - - Some of this package's functionality requires a functioning shared semaphore -@@ -2948,7 +2951,7 @@ - resulting in a bad file descriptor error, but introduces a potential danger - to applications which replace :func:`sys.stdin` with a "file-like object" - with output buffering. This danger is that if multiple processes call -- :meth:`~io.IOBase.close()` on this file-like object, it could result in the same -+ :meth:`~io.IOBase.close` on this file-like object, it could result in the same - data being flushed to the object multiple times, resulting in corruption. - - If you write a file-like object and implement your own caching, you can -diff --git a/Doc/library/nntplib.rst b/Doc/library/nntplib.rst -index 143e4e0c427..fde0bfc9f38 100644 ---- a/Doc/library/nntplib.rst -+++ b/Doc/library/nntplib.rst -@@ -484,14 +484,14 @@ - - .. method:: NNTP.head(message_spec=None, *, file=None) - -- Same as :meth:`article()`, but sends a ``HEAD`` command. The *lines* -+ Same as :meth:`article`, but sends a ``HEAD`` command. The *lines* - returned (or written to *file*) will only contain the message headers, not - the body. - - - .. method:: NNTP.body(message_spec=None, *, file=None) - -- Same as :meth:`article()`, but sends a ``BODY`` command. The *lines* -+ Same as :meth:`article`, but sends a ``BODY`` command. The *lines* - returned (or written to *file*) will only contain the message body, not the - headers. - -@@ -513,7 +513,7 @@ - - Send an ``IHAVE`` command. *message_id* is the id of the message to send - to the server (enclosed in ``'<'`` and ``'>'``). 
The *data* parameter -- and the return value are the same as for :meth:`post()`. -+ and the return value are the same as for :meth:`post`. - - - .. method:: NNTP.date() -@@ -560,7 +560,7 @@ - - Send an ``XOVER`` command. *start* and *end* are article numbers - delimiting the range of articles to select. The return value is the -- same of for :meth:`over()`. It is recommended to use :meth:`over()` -+ same of for :meth:`over`. It is recommended to use :meth:`over` - instead, since it will automatically use the newer ``OVER`` command - if available. - -diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst -index fc652d23f4f..15b5d5c3466 100644 ---- a/Doc/library/optparse.rst -+++ b/Doc/library/optparse.rst -@@ -1351,7 +1351,7 @@ - the list of arguments to process (default: ``sys.argv[1:]``) - - ``values`` -- an :class:`Values` object to store option arguments in (default: a -+ a :class:`Values` object to store option arguments in (default: a - new instance of :class:`Values`) -- if you give an existing object, the - option defaults will not be initialized on it - -diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst -index c5004c3f0df..51e89087e7f 100644 ---- a/Doc/library/os.path.rst -+++ b/Doc/library/os.path.rst -@@ -81,7 +81,7 @@ - - Return the longest common sub-path of each pathname in the sequence - *paths*. Raise :exc:`ValueError` if *paths* contain both absolute -- and relative pathnames, the *paths* are on the different drives or -+ and relative pathnames, if *paths* are on different drives, or - if *paths* is empty. Unlike :func:`commonprefix`, this returns a - valid path. - -@@ -198,14 +198,14 @@ - - .. function:: getatime(path) - -- Return the time of last access of *path*. The return value is a floating point number giving -+ Return the time of last access of *path*. The return value is a floating-point number giving - the number of seconds since the epoch (see the :mod:`time` module). Raise - :exc:`OSError` if the file does not exist or is inaccessible. - - - .. function:: getmtime(path) - -- Return the time of last modification of *path*. The return value is a floating point number -+ Return the time of last modification of *path*. The return value is a floating-point number - giving the number of seconds since the epoch (see the :mod:`time` module). - Raise :exc:`OSError` if the file does not exist or is inaccessible. - -@@ -359,7 +359,7 @@ - that contains symbolic links. On Windows, it converts forward slashes to - backward slashes. To normalize case, use :func:`normcase`. - -- .. note:: -+ .. note:: - On POSIX systems, in accordance with `IEEE Std 1003.1 2013 Edition; 4.13 - Pathname Resolution `_, - if a pathname begins with exactly two slashes, the first component diff --git a/Doc/library/os.rst b/Doc/library/os.rst -index a793d244de9..6beafbd9d00 100644 +index 3a5deaa1d69..6beafbd9d00 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -34,12 +34,13 @@ @@ -6863,28 +261,6 @@ index a793d244de9..6beafbd9d00 100644 .. note:: -@@ -113,8 +114,8 @@ - - * Use UTF-8 as the :term:`filesystem encoding `. --* :func:`sys.getfilesystemencoding()` returns ``'utf-8'``. --* :func:`locale.getpreferredencoding()` returns ``'utf-8'`` (the *do_setlocale* -+* :func:`sys.getfilesystemencoding` returns ``'utf-8'``. -+* :func:`locale.getpreferredencoding` returns ``'utf-8'`` (the *do_setlocale* - argument has no effect). 
- * :data:`sys.stdin`, :data:`sys.stdout`, and :data:`sys.stderr` all use - UTF-8 as their text encoding, with the ``surrogateescape`` -@@ -133,8 +134,8 @@ - - * Command line arguments, environment variables and filenames are decoded - to text using the UTF-8 encoding. --* :func:`os.fsdecode()` and :func:`os.fsencode()` use the UTF-8 encoding. --* :func:`open()`, :func:`io.open()`, and :func:`codecs.open()` use the UTF-8 -+* :func:`os.fsdecode` and :func:`os.fsencode` use the UTF-8 encoding. -+* :func:`open`, :func:`io.open`, and :func:`codecs.open` use the UTF-8 - encoding by default. However, they still use the strict error handler by - default so that attempting to open a binary file in text mode is likely - to raise an exception rather than producing nonsense data. @@ -784,6 +785,11 @@ :func:`socket.gethostname` or even ``socket.gethostbyaddr(socket.gethostname())``. @@ -6897,42 +273,6 @@ index a793d244de9..6beafbd9d00 100644 .. availability:: Unix. .. versionchanged:: 3.3 -@@ -1497,7 +1503,7 @@ - - .. function:: pwritev(fd, buffers, offset, flags=0, /) - -- Write the *buffers* contents to file descriptor *fd* at a offset *offset*, -+ Write the *buffers* contents to file descriptor *fd* at an offset *offset*, - leaving the file offset unchanged. *buffers* must be a sequence of - :term:`bytes-like objects `. Buffers are processed in - array order. Entire contents of the first buffer is written before -@@ -2756,7 +2762,7 @@ - - .. versionchanged:: 3.6 - Added support for the :term:`context manager` protocol and the -- :func:`~scandir.close()` method. If a :func:`scandir` iterator is neither -+ :func:`~scandir.close` method. If a :func:`scandir` iterator is neither - exhausted nor explicitly closed a :exc:`ResourceWarning` will be emitted - in its destructor. - -@@ -3701,7 +3707,7 @@ - new file descriptor is :ref:`non-inheritable `. - - *initval* is the initial value of the event counter. The initial value -- must be an 32 bit unsigned integer. Please note that the initial value is -+ must be a 32 bit unsigned integer. Please note that the initial value is - limited to a 32 bit unsigned int although the event counter is an unsigned - 64 bit integer with a maximum value of 2\ :sup:`64`\ -\ 2. - -@@ -3780,7 +3786,7 @@ - - .. data:: EFD_SEMAPHORE - -- Provide semaphore-like semantics for reads from a :func:`eventfd` file -+ Provide semaphore-like semantics for reads from an :func:`eventfd` file - descriptor. On read the internal counter is decremented by one. - - .. availability:: Linux >= 2.6.30 @@ -3997,7 +4003,7 @@ .. audit-event:: os.exec path,args,env os.execl @@ -7241,1276 +581,90 @@ index a793d244de9..6beafbd9d00 100644 Interface to the scheduler -diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst -index d4b0e072084..c5cf406372d 100644 ---- a/Doc/library/pathlib.rst -+++ b/Doc/library/pathlib.rst -@@ -21,6 +21,12 @@ - .. image:: pathlib-inheritance.png - :align: center - :class: invert-in-dark-mode -+ :alt: Inheritance diagram showing the classes available in pathlib. The -+ most basic class is PurePath, which has three direct subclasses: -+ PurePosixPath, PureWindowsPath, and Path. Further to these four -+ classes, there are two classes that use multiple inheritance: -+ PosixPath subclasses PurePosixPath and Path, and WindowsPath -+ subclasses PureWindowsPath and Path. - - If you've never used this module before or just aren't sure which class is - right for your task, :class:`Path` is most likely what you need. 
It instantiates -@@ -161,8 +167,8 @@ - A subclass of :class:`PurePath`, this path flavour represents non-Windows - filesystem paths:: - -- >>> PurePosixPath('/etc') -- PurePosixPath('/etc') -+ >>> PurePosixPath('/etc/hosts') -+ PurePosixPath('/etc/hosts') - - *pathsegments* is specified similarly to :class:`PurePath`. - -@@ -171,8 +177,8 @@ - A subclass of :class:`PurePath`, this path flavour represents Windows - filesystem paths, including `UNC paths`_:: +diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst +index 2f5bf53bc5c..0cc5e532711 100644 +--- a/Doc/library/platform.rst ++++ b/Doc/library/platform.rst +@@ -148,6 +148,9 @@ + Returns the system/OS name, such as ``'Linux'``, ``'Darwin'``, ``'Java'``, + ``'Windows'``. An empty string is returned if the value cannot be determined. -- >>> PureWindowsPath('c:/Program Files/') -- PureWindowsPath('c:/Program Files') -+ >>> PureWindowsPath('c:/', 'Users', 'Ximénez') -+ PureWindowsPath('c:/Users/Ximénez') - >>> PureWindowsPath('//server/share/file') - PureWindowsPath('//server/share/file') ++ On iOS and Android, this returns the user-facing OS name (i.e, ``'iOS``, ++ ``'iPadOS'`` or ``'Android'``). To obtain the kernel name (``'Darwin'`` or ++ ``'Linux'``), use :func:`os.uname()`. -@@ -756,8 +762,8 @@ - A subclass of :class:`Path` and :class:`PurePosixPath`, this class - represents concrete non-Windows filesystem paths:: + .. function:: system_alias(system, release, version) -- >>> PosixPath('/etc') -- PosixPath('/etc') -+ >>> PosixPath('/etc/hosts') -+ PosixPath('/etc/hosts') +@@ -161,6 +164,8 @@ + Returns the system's release version, e.g. ``'#3 on degas'``. An empty string is + returned if the value cannot be determined. - *pathsegments* is specified similarly to :class:`PurePath`. ++ On iOS and Android, this is the user-facing OS version. To obtain the ++ Darwin or Linux kernel version, use :func:`os.uname()`. -@@ -766,8 +772,8 @@ - A subclass of :class:`Path` and :class:`PureWindowsPath`, this class - represents concrete Windows filesystem paths:: + .. function:: uname() -- >>> WindowsPath('c:/Program Files/') -- WindowsPath('c:/Program Files') -+ >>> WindowsPath('c:/', 'Users', 'Ximénez') -+ WindowsPath('c:/Users/Ximénez') +@@ -234,7 +239,6 @@ + macOS Platform + -------------- - *pathsegments* is specified similarly to :class:`PurePath`. +- + .. function:: mac_ver(release='', versioninfo=('','',''), machine='') -@@ -789,23 +795,119 @@ - % (cls.__name__,)) - NotImplementedError: cannot instantiate 'WindowsPath' on your system + Get macOS version information and return it as tuple ``(release, versioninfo, +@@ -244,6 +248,24 @@ + Entries which cannot be determined are set to ``''``. All tuple entries are + strings. -+Some concrete path methods can raise an :exc:`OSError` if a system call fails -+(for example because the path doesn't exist). -+ -+ -+Expanding and resolving paths -+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+ -+.. classmethod:: Path.home() ++iOS Platform ++------------ + -+ Return a new path object representing the user's home directory (as -+ returned by :func:`os.path.expanduser` with ``~`` construct). If the home -+ directory can't be resolved, :exc:`RuntimeError` is raised. ++.. function:: ios_ver(system='', release='', model='', is_simulator=False) + -+ :: ++ Get iOS version information and return it as a ++ :func:`~collections.namedtuple` with the following attributes: + -+ >>> Path.home() -+ PosixPath('/home/antoine') ++ * ``system`` is the OS name; either ``'iOS'`` or ``'iPadOS'``. 
++ * ``release`` is the iOS version number as a string (e.g., ``'17.2'``). ++ * ``model`` is the device model identifier; this will be a string like ++ ``'iPhone13,2'`` for a physical device, or ``'iPhone'`` on a simulator. ++ * ``is_simulator`` is a boolean describing if the app is running on a ++ simulator or a physical device. + -+ .. versionadded:: 3.5 ++ Entries which cannot be determined are set to the defaults given as ++ parameters. + -+ -+.. method:: Path.expanduser() -+ -+ Return a new path with expanded ``~`` and ``~user`` constructs, -+ as returned by :meth:`os.path.expanduser`. If a home directory can't be -+ resolved, :exc:`RuntimeError` is raised. -+ -+ :: -+ -+ >>> p = PosixPath('~/films/Monty Python') -+ >>> p.expanduser() -+ PosixPath('/home/eric/films/Monty Python') -+ -+ .. versionadded:: 3.5 -+ -+ -+.. classmethod:: Path.cwd() -+ -+ Return a new path object representing the current directory (as returned -+ by :func:`os.getcwd`):: -+ -+ >>> Path.cwd() -+ PosixPath('/home/antoine/pathlib') -+ -+ -+.. method:: Path.absolute() -+ -+ Make the path absolute, without normalization or resolving symlinks. -+ Returns a new path object:: -+ -+ >>> p = Path('tests') -+ >>> p -+ PosixPath('tests') -+ >>> p.absolute() -+ PosixPath('/home/antoine/pathlib/tests') -+ -+ -+.. method:: Path.resolve(strict=False) -+ -+ Make the path absolute, resolving any symlinks. A new path object is -+ returned:: -+ -+ >>> p = Path() -+ >>> p -+ PosixPath('.') -+ >>> p.resolve() -+ PosixPath('/home/antoine/pathlib') -+ -+ "``..``" components are also eliminated (this is the only method to do so):: -+ -+ >>> p = Path('docs/../setup.py') -+ >>> p.resolve() -+ PosixPath('/home/antoine/pathlib/setup.py') -+ -+ If the path doesn't exist and *strict* is ``True``, :exc:`FileNotFoundError` -+ is raised. If *strict* is ``False``, the path is resolved as far as possible -+ and any remainder is appended without checking whether it exists. If an -+ infinite loop is encountered along the resolution path, :exc:`RuntimeError` -+ is raised. -+ -+ .. versionchanged:: 3.6 -+ The *strict* parameter was added (pre-3.6 behavior is strict). -+ -+ -+.. method:: Path.readlink() -+ -+ Return the path to which the symbolic link points (as returned by -+ :func:`os.readlink`):: -+ -+ >>> p = Path('mylink') -+ >>> p.symlink_to('setup.py') -+ >>> p.readlink() -+ PosixPath('setup.py') -+ -+ .. versionadded:: 3.9 -+ - - Querying file type and status - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - .. versionchanged:: 3.8 - -- :meth:`~Path.exists()`, :meth:`~Path.is_dir()`, :meth:`~Path.is_file()`, -- :meth:`~Path.is_mount()`, :meth:`~Path.is_symlink()`, -- :meth:`~Path.is_block_device()`, :meth:`~Path.is_char_device()`, -- :meth:`~Path.is_fifo()`, :meth:`~Path.is_socket()` now return ``False`` -+ :meth:`~Path.exists`, :meth:`~Path.is_dir`, :meth:`~Path.is_file`, -+ :meth:`~Path.is_mount`, :meth:`~Path.is_symlink`, -+ :meth:`~Path.is_block_device`, :meth:`~Path.is_char_device`, -+ :meth:`~Path.is_fifo`, :meth:`~Path.is_socket` now return ``False`` - instead of raising an exception for paths that contain characters - unrepresentable at the OS level. - - - .. method:: Path.stat(*, follow_symlinks=True) - -- Return a :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. -+ Return an :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. - The result is looked up at each call to this method. 
- - This method normally follows symlinks; to stat a symlink add the argument -@@ -1040,71 +1142,32 @@ - .. versionadded:: 3.5 - - --Other methods --^^^^^^^^^^^^^ -- --Many of these methods can raise an :exc:`OSError` if a system call fails (for --example because the path doesn't exist). -- -- --.. classmethod:: Path.cwd() -- -- Return a new path object representing the current directory (as returned -- by :func:`os.getcwd`):: -- -- >>> Path.cwd() -- PosixPath('/home/antoine/pathlib') -- -- --.. classmethod:: Path.home() -- -- Return a new path object representing the user's home directory (as -- returned by :func:`os.path.expanduser` with ``~`` construct). If the home -- directory can't be resolved, :exc:`RuntimeError` is raised. -- -- :: -- -- >>> Path.home() -- PosixPath('/home/antoine') -- -- .. versionadded:: 3.5 -- -- --.. method:: Path.chmod(mode, *, follow_symlinks=True) -- -- Change the file mode and permissions, like :func:`os.chmod`. -- -- This method normally follows symlinks. Some Unix flavours support changing -- permissions on the symlink itself; on these platforms you may add the -- argument ``follow_symlinks=False``, or use :meth:`~Path.lchmod`. -- -- :: -- -- >>> p = Path('setup.py') -- >>> p.stat().st_mode -- 33277 -- >>> p.chmod(0o444) -- >>> p.stat().st_mode -- 33060 -- -- .. versionchanged:: 3.10 -- The *follow_symlinks* parameter was added. -- -- --.. method:: Path.expanduser() -+Reading directories -+^^^^^^^^^^^^^^^^^^^ - -- Return a new path with expanded ``~`` and ``~user`` constructs, -- as returned by :meth:`os.path.expanduser`. If a home directory can't be -- resolved, :exc:`RuntimeError` is raised. -+.. method:: Path.iterdir() - -- :: -+ When the path points to a directory, yield path objects of the directory -+ contents:: - -- >>> p = PosixPath('~/films/Monty Python') -- >>> p.expanduser() -- PosixPath('/home/eric/films/Monty Python') -+ >>> p = Path('docs') -+ >>> for child in p.iterdir(): child -+ ... -+ PosixPath('docs/conf.py') -+ PosixPath('docs/_templates') -+ PosixPath('docs/make.bat') -+ PosixPath('docs/index.rst') -+ PosixPath('docs/_build') -+ PosixPath('docs/_static') -+ PosixPath('docs/Makefile') - -- .. versionadded:: 3.5 -+ The children are yielded in arbitrary order, and the special entries -+ ``'.'`` and ``'..'`` are not included. If a file is removed from or added -+ to the directory after creating the iterator, it is unspecified whether -+ a path object for that file is included. - -+ If the path is not a directory or otherwise inaccessible, :exc:`OSError` is -+ raised. - - .. method:: Path.glob(pattern, *, case_sensitive=None) - -@@ -1150,32 +1213,33 @@ - The *case_sensitive* parameter was added. - - --.. method:: Path.group() -+.. method:: Path.rglob(pattern, *, case_sensitive=None) - -- Return the name of the group owning the file. :exc:`KeyError` is raised -- if the file's gid isn't found in the system database. -+ Glob the given relative *pattern* recursively. This is like calling -+ :func:`Path.glob` with "``**/``" added in front of the *pattern*, where -+ *patterns* are the same as for :mod:`fnmatch`:: - -+ >>> sorted(Path().rglob("*.py")) -+ [PosixPath('build/lib/pathlib.py'), -+ PosixPath('docs/conf.py'), -+ PosixPath('pathlib.py'), -+ PosixPath('setup.py'), -+ PosixPath('test_pathlib.py')] - --.. 
method:: Path.iterdir() -+ By default, or when the *case_sensitive* keyword-only argument is set to -+ ``None``, this method matches paths using platform-specific casing rules: -+ typically, case-sensitive on POSIX, and case-insensitive on Windows. -+ Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. - -- When the path points to a directory, yield path objects of the directory -- contents:: -+ .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob - -- >>> p = Path('docs') -- >>> for child in p.iterdir(): child -- ... -- PosixPath('docs/conf.py') -- PosixPath('docs/_templates') -- PosixPath('docs/make.bat') -- PosixPath('docs/index.rst') -- PosixPath('docs/_build') -- PosixPath('docs/_static') -- PosixPath('docs/Makefile') -+ .. versionchanged:: 3.11 -+ Return only directories if *pattern* ends with a pathname components -+ separator (:data:`~os.sep` or :data:`~os.altsep`). -+ -+ .. versionchanged:: 3.12 -+ The *case_sensitive* parameter was added. - -- The children are yielded in arbitrary order, and the special entries -- ``'.'`` and ``'..'`` are not included. If a file is removed from or added -- to the directory after creating the iterator, whether a path object for -- that file be included is unspecified. - - .. method:: Path.walk(top_down=True, on_error=None, follow_symlinks=False) - -@@ -1208,7 +1272,7 @@ - This can be used to prune the search, or to impose a specific order of visiting, - or even to inform :meth:`Path.walk` about directories the caller creates or - renames before it resumes :meth:`Path.walk` again. Modifying *dirnames* when -- *top_down* is false has no effect on the behavior of :meth:`Path.walk()` since the -+ *top_down* is false has no effect on the behavior of :meth:`Path.walk` since the - directories in *dirnames* have already been generated by the time *dirnames* - is yielded to the caller. - -@@ -1272,16 +1336,27 @@ - - .. versionadded:: 3.12 - --.. method:: Path.lchmod(mode) - -- Like :meth:`Path.chmod` but, if the path points to a symbolic link, the -- symbolic link's mode is changed rather than its target's. -+Creating files and directories -+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+ -+.. method:: Path.touch(mode=0o666, exist_ok=True) -+ -+ Create a file at this given path. If *mode* is given, it is combined -+ with the process's ``umask`` value to determine the file mode and access -+ flags. If the file already exists, the function succeeds when *exist_ok* -+ is true (and its modification time is updated to the current time), -+ otherwise :exc:`FileExistsError` is raised. -+ -+ .. seealso:: -+ The :meth:`~Path.open`, :meth:`~Path.write_text` and -+ :meth:`~Path.write_bytes` methods are often used to create files. - - - .. method:: Path.mkdir(mode=0o777, parents=False, exist_ok=False) - - Create a new directory at this given path. If *mode* is given, it is -- combined with the process' ``umask`` value to determine the file mode -+ combined with the process's ``umask`` value to determine the file mode - and access flags. If the path already exists, :exc:`FileExistsError` - is raised. - -@@ -1303,30 +1378,51 @@ - The *exist_ok* parameter was added. - - --.. method:: Path.owner() -- -- Return the name of the user owning the file. :exc:`KeyError` is raised -- if the file's uid isn't found in the system database. -+.. method:: Path.symlink_to(target, target_is_directory=False) - -+ Make this path a symbolic link pointing to *target*. - --.. 
method:: Path.readlink() -+ On Windows, a symlink represents either a file or a directory, and does not -+ morph to the target dynamically. If the target is present, the type of the -+ symlink will be created to match. Otherwise, the symlink will be created -+ as a directory if *target_is_directory* is true or a file symlink (the -+ default) otherwise. On non-Windows platforms, *target_is_directory* is ignored. - -- Return the path to which the symbolic link points (as returned by -- :func:`os.readlink`):: -+ :: - - >>> p = Path('mylink') - >>> p.symlink_to('setup.py') -- >>> p.readlink() -- PosixPath('setup.py') -+ >>> p.resolve() -+ PosixPath('/home/antoine/pathlib/setup.py') -+ >>> p.stat().st_size -+ 956 -+ >>> p.lstat().st_size -+ 8 - -- .. versionadded:: 3.9 -+ .. note:: -+ The order of arguments (link, target) is the reverse -+ of :func:`os.symlink`'s. -+ -+ -+.. method:: Path.hardlink_to(target) -+ -+ Make this path a hard link to the same file as *target*. -+ -+ .. note:: -+ The order of arguments (link, target) is the reverse -+ of :func:`os.link`'s. -+ -+ .. versionadded:: 3.10 - - -+Renaming and deleting -+^^^^^^^^^^^^^^^^^^^^^ -+ - .. method:: Path.rename(target) - -- Rename this file or directory to the given *target*, and return a new Path -- instance pointing to *target*. On Unix, if *target* exists and is a file, -- it will be replaced silently if the user has permission. -+ Rename this file or directory to the given *target*, and return a new -+ :class:`!Path` instance pointing to *target*. On Unix, if *target* exists -+ and is a file, it will be replaced silently if the user has permission. - On Windows, if *target* exists, :exc:`FileExistsError` will be raised. - *target* can be either a string or another path object:: - -@@ -1340,93 +1436,42 @@ - 'some text' - - The target path may be absolute or relative. Relative paths are interpreted -- relative to the current working directory, *not* the directory of the Path -- object. -+ relative to the current working directory, *not* the directory of the -+ :class:`!Path` object. - - It is implemented in terms of :func:`os.rename` and gives the same guarantees. - - .. versionchanged:: 3.8 -- Added return value, return the new Path instance. -+ Added return value, return the new :class:`!Path` instance. - - - .. method:: Path.replace(target) - -- Rename this file or directory to the given *target*, and return a new Path -- instance pointing to *target*. If *target* points to an existing file or -- empty directory, it will be unconditionally replaced. -+ Rename this file or directory to the given *target*, and return a new -+ :class:`!Path` instance pointing to *target*. If *target* points to an -+ existing file or empty directory, it will be unconditionally replaced. - - The target path may be absolute or relative. Relative paths are interpreted -- relative to the current working directory, *not* the directory of the Path -- object. -+ relative to the current working directory, *not* the directory of the -+ :class:`!Path` object. - - .. versionchanged:: 3.8 -- Added return value, return the new Path instance. -- -- --.. method:: Path.absolute() -- -- Make the path absolute, without normalization or resolving symlinks. -- Returns a new path object:: -- -- >>> p = Path('tests') -- >>> p -- PosixPath('tests') -- >>> p.absolute() -- PosixPath('/home/antoine/pathlib/tests') -- -- --.. method:: Path.resolve(strict=False) -- -- Make the path absolute, resolving any symlinks. 
A new path object is -- returned:: -- -- >>> p = Path() -- >>> p -- PosixPath('.') -- >>> p.resolve() -- PosixPath('/home/antoine/pathlib') -- -- "``..``" components are also eliminated (this is the only method to do so):: -+ Added return value, return the new :class:`!Path` instance. - -- >>> p = Path('docs/../setup.py') -- >>> p.resolve() -- PosixPath('/home/antoine/pathlib/setup.py') -- -- If the path doesn't exist and *strict* is ``True``, :exc:`FileNotFoundError` -- is raised. If *strict* is ``False``, the path is resolved as far as possible -- and any remainder is appended without checking whether it exists. If an -- infinite loop is encountered along the resolution path, :exc:`RuntimeError` -- is raised. -- -- .. versionchanged:: 3.6 -- The *strict* parameter was added (pre-3.6 behavior is strict). - --.. method:: Path.rglob(pattern, *, case_sensitive=None) -- -- Glob the given relative *pattern* recursively. This is like calling -- :func:`Path.glob` with "``**/``" added in front of the *pattern*, where -- *patterns* are the same as for :mod:`fnmatch`:: -- -- >>> sorted(Path().rglob("*.py")) -- [PosixPath('build/lib/pathlib.py'), -- PosixPath('docs/conf.py'), -- PosixPath('pathlib.py'), -- PosixPath('setup.py'), -- PosixPath('test_pathlib.py')] -+.. method:: Path.unlink(missing_ok=False) - -- By default, or when the *case_sensitive* keyword-only argument is set to -- ``None``, this method matches paths using platform-specific casing rules: -- typically, case-sensitive on POSIX, and case-insensitive on Windows. -- Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. -+ Remove this file or symbolic link. If the path points to a directory, -+ use :func:`Path.rmdir` instead. - -- .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob -+ If *missing_ok* is false (the default), :exc:`FileNotFoundError` is -+ raised if the path does not exist. - -- .. versionchanged:: 3.11 -- Return only directories if *pattern* ends with a pathname components -- separator (:data:`~os.sep` or :data:`~os.altsep`). -+ If *missing_ok* is true, :exc:`FileNotFoundError` exceptions will be -+ ignored (same behavior as the POSIX ``rm -f`` command). - -- .. versionchanged:: 3.12 -- The *case_sensitive* parameter was added. -+ .. versionchanged:: 3.8 -+ The *missing_ok* parameter was added. - - - .. method:: Path.rmdir() -@@ -1434,64 +1479,46 @@ - Remove this directory. The directory must be empty. - - --.. method:: Path.symlink_to(target, target_is_directory=False) -- -- Make this path a symbolic link pointing to *target*. -- -- On Windows, a symlink represents either a file or a directory, and does not -- morph to the target dynamically. If the target is present, the type of the -- symlink will be created to match. Otherwise, the symlink will be created -- as a directory if *target_is_directory* is ``True`` or a file symlink (the -- default) otherwise. On non-Windows platforms, *target_is_directory* is ignored. -- -- :: -+Permissions and ownership -+^^^^^^^^^^^^^^^^^^^^^^^^^ - -- >>> p = Path('mylink') -- >>> p.symlink_to('setup.py') -- >>> p.resolve() -- PosixPath('/home/antoine/pathlib/setup.py') -- >>> p.stat().st_size -- 956 -- >>> p.lstat().st_size -- 8 -+.. method:: Path.owner() - -- .. note:: -- The order of arguments (link, target) is the reverse -- of :func:`os.symlink`'s. -+ Return the name of the user owning the file. :exc:`KeyError` is raised -+ if the file's user identifier (UID) isn't found in the system database. - --.. 
method:: Path.hardlink_to(target) - -- Make this path a hard link to the same file as *target*. -+.. method:: Path.group() - -- .. note:: -- The order of arguments (link, target) is the reverse -- of :func:`os.link`'s. -+ Return the name of the group owning the file. :exc:`KeyError` is raised -+ if the file's group identifier (GID) isn't found in the system database. - -- .. versionadded:: 3.10 - -+.. method:: Path.chmod(mode, *, follow_symlinks=True) - --.. method:: Path.touch(mode=0o666, exist_ok=True) -+ Change the file mode and permissions, like :func:`os.chmod`. - -- Create a file at this given path. If *mode* is given, it is combined -- with the process' ``umask`` value to determine the file mode and access -- flags. If the file already exists, the function succeeds if *exist_ok* -- is true (and its modification time is updated to the current time), -- otherwise :exc:`FileExistsError` is raised. -+ This method normally follows symlinks. Some Unix flavours support changing -+ permissions on the symlink itself; on these platforms you may add the -+ argument ``follow_symlinks=False``, or use :meth:`~Path.lchmod`. - -+ :: - --.. method:: Path.unlink(missing_ok=False) -+ >>> p = Path('setup.py') -+ >>> p.stat().st_mode -+ 33277 -+ >>> p.chmod(0o444) -+ >>> p.stat().st_mode -+ 33060 - -- Remove this file or symbolic link. If the path points to a directory, -- use :func:`Path.rmdir` instead. -+ .. versionchanged:: 3.10 -+ The *follow_symlinks* parameter was added. - -- If *missing_ok* is false (the default), :exc:`FileNotFoundError` is -- raised if the path does not exist. - -- If *missing_ok* is true, :exc:`FileNotFoundError` exceptions will be -- ignored (same behavior as the POSIX ``rm -f`` command). -+.. method:: Path.lchmod(mode) - -- .. versionchanged:: 3.8 -- The *missing_ok* parameter was added. -+ Like :meth:`Path.chmod` but, if the path points to a symbolic link, the -+ symbolic link's mode is changed rather than its target's. - - - Correspondence to tools in the :mod:`os` module -@@ -1500,51 +1527,54 @@ - Below is a table mapping various :mod:`os` functions to their corresponding - :class:`PurePath`/:class:`Path` equivalent. - --.. note:: -- -- Not all pairs of functions/methods below are equivalent. Some of them, -- despite having some overlapping use-cases, have different semantics. They -- include :func:`os.path.abspath` and :meth:`Path.absolute`, -- :func:`os.path.relpath` and :meth:`PurePath.relative_to`. 
-- --==================================== ============================== --:mod:`os` and :mod:`os.path` :mod:`pathlib` --==================================== ============================== --:func:`os.path.abspath` :meth:`Path.absolute` [#]_ --:func:`os.path.realpath` :meth:`Path.resolve` --:func:`os.chmod` :meth:`Path.chmod` --:func:`os.mkdir` :meth:`Path.mkdir` --:func:`os.makedirs` :meth:`Path.mkdir` --:func:`os.rename` :meth:`Path.rename` --:func:`os.replace` :meth:`Path.replace` --:func:`os.rmdir` :meth:`Path.rmdir` --:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink` --:func:`os.getcwd` :func:`Path.cwd` --:func:`os.path.exists` :meth:`Path.exists` --:func:`os.path.expanduser` :meth:`Path.expanduser` and -- :meth:`Path.home` --:func:`os.listdir` :meth:`Path.iterdir` --:func:`os.walk` :meth:`Path.walk` --:func:`os.path.isdir` :meth:`Path.is_dir` --:func:`os.path.isfile` :meth:`Path.is_file` --:func:`os.path.islink` :meth:`Path.is_symlink` --:func:`os.link` :meth:`Path.hardlink_to` --:func:`os.symlink` :meth:`Path.symlink_to` --:func:`os.readlink` :meth:`Path.readlink` --:func:`os.path.relpath` :meth:`PurePath.relative_to` [#]_ --:func:`os.stat` :meth:`Path.stat`, -- :meth:`Path.owner`, -- :meth:`Path.group` --:func:`os.path.isabs` :meth:`PurePath.is_absolute` --:func:`os.path.join` :func:`PurePath.joinpath` --:func:`os.path.basename` :attr:`PurePath.name` --:func:`os.path.dirname` :attr:`PurePath.parent` --:func:`os.path.samefile` :meth:`Path.samefile` --:func:`os.path.splitext` :attr:`PurePath.stem` and -- :attr:`PurePath.suffix` --==================================== ============================== -+===================================== ============================================== -+:mod:`os` and :mod:`os.path` :mod:`pathlib` -+===================================== ============================================== -+:func:`os.path.dirname` :attr:`PurePath.parent` -+:func:`os.path.basename` :attr:`PurePath.name` -+:func:`os.path.splitext` :attr:`PurePath.stem`, :attr:`PurePath.suffix` -+:func:`os.path.join` :meth:`PurePath.joinpath` -+:func:`os.path.isabs` :meth:`PurePath.is_absolute` -+:func:`os.path.relpath` :meth:`PurePath.relative_to` [1]_ -+:func:`os.path.expanduser` :meth:`Path.expanduser` [2]_ -+:func:`os.path.realpath` :meth:`Path.resolve` -+:func:`os.path.abspath` :meth:`Path.absolute` [3]_ -+:func:`os.path.exists` :meth:`Path.exists` -+:func:`os.path.isfile` :meth:`Path.is_file` -+:func:`os.path.isdir` :meth:`Path.is_dir` -+:func:`os.path.islink` :meth:`Path.is_symlink` -+:func:`os.path.isjunction` :meth:`Path.is_junction` -+:func:`os.path.ismount` :meth:`Path.is_mount` -+:func:`os.path.samefile` :meth:`Path.samefile` -+:func:`os.getcwd` :meth:`Path.cwd` -+:func:`os.stat` :meth:`Path.stat` -+:func:`os.lstat` :meth:`Path.lstat` -+:func:`os.listdir` :meth:`Path.iterdir` -+:func:`os.walk` :meth:`Path.walk` [4]_ -+:func:`os.mkdir`, :func:`os.makedirs` :meth:`Path.mkdir` -+:func:`os.link` :meth:`Path.hardlink_to` -+:func:`os.symlink` :meth:`Path.symlink_to` -+:func:`os.readlink` :meth:`Path.readlink` -+:func:`os.rename` :meth:`Path.rename` -+:func:`os.replace` :meth:`Path.replace` -+:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink` -+:func:`os.rmdir` :meth:`Path.rmdir` -+:func:`os.chmod` :meth:`Path.chmod` -+:func:`os.lchmod` :meth:`Path.lchmod` -+===================================== ============================================== - - .. rubric:: Footnotes - --.. 
[#] :func:`os.path.abspath` normalizes the resulting path, which may change its meaning in the presence of symlinks, while :meth:`Path.absolute` does not. --.. [#] :meth:`PurePath.relative_to` requires ``self`` to be the subpath of the argument, but :func:`os.path.relpath` does not. -+.. [1] :func:`os.path.relpath` calls :func:`~os.path.abspath` to make paths -+ absolute and remove "``..``" parts, whereas :meth:`PurePath.relative_to` -+ is a lexical operation that raises :exc:`ValueError` when its inputs' -+ anchors differ (e.g. if one path is absolute and the other relative.) -+.. [2] :func:`os.path.expanduser` returns the path unchanged if the home -+ directory can't be resolved, whereas :meth:`Path.expanduser` raises -+ :exc:`RuntimeError`. -+.. [3] :func:`os.path.abspath` removes "``..``" components without resolving -+ symlinks, which may change the meaning of the path, whereas -+ :meth:`Path.absolute` leaves any "``..``" components in the path. -+.. [4] :func:`os.walk` always follows symlinks when categorizing paths into -+ *dirnames* and *filenames*, whereas :meth:`Path.walk` categorizes all -+ symlinks into *filenames* when *follow_symlinks* is false (the default.) -diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst -index 8a6ee9c5c19..32c41b8b2c1 100644 ---- a/Doc/library/pdb.rst -+++ b/Doc/library/pdb.rst -@@ -49,7 +49,7 @@ - running without the debugger using the :pdbcmd:`continue` command. - - .. versionchanged:: 3.7 -- The built-in :func:`breakpoint()`, when called with defaults, can be used -+ The built-in :func:`breakpoint`, when called with defaults, can be used - instead of ``import pdb; pdb.set_trace()``. - - :: -diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst -index 5d4ff34ba02..f095cc84173 100644 ---- a/Doc/library/pkgutil.rst -+++ b/Doc/library/pkgutil.rst -@@ -34,9 +34,9 @@ - *name* argument. This feature is similar to :file:`\*.pth` files (see the - :mod:`site` module for more information), except that it doesn't special-case - lines starting with ``import``. A :file:`\*.pkg` file is trusted at face -- value: apart from checking for duplicates, all entries found in a -- :file:`\*.pkg` file are added to the path, regardless of whether they exist -- on the filesystem. (This is a feature.) -+ value: apart from skipping blank lines and ignoring comments, all entries -+ found in a :file:`\*.pkg` file are added to the path, regardless of whether -+ they exist on the filesystem (this is a feature). - - If the input path is not a list (as is the case for frozen packages) it is - returned unchanged. The input path is not modified; an extended copy is -diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst -index 2f5bf53bc5c..0cc5e532711 100644 ---- a/Doc/library/platform.rst -+++ b/Doc/library/platform.rst -@@ -148,6 +148,9 @@ - Returns the system/OS name, such as ``'Linux'``, ``'Darwin'``, ``'Java'``, - ``'Windows'``. An empty string is returned if the value cannot be determined. - -+ On iOS and Android, this returns the user-facing OS name (i.e, ``'iOS``, -+ ``'iPadOS'`` or ``'Android'``). To obtain the kernel name (``'Darwin'`` or -+ ``'Linux'``), use :func:`os.uname()`. - - .. function:: system_alias(system, release, version) - -@@ -161,6 +164,8 @@ - Returns the system's release version, e.g. ``'#3 on degas'``. An empty string is - returned if the value cannot be determined. - -+ On iOS and Android, this is the user-facing OS version. To obtain the -+ Darwin or Linux kernel version, use :func:`os.uname()`. - - .. 
function:: uname() - -@@ -234,7 +239,6 @@ - macOS Platform - -------------- - -- - .. function:: mac_ver(release='', versioninfo=('','',''), machine='') - - Get macOS version information and return it as tuple ``(release, versioninfo, -@@ -244,6 +248,24 @@ - Entries which cannot be determined are set to ``''``. All tuple entries are - strings. - -+iOS Platform -+------------ -+ -+.. function:: ios_ver(system='', release='', model='', is_simulator=False) -+ -+ Get iOS version information and return it as a -+ :func:`~collections.namedtuple` with the following attributes: -+ -+ * ``system`` is the OS name; either ``'iOS'`` or ``'iPadOS'``. -+ * ``release`` is the iOS version number as a string (e.g., ``'17.2'``). -+ * ``model`` is the device model identifier; this will be a string like -+ ``'iPhone13,2'`` for a physical device, or ``'iPhone'`` on a simulator. -+ * ``is_simulator`` is a boolean describing if the app is running on a -+ simulator or a physical device. -+ -+ Entries which cannot be determined are set to the defaults given as -+ parameters. -+ - - Unix Platforms - -------------- -diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst -index df706c10ce9..1b3498e51f7 100644 ---- a/Doc/library/pprint.rst -+++ b/Doc/library/pprint.rst -@@ -35,24 +35,66 @@ - Functions - --------- - --.. function:: pp(object, *args, sort_dicts=False, **kwargs) -- -- Prints the formatted representation of *object* followed by a newline. -- If *sort_dicts* is false (the default), dictionaries will be displayed with -- their keys in insertion order, otherwise the dict keys will be sorted. -- *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting -- parameters. -- -- >>> import pprint -- >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] -- >>> stuff.insert(0, stuff) -- >>> pprint.pp(stuff) -- [, -- 'spam', -- 'eggs', -- 'lumberjack', -- 'knights', -- 'ni'] -+.. function:: pp(object, stream=None, indent=1, width=80, depth=None, *, \ -+ compact=False, sort_dicts=False, underscore_numbers=False) -+ -+ Prints the formatted representation of *object*, followed by a newline. -+ This function may be used in the interactive interpreter -+ instead of the :func:`print` function for inspecting values. -+ Tip: you can reassign ``print = pprint.pp`` for use within a scope. -+ -+ :param object: -+ The object to be printed. -+ -+ :param stream: -+ A file-like object to which the output will be written -+ by calling its :meth:`!write` method. -+ If ``None`` (the default), :data:`sys.stdout` is used. -+ :type stream: :term:`file-like object` | None -+ -+ :param int indent: -+ The amount of indentation added for each nesting level. -+ -+ :param int width: -+ The desired maximum number of characters per line in the output. -+ If a structure cannot be formatted within the width constraint, -+ a best effort will be made. -+ -+ :param depth: -+ The number of nesting levels which may be printed. -+ If the data structure being printed is too deep, -+ the next contained level is replaced by ``...``. -+ If ``None`` (the default), there is no constraint -+ on the depth of the objects being formatted. -+ :type depth: int | None -+ -+ :param bool compact: -+ Control the way long :term:`sequences ` are formatted. -+ If ``False`` (the default), -+ each item of a sequence will be formatted on a separate line, -+ otherwise as many items as will fit within the *width* -+ will be formatted on each output line. 
-+ -+ :param bool sort_dicts: -+ If ``True``, dictionaries will be formatted with -+ their keys sorted, otherwise -+ they will be displayed in insertion order (the default). -+ -+ :param bool underscore_numbers: -+ If ``True``, -+ integers will be formatted with the ``_`` character for a thousands separator, -+ otherwise underscores are not displayed (the default). -+ -+ >>> import pprint -+ >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] -+ >>> stuff.insert(0, stuff) -+ >>> pprint.pp(stuff) -+ [, -+ 'spam', -+ 'eggs', -+ 'lumberjack', -+ 'knights', -+ 'ni'] - - .. versionadded:: 3.8 - -@@ -60,19 +102,10 @@ - .. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \ - compact=False, sort_dicts=True, underscore_numbers=False) - -- Prints the formatted representation of *object* on *stream*, followed by a -- newline. If *stream* is ``None``, :data:`sys.stdout` is used. This may be used -- in the interactive interpreter instead of the :func:`print` function for -- inspecting values (you can even reassign ``print = pprint.pprint`` for use -- within a scope). -- -- The configuration parameters *stream*, *indent*, *width*, *depth*, -- *compact*, *sort_dicts* and *underscore_numbers* are passed to the -- :class:`PrettyPrinter` constructor and their meanings are as -- described in its documentation below. -+ Alias for :func:`~pprint.pp` with *sort_dicts* set to ``True`` by default, -+ which would automatically sort the dictionaries' keys, -+ you might want to use :func:`~pprint.pp` instead where it is ``False`` by default. - -- Note that *sort_dicts* is ``True`` by default and you might want to use -- :func:`~pprint.pp` instead where it is ``False`` by default. - - .. function:: pformat(object, indent=1, width=80, depth=None, *, \ - compact=False, sort_dicts=True, underscore_numbers=False) -@@ -80,7 +113,7 @@ - Return the formatted representation of *object* as a string. *indent*, - *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are - passed to the :class:`PrettyPrinter` constructor as formatting parameters -- and their meanings are as described in its documentation below. -+ and their meanings are as described in the documentation above. - - - .. function:: isreadable(object) -@@ -119,51 +152,39 @@ - PrettyPrinter Objects - --------------------- - --This module defines one class: -- --.. First the implementation class: -- -- - .. index:: single: ...; placeholder - - .. class:: PrettyPrinter(indent=1, width=80, depth=None, stream=None, *, \ - compact=False, sort_dicts=True, underscore_numbers=False) - -- Construct a :class:`PrettyPrinter` instance. This constructor understands -- several keyword parameters. -- -- *stream* (default :data:`!sys.stdout`) is a :term:`file-like object` to -- which the output will be written by calling its :meth:`!write` method. -- If both *stream* and :data:`!sys.stdout` are ``None``, then -- :meth:`~PrettyPrinter.pprint` silently returns. -+ Construct a :class:`PrettyPrinter` instance. - -- Other values configure the manner in which nesting of complex data -- structures is displayed. -+ Arguments have the same meaning as for :func:`~pprint.pp`. -+ Note that they are in a different order, and that *sort_dicts* defaults to ``True``. - -- *indent* (default 1) specifies the amount of indentation added for -- each nesting level. -- -- *depth* controls the number of nesting levels which may be printed; if -- the data structure being printed is too deep, the next contained level -- is replaced by ``...``. 
By default, there is no constraint on the -- depth of the objects being formatted. -- -- *width* (default 80) specifies the desired maximum number of characters per -- line in the output. If a structure cannot be formatted within the width -- constraint, a best effort will be made. -- -- *compact* impacts the way that long sequences (lists, tuples, sets, etc) -- are formatted. If *compact* is false (the default) then each item of a -- sequence will be formatted on a separate line. If *compact* is true, as -- many items as will fit within the *width* will be formatted on each output -- line. -- -- If *sort_dicts* is true (the default), dictionaries will be formatted with -- their keys sorted, otherwise they will display in insertion order. -+ >>> import pprint -+ >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] -+ >>> stuff.insert(0, stuff[:]) -+ >>> pp = pprint.PrettyPrinter(indent=4) -+ >>> pp.pprint(stuff) -+ [ ['spam', 'eggs', 'lumberjack', 'knights', 'ni'], -+ 'spam', -+ 'eggs', -+ 'lumberjack', -+ 'knights', -+ 'ni'] -+ >>> pp = pprint.PrettyPrinter(width=41, compact=True) -+ >>> pp.pprint(stuff) -+ [['spam', 'eggs', 'lumberjack', -+ 'knights', 'ni'], -+ 'spam', 'eggs', 'lumberjack', 'knights', -+ 'ni'] -+ >>> tup = ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead', -+ ... ('parrot', ('fresh fruit',)))))))) -+ >>> pp = pprint.PrettyPrinter(depth=6) -+ >>> pp.pprint(tup) -+ ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead', (...))))))) - -- If *underscore_numbers* is true, integers will be formatted with the -- ``_`` character for a thousands separator, otherwise underscores are not -- displayed (the default). - - .. versionchanged:: 3.4 - Added the *compact* parameter. -@@ -177,29 +198,6 @@ - .. versionchanged:: 3.11 - No longer attempts to write to :data:`!sys.stdout` if it is ``None``. - -- >>> import pprint -- >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] -- >>> stuff.insert(0, stuff[:]) -- >>> pp = pprint.PrettyPrinter(indent=4) -- >>> pp.pprint(stuff) -- [ ['spam', 'eggs', 'lumberjack', 'knights', 'ni'], -- 'spam', -- 'eggs', -- 'lumberjack', -- 'knights', -- 'ni'] -- >>> pp = pprint.PrettyPrinter(width=41, compact=True) -- >>> pp.pprint(stuff) -- [['spam', 'eggs', 'lumberjack', -- 'knights', 'ni'], -- 'spam', 'eggs', 'lumberjack', 'knights', -- 'ni'] -- >>> tup = ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead', -- ... ('parrot', ('fresh fruit',)))))))) -- >>> pp = pprint.PrettyPrinter(depth=6) -- >>> pp.pprint(tup) -- ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead', (...))))))) -- - - :class:`PrettyPrinter` instances have the following methods: - -diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst -index cc059b66fcb..b89655ea472 100644 ---- a/Doc/library/profile.rst -+++ b/Doc/library/profile.rst -@@ -675,7 +675,7 @@ - that you choose (see :ref:`profile-calibration`). For most machines, a timer - that returns a lone integer value will provide the best results in terms of - low overhead during profiling. (:func:`os.times` is *pretty* bad, as it -- returns a tuple of floating point values). If you want to substitute a -+ returns a tuple of floating-point values). If you want to substitute a - better timer in the cleanest fashion, derive a class and hardwire a - replacement dispatch method that best handles your timer call, along with the - appropriate calibration constant. 
-@@ -692,7 +692,7 @@ - As the :class:`cProfile.Profile` class cannot be calibrated, custom timer - functions should be used with care and should be as fast as possible. For - the best results with a custom timer, it might be necessary to hard-code it -- in the C source of the internal :mod:`_lsprof` module. -+ in the C source of the internal :mod:`!_lsprof` module. - - Python 3.3 adds several new functions in :mod:`time` that can be used to make - precise measurements of process or wall-clock time. For example, see -diff --git a/Doc/library/pwd.rst b/Doc/library/pwd.rst -index 98ca174d9e3..d71d7212cfd 100644 ---- a/Doc/library/pwd.rst -+++ b/Doc/library/pwd.rst -@@ -10,7 +10,7 @@ - This module provides access to the Unix user account and password database. It - is available on all Unix versions. - --.. availability:: Unix, not Emscripten, not WASI. -+.. availability:: Unix, not WASI, not iOS. - - Password database entries are reported as a tuple-like object, whose attributes - correspond to the members of the ``passwd`` structure (Attribute field below, -diff --git a/Doc/library/random.rst b/Doc/library/random.rst -index 10c88ac68a8..a589bf76b5c 100644 ---- a/Doc/library/random.rst -+++ b/Doc/library/random.rst -@@ -194,8 +194,8 @@ - - For a given seed, the :func:`choices` function with equal weighting - typically produces a different sequence than repeated calls to -- :func:`choice`. The algorithm used by :func:`choices` uses floating -- point arithmetic for internal consistency and speed. The algorithm used -+ :func:`choice`. The algorithm used by :func:`choices` uses floating-point -+ arithmetic for internal consistency and speed. The algorithm used - by :func:`choice` defaults to integer arithmetic with repeated selections - to avoid small biases from round-off error. - -@@ -292,12 +292,12 @@ - - .. function:: random() - -- Return the next random floating point number in the range ``0.0 <= X < 1.0`` -+ Return the next random floating-point number in the range ``0.0 <= X < 1.0`` - - - .. function:: uniform(a, b) - -- Return a random floating point number *N* such that ``a <= N <= b`` for -+ Return a random floating-point number *N* such that ``a <= N <= b`` for - ``a <= b`` and ``b <= N <= a`` for ``b < a``. - - The end-point value ``b`` may or may not be included in the range -@@ -307,7 +307,7 @@ - - .. function:: triangular(low, high, mode) - -- Return a random floating point number *N* such that ``low <= N <= high`` and -+ Return a random floating-point number *N* such that ``low <= N <= high`` and - with the specified *mode* between those bounds. The *low* and *high* bounds - default to zero and one. The *mode* argument defaults to the midpoint - between the bounds, giving a symmetric distribution. -diff --git a/Doc/library/re.rst b/Doc/library/re.rst -index 220bd687bc1..3c9b99c6438 100644 ---- a/Doc/library/re.rst -+++ b/Doc/library/re.rst -@@ -101,7 +101,7 @@ - ``.`` - (Dot.) In the default mode, this matches any character except a newline. If - the :const:`DOTALL` flag has been specified, this matches any character -- including a newline. -+ including a newline. ``(?s:.)`` matches any character regardless of flags. - - .. index:: single: ^ (caret); in regular expressions - -@@ -600,10 +600,9 @@ - - ``\s`` - For Unicode (str) patterns: -- Matches Unicode whitespace characters (which includes -- ``[ \t\n\r\f\v]``, and also many other characters, for example the -- non-breaking spaces mandated by typography rules in many -- languages). 
-+ Matches Unicode whitespace characters (as defined by :py:meth:`str.isspace`). -+ This includes ``[ \t\n\r\f\v]``, and also many other characters, for example the -+ non-breaking spaces mandated by typography rules in many languages. - - Matches ``[ \t\n\r\f\v]`` if the :py:const:`~re.ASCII` flag is used. - -@@ -911,6 +910,10 @@ - ``None`` if no position in the string matches the pattern; note that this is - different from finding a zero-length match at some point in the string. - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - - .. function:: match(pattern, string, flags=0) - -@@ -925,6 +928,10 @@ - If you want to locate a match anywhere in *string*, use :func:`search` - instead (see also :ref:`search-vs-match`). - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - - .. function:: fullmatch(pattern, string, flags=0) - -@@ -932,6 +939,10 @@ - corresponding :class:`~re.Match`. Return ``None`` if the string does not match - the pattern; note that this is different from a zero-length match. - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - .. versionadded:: 3.4 - - -@@ -974,6 +985,10 @@ - >>> re.split(r'(\W*)', '...words...') - ['', '...', '', '', 'w', '', 'o', '', 'r', '', 'd', '', 's', '...', '', '', ''] - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - .. versionchanged:: 3.1 - Added the optional flags argument. - -@@ -999,6 +1014,10 @@ - >>> re.findall(r'(\w+)=(\d+)', 'set width=20 and height=10') - [('width', '20'), ('height', '10')] - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - .. versionchanged:: 3.7 - Non-empty matches can now start just after a previous empty match. - -@@ -1010,6 +1029,10 @@ - is scanned left-to-right, and matches are returned in the order found. Empty - matches are included in the result. - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - .. versionchanged:: 3.7 - Non-empty matches can now start just after a previous empty match. - -@@ -1065,6 +1088,10 @@ - character ``'0'``. The backreference ``\g<0>`` substitutes in the entire - substring matched by the RE. - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - .. versionchanged:: 3.1 - Added the optional flags argument. - -@@ -1100,6 +1127,10 @@ - .. versionchanged:: 3.5 - Unmatched groups are replaced with an empty string. - -+ The expression's behaviour can be modified by specifying a *flags* value. -+ Values can be any of the `flags`_ variables, combined using bitwise OR -+ (the ``|`` operator). -+ - - .. 
function:: escape(pattern) - -diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst -index 43cf8d5cdac..b6486576872 100644 ---- a/Doc/library/readline.rst -+++ b/Doc/library/readline.rst -@@ -24,6 +24,8 @@ - allowable constructs of that file, and the capabilities of the - Readline library in general. - -+.. include:: ../includes/wasm-ios-notavail.rst + + Unix Platforms + -------------- +diff --git a/Doc/library/pwd.rst b/Doc/library/pwd.rst +index 98ca174d9e3..d71d7212cfd 100644 +--- a/Doc/library/pwd.rst ++++ b/Doc/library/pwd.rst +@@ -10,7 +10,7 @@ + This module provides access to the Unix user account and password database. It + is available on all Unix versions. + +-.. availability:: Unix, not Emscripten, not WASI. ++.. availability:: Unix, not WASI, not iOS. + + Password database entries are reported as a tuple-like object, whose attributes + correspond to the members of the ``passwd`` structure (Attribute field below, +diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst +index f02aec8a6a8..b6486576872 100644 +--- a/Doc/library/readline.rst ++++ b/Doc/library/readline.rst +@@ -24,6 +24,8 @@ + allowable constructs of that file, and the capabilities of the + Readline library in general. + ++.. include:: ../includes/wasm-ios-notavail.rst + .. note:: The underlying Readline library API may be implemented by -@@ -44,6 +46,10 @@ - python:bind -v - python:bind ^I rl_complete - -+ Also note that different libraries may use different history file formats. -+ When switching the underlying library, existing history files may become -+ unusable. -+ - - Init file - --------- diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst -index 7465bc5402c..0515d205bbc 100644 +index 02009d82104..0515d205bbc 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -13,7 +13,7 @@ @@ -8522,30 +676,8 @@ index 7465bc5402c..0515d205bbc 100644 Symbolic constants are used to specify particular system resources and to request usage information about either the current process or its children. -@@ -305,7 +305,7 @@ - elements. - - The fields :attr:`ru_utime` and :attr:`ru_stime` of the return value are -- floating point values representing the amount of time spent executing in user -+ floating-point values representing the amount of time spent executing in user - mode and the amount of time spent executing in system mode, respectively. The - remaining values are integers. Consult the :manpage:`getrusage(2)` man page for - detailed information about these values. A brief summary is presented here: -diff --git a/Doc/library/select.rst b/Doc/library/select.rst -index 06ebaf0201e..f23a249f44b 100644 ---- a/Doc/library/select.rst -+++ b/Doc/library/select.rst -@@ -129,7 +129,7 @@ - - Empty iterables are allowed, but acceptance of three empty iterables is - platform-dependent. (It is known to work on Unix but not on Windows.) The -- optional *timeout* argument specifies a time-out as a floating point number -+ optional *timeout* argument specifies a time-out as a floating-point number - in seconds. When the *timeout* argument is omitted the function blocks until - at least one file descriptor is ready. A time-out value of zero specifies a - poll and never blocks. 
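The *flags* note repeated across the :mod:`re` function hunks above amounts to passing flag constants combined with the ``|`` operator. A short sketch of that usage (standard library only, behaviour as of Python 3.12):

   >>> import re
   >>> re.search(r"^world", "hello\nWORLD", re.IGNORECASE | re.MULTILINE)
   <re.Match object; span=(6, 11), match='WORLD'>
   >>> re.findall(r"\d+", "a1 b22 c333", flags=re.ASCII)
   ['1', '22', '333']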
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst -index 60f21bc9105..79c4948e99e 100644 +index 641a6c021c1..79c4948e99e 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -26,9 +26,9 @@ @@ -8561,109 +693,11 @@ index 60f21bc9105..79c4948e99e 100644 Execution of Python signal handlers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -@@ -425,7 +425,7 @@ - signal to a particular Python thread would be to force a running system call - to fail with :exc:`InterruptedError`. - -- Use :func:`threading.get_ident()` or the :attr:`~threading.Thread.ident` -+ Use :func:`threading.get_ident` or the :attr:`~threading.Thread.ident` - attribute of :class:`threading.Thread` objects to get a suitable value - for *thread_id*. - -diff --git a/Doc/library/site.rst b/Doc/library/site.rst -index f5cf81fb1c9..514eed314ea 100644 ---- a/Doc/library/site.rst -+++ b/Doc/library/site.rst -@@ -15,8 +15,9 @@ - - .. index:: triple: module; search; path - --Importing this module will append site-specific paths to the module search path --and add a few builtins, unless :option:`-S` was used. In that case, this module -+Importing this module normally appends site-specific paths to the module search path -+and adds :ref:`callables `, including :func:`help` to the built-in -+namespace. However, Python startup option :option:`-S` blocks this and this module - can be safely imported with no automatic modifications to the module search path - or additions to the builtins. To explicitly trigger the usual site-specific - additions, call the :func:`main` function. -diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst -index 2511ef7f2ad..7cd530a5fd6 100644 ---- a/Doc/library/smtplib.rst -+++ b/Doc/library/smtplib.rst -@@ -556,34 +556,33 @@ - and 'From' addresses), and the message to be delivered. Note that the headers - to be included with the message must be included in the message as entered; this - example doesn't do any processing of the :rfc:`822` headers. In particular, the --'To' and 'From' addresses must be included in the message headers explicitly. :: -+'To' and 'From' addresses must be included in the message headers explicitly:: - - import smtplib - -- def prompt(prompt): -- return input(prompt).strip() -+ def prompt(title): -+ return input(title).strip() - -- fromaddr = prompt("From: ") -- toaddrs = prompt("To: ").split() -+ from_addr = prompt("From: ") -+ to_addrs = prompt("To: ").split() - print("Enter message, end with ^D (Unix) or ^Z (Windows):") - - # Add the From: and To: headers at the start! -- msg = ("From: %s\r\nTo: %s\r\n\r\n" -- % (fromaddr, ", ".join(toaddrs))) -+ lines = [f"From: {from_addr}", f"To: {', '.join(to_addrs)}", ""] - while True: - try: - line = input() - except EOFError: - break -- if not line: -- break -- msg = msg + line -+ else: -+ lines.append(line) - -+ msg = "\r\n".join(lines) - print("Message length is", len(msg)) - -- server = smtplib.SMTP('localhost') -+ server = smtplib.SMTP("localhost") - server.set_debuglevel(1) -- server.sendmail(fromaddr, toaddrs, msg) -+ server.sendmail(from_addr, to_addrs, msg) - server.quit() - - .. note:: diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst -index 10f03b3fe02..337bad40c5a 100644 +index 584a12c2514..337bad40c5a 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst -@@ -695,6 +695,13 @@ - - .. versionadded:: 3.12 - -+.. data:: SHUT_RD -+ SHUT_WR -+ SHUT_RDWR -+ -+ These constants are used by the :meth:`~socket.socket.shutdown` method of socket objects. -+ -+ .. availability:: not WASI. 
- - Functions - ^^^^^^^^^ -@@ -724,7 +731,7 @@ - of :meth:`socket.getpeername` but not the actual OS resource. Unlike - :func:`socket.fromfd`, *fileno* will return the same socket and not a - duplicate. This may help close a detached socket using -- :meth:`socket.close()`. -+ :meth:`socket.close`. - - The newly created socket is :ref:`non-inheritable `. - -@@ -1208,7 +1215,7 @@ +@@ -1215,7 +1215,7 @@ buffer. Raises :exc:`OverflowError` if *length* is outside the permissible range of values. @@ -8672,7 +706,7 @@ index 10f03b3fe02..337bad40c5a 100644 Most Unix platforms. -@@ -1231,7 +1238,7 @@ +@@ -1238,7 +1238,7 @@ amount of ancillary data that can be received, since additional data may be able to fit into the padding area. @@ -8681,7 +715,7 @@ index 10f03b3fe02..337bad40c5a 100644 most Unix platforms. -@@ -1271,7 +1278,7 @@ +@@ -1278,7 +1278,7 @@ (index int, name string) tuples. :exc:`OSError` if the system call fails. @@ -8690,7 +724,7 @@ index 10f03b3fe02..337bad40c5a 100644 .. versionadded:: 3.3 -@@ -1298,7 +1305,7 @@ +@@ -1305,7 +1305,7 @@ interface name. :exc:`OSError` if no interface with the given name exists. @@ -8699,7 +733,7 @@ index 10f03b3fe02..337bad40c5a 100644 .. versionadded:: 3.3 -@@ -1315,7 +1322,7 @@ +@@ -1322,7 +1322,7 @@ interface index number. :exc:`OSError` if no interface with the given index exists. @@ -8708,7 +742,7 @@ index 10f03b3fe02..337bad40c5a 100644 .. versionadded:: 3.3 -@@ -1332,7 +1339,7 @@ +@@ -1339,7 +1339,7 @@ The *fds* parameter is a sequence of file descriptors. Consult :meth:`~socket.sendmsg` for the documentation of these parameters. @@ -8717,7 +751,7 @@ index 10f03b3fe02..337bad40c5a 100644 Unix platforms supporting :meth:`~socket.sendmsg` and :const:`SCM_RIGHTS` mechanism. -@@ -1346,7 +1353,7 @@ +@@ -1353,7 +1353,7 @@ Return ``(msg, list(fds), flags, addr)``. Consult :meth:`~socket.recvmsg` for the documentation of these parameters. @@ -8726,435 +760,8 @@ index 10f03b3fe02..337bad40c5a 100644 Unix platforms supporting :meth:`~socket.sendmsg` and :const:`SCM_RIGHTS` mechanism. -@@ -1403,7 +1410,7 @@ - .. method:: socket.close() - - Mark the socket closed. The underlying system resource (e.g. a file -- descriptor) is also closed when all file objects from :meth:`makefile()` -+ descriptor) is also closed when all file objects from :meth:`makefile` - are closed. Once that happens, all future operations on the socket - object will fail. The remote end will receive no more data (after - queued data is flushed). -@@ -1418,10 +1425,10 @@ - - .. note:: - -- :meth:`close()` releases the resource associated with a connection but -+ :meth:`close` releases the resource associated with a connection but - does not necessarily close the connection immediately. If you want -- to close the connection in a timely fashion, call :meth:`shutdown()` -- before :meth:`close()`. -+ to close the connection in a timely fashion, call :meth:`shutdown` -+ before :meth:`close`. - - - .. method:: socket.connect(address) -@@ -1917,7 +1924,7 @@ - .. method:: socket.settimeout(value) - - Set a timeout on blocking socket operations. The *value* argument can be a -- nonnegative floating point number expressing seconds, or ``None``. -+ nonnegative floating-point number expressing seconds, or ``None``. - If a non-zero value is given, subsequent socket operations will raise a - :exc:`timeout` exception if the timeout period *value* has elapsed before - the operation has completed. If zero is given, the socket is put in -@@ -2030,7 +2037,7 @@ - in non-blocking mode. 
Also, the blocking and timeout modes are shared between - file descriptors and socket objects that refer to the same network endpoint. - This implementation detail can have visible consequences if e.g. you decide -- to use the :meth:`~socket.fileno()` of a socket. -+ to use the :meth:`~socket.fileno` of a socket. - - Timeouts and the ``connect`` method - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst -index 70f1e05a653..6d078c59223 100644 ---- a/Doc/library/sqlite3.rst -+++ b/Doc/library/sqlite3.rst -@@ -127,7 +127,7 @@ - We can see that the table has been created, - as the query returns a :class:`tuple` containing the table's name. - If we query ``sqlite_master`` for a non-existent table ``spam``, --:meth:`!res.fetchone()` will return ``None``: -+:meth:`!res.fetchone` will return ``None``: - - .. doctest:: - -diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst -index 8fb0d5056c1..b7cf2b28695 100644 ---- a/Doc/library/ssl.rst -+++ b/Doc/library/ssl.rst -@@ -1021,25 +1021,25 @@ - - SSL sockets provide the following methods of :ref:`socket-objects`: - -- - :meth:`~socket.socket.accept()` -- - :meth:`~socket.socket.bind()` -- - :meth:`~socket.socket.close()` -- - :meth:`~socket.socket.connect()` -- - :meth:`~socket.socket.detach()` -- - :meth:`~socket.socket.fileno()` -- - :meth:`~socket.socket.getpeername()`, :meth:`~socket.socket.getsockname()` -- - :meth:`~socket.socket.getsockopt()`, :meth:`~socket.socket.setsockopt()` -- - :meth:`~socket.socket.gettimeout()`, :meth:`~socket.socket.settimeout()`, -- :meth:`~socket.socket.setblocking()` -- - :meth:`~socket.socket.listen()` -- - :meth:`~socket.socket.makefile()` -- - :meth:`~socket.socket.recv()`, :meth:`~socket.socket.recv_into()` -+ - :meth:`~socket.socket.accept` -+ - :meth:`~socket.socket.bind` -+ - :meth:`~socket.socket.close` -+ - :meth:`~socket.socket.connect` -+ - :meth:`~socket.socket.detach` -+ - :meth:`~socket.socket.fileno` -+ - :meth:`~socket.socket.getpeername`, :meth:`~socket.socket.getsockname` -+ - :meth:`~socket.socket.getsockopt`, :meth:`~socket.socket.setsockopt` -+ - :meth:`~socket.socket.gettimeout`, :meth:`~socket.socket.settimeout`, -+ :meth:`~socket.socket.setblocking` -+ - :meth:`~socket.socket.listen` -+ - :meth:`~socket.socket.makefile` -+ - :meth:`~socket.socket.recv`, :meth:`~socket.socket.recv_into` - (but passing a non-zero ``flags`` argument is not allowed) -- - :meth:`~socket.socket.send()`, :meth:`~socket.socket.sendall()` (with -+ - :meth:`~socket.socket.send`, :meth:`~socket.socket.sendall` (with - the same limitation) -- - :meth:`~socket.socket.sendfile()` (but :mod:`os.sendfile` will be used -- for plain-text sockets only, else :meth:`~socket.socket.send()` will be used) -- - :meth:`~socket.socket.shutdown()` -+ - :meth:`~socket.socket.sendfile` (but :mod:`os.sendfile` will be used -+ for plain-text sockets only, else :meth:`~socket.socket.send` will be used) -+ - :meth:`~socket.socket.shutdown` - - However, since the SSL (and TLS) protocol has its own framing atop - of TCP, the SSL sockets abstraction can, in certain respects, diverge from -@@ -1428,6 +1428,19 @@ - :data:`PROTOCOL_TLS`, :data:`PROTOCOL_TLS_CLIENT`, and - :data:`PROTOCOL_TLS_SERVER` use TLS 1.2 as minimum TLS version. - -+ .. note:: -+ -+ :class:`SSLContext` only supports limited mutation once it has been used -+ by a connection. 
Adding new certificates to the internal trust store is -+ allowed, but changing ciphers, verification settings, or mTLS -+ certificates may result in surprising behavior. -+ -+ .. note:: -+ -+ :class:`SSLContext` is designed to be shared and used by multiple -+ connections. -+ Thus, it is thread-safe as long as it is not reconfigured after being -+ used by a connection. - - :class:`SSLContext` objects have the following methods and attributes: - -@@ -1684,7 +1697,7 @@ - IDN-encoded internationalized domain name, the *server_name_callback* - receives a decoded U-label (``"pythön.org"``). - -- If there is an decoding error on the server name, the TLS connection will -+ If there is a decoding error on the server name, the TLS connection will - terminate with an :const:`ALERT_DESCRIPTION_INTERNAL_ERROR` fatal TLS - alert message to the client. - -@@ -2556,7 +2569,7 @@ - - When calling the :class:`SSLContext` constructor directly, - :const:`CERT_NONE` is the default. Since it does not authenticate the other --peer, it can be insecure, especially in client mode where most of time you -+peer, it can be insecure, especially in client mode where most of the time you - would like to ensure the authenticity of the server you're talking to. - Therefore, when in client mode, it is highly recommended to use - :const:`CERT_REQUIRED`. However, it is in itself not sufficient; you also -diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst -index 6da3bced99d..ce1b8a112e8 100644 ---- a/Doc/library/statistics.rst -+++ b/Doc/library/statistics.rst -@@ -73,7 +73,7 @@ - - ======================= =============================================================== - :func:`mean` Arithmetic mean ("average") of data. --:func:`fmean` Fast, floating point arithmetic mean, with optional weighting. -+:func:`fmean` Fast, floating-point arithmetic mean, with optional weighting. - :func:`geometric_mean` Geometric mean of data. - :func:`harmonic_mean` Harmonic mean of data. - :func:`median` Median (middle value) of data. -@@ -408,6 +408,12 @@ - >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) - 'red' - -+ Only hashable inputs are supported. To handle type :class:`set`, -+ consider casting to :class:`frozenset`. To handle type :class:`list`, -+ consider casting to :class:`tuple`. For mixed or nested inputs, consider -+ using this slower quadratic algorithm that only depends on equality tests: -+ ``max(data, key=data.count)``. -+ - .. versionchanged:: 3.8 - Now handles multimodal datasets by returning the first mode encountered. - Formerly, it raised :exc:`StatisticsError` when more than one mode was -diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst -index d11bfb803f8..bf11e2891db 100644 ---- a/Doc/library/stdtypes.rst -+++ b/Doc/library/stdtypes.rst -@@ -209,18 +209,18 @@ - pair: object; numeric - pair: object; Boolean - pair: object; integer -- pair: object; floating point -+ pair: object; floating-point - pair: object; complex number - pair: C; language - --There are three distinct numeric types: :dfn:`integers`, :dfn:`floating --point numbers`, and :dfn:`complex numbers`. In addition, Booleans are a --subtype of integers. Integers have unlimited precision. Floating point -+There are three distinct numeric types: :dfn:`integers`, :dfn:`floating-point -+numbers`, and :dfn:`complex numbers`. In addition, Booleans are a -+subtype of integers. Integers have unlimited precision. 
Floating-point - numbers are usually implemented using :c:expr:`double` in C; information --about the precision and internal representation of floating point -+about the precision and internal representation of floating-point - numbers for the machine on which your program is running is available - in :data:`sys.float_info`. Complex numbers have a real and imaginary --part, which are each a floating point number. To extract these parts -+part, which are each a floating-point number. To extract these parts - from a complex number *z*, use ``z.real`` and ``z.imag``. (The standard - library includes the additional numeric types :mod:`fractions.Fraction`, for - rationals, and :mod:`decimal.Decimal`, for floating-point numbers with -@@ -229,7 +229,7 @@ - .. index:: - pair: numeric; literals - pair: integer; literals -- pair: floating point; literals -+ pair: floating-point; literals - pair: complex number; literals - pair: hexadecimal; literals - pair: octal; literals -@@ -238,7 +238,7 @@ - Numbers are created by numeric literals or as the result of built-in functions - and operators. Unadorned integer literals (including hex, octal and binary - numbers) yield integers. Numeric literals containing a decimal point or an --exponent sign yield floating point numbers. Appending ``'j'`` or ``'J'`` to a -+exponent sign yield floating-point numbers. Appending ``'j'`` or ``'J'`` to a - numeric literal yields an imaginary number (a complex number with a zero real - part) which you can add to an integer or float to get a complex number with real - and imaginary parts. -@@ -832,7 +832,7 @@ - .. deprecated:: 3.12 - - The use of the bitwise inversion operator ``~`` is deprecated and will -- raise an error in Python 3.14. -+ raise an error in Python 3.16. - - :class:`bool` is a subclass of :class:`int` (see :ref:`typesnumeric`). In - many numeric contexts, ``False`` and ``True`` behave like the integers 0 and 1, respectively. -@@ -1209,8 +1209,9 @@ - | ``s.pop()`` or ``s.pop(i)`` | retrieves the item at *i* and | \(2) | - | | also removes it from *s* | | - +------------------------------+--------------------------------+---------------------+ --| ``s.remove(x)`` | remove the first item from *s* | \(3) | --| | where ``s[i]`` is equal to *x* | | -+| ``s.remove(x)`` | removes the first item from | \(3) | -+| | *s* where ``s[i]`` is equal to | | -+| | *x* | | - +------------------------------+--------------------------------+---------------------+ - | ``s.reverse()`` | reverses the items of *s* in | \(4) | - | | place | | -@@ -1220,7 +1221,7 @@ - Notes: - - (1) -- *t* must have the same length as the slice it is replacing. -+ If *k* is not equal to ``1``, *t* must have the same length as the slice it is replacing. - - (2) - The optional argument *i* defaults to ``-1``, so that by default the last -@@ -1497,8 +1498,8 @@ - .. seealso:: - - * The `linspace recipe `_ -- shows how to implement a lazy version of range suitable for floating -- point applications. -+ shows how to implement a lazy version of range suitable for floating-point -+ applications. - - .. index:: - single: string; text sequence type -@@ -2092,8 +2093,9 @@ - If *sep* is given, consecutive delimiters are not grouped together and are - deemed to delimit empty strings (for example, ``'1,,2'.split(',')`` returns - ``['1', '', '2']``). The *sep* argument may consist of multiple characters -- (for example, ``'1<>2<>3'.split('<>')`` returns ``['1', '2', '3']``). -- Splitting an empty string with a specified separator returns ``['']``. 
-+ as a single delimiter (to split with multiple delimiters, use -+ :func:`re.split`). Splitting an empty string with a specified separator -+ returns ``['']``. - - For example:: - -@@ -2103,6 +2105,8 @@ - ['1', '2,3'] - >>> '1,2,,3,'.split(',') - ['1', '2', '', '3', ''] -+ >>> '1<>2<>3<4'.split('<>') -+ ['1', '2', '3<4'] - - If *sep* is not specified or is ``None``, a different splitting algorithm is - applied: runs of consecutive whitespace are regarded as a single separator, -@@ -2430,19 +2434,19 @@ - +------------+-----------------------------------------------------+-------+ - | ``'X'`` | Signed hexadecimal (uppercase). | \(2) | - +------------+-----------------------------------------------------+-------+ --| ``'e'`` | Floating point exponential format (lowercase). | \(3) | -+| ``'e'`` | Floating-point exponential format (lowercase). | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'E'`` | Floating point exponential format (uppercase). | \(3) | -+| ``'E'`` | Floating-point exponential format (uppercase). | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'f'`` | Floating point decimal format. | \(3) | -+| ``'f'`` | Floating-point decimal format. | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'F'`` | Floating point decimal format. | \(3) | -+| ``'F'`` | Floating-point decimal format. | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'g'`` | Floating point format. Uses lowercase exponential | \(4) | -+| ``'g'`` | Floating-point format. Uses lowercase exponential | \(4) | - | | format if exponent is less than -4 or not less than | | - | | precision, decimal format otherwise. | | - +------------+-----------------------------------------------------+-------+ --| ``'G'`` | Floating point format. Uses uppercase exponential | \(4) | -+| ``'G'`` | Floating-point format. Uses uppercase exponential | \(4) | - | | format if exponent is less than -4 or not less than | | - | | precision, decimal format otherwise. | | - +------------+-----------------------------------------------------+-------+ -@@ -3140,10 +3144,9 @@ - If *sep* is given, consecutive delimiters are not grouped together and are - deemed to delimit empty subsequences (for example, ``b'1,,2'.split(b',')`` - returns ``[b'1', b'', b'2']``). The *sep* argument may consist of a -- multibyte sequence (for example, ``b'1<>2<>3'.split(b'<>')`` returns -- ``[b'1', b'2', b'3']``). Splitting an empty sequence with a specified -- separator returns ``[b'']`` or ``[bytearray(b'')]`` depending on the type -- of object being split. The *sep* argument may be any -+ multibyte sequence as a single delimiter. Splitting an empty sequence with -+ a specified separator returns ``[b'']`` or ``[bytearray(b'')]`` depending -+ on the type of object being split. The *sep* argument may be any - :term:`bytes-like object`. - - For example:: -@@ -3154,6 +3157,8 @@ - [b'1', b'2,3'] - >>> b'1,2,,3,'.split(b',') - [b'1', b'2', b'', b'3', b''] -+ >>> b'1<>2<>3<4'.split(b'<>') -+ [b'1', b'2', b'3<4'] - - If *sep* is not specified or is ``None``, a different splitting algorithm - is applied: runs of consecutive ASCII whitespace are regarded as a single -@@ -3427,7 +3432,7 @@ - ``b'abcdefghijklmnopqrstuvwxyz'``. Uppercase ASCII characters - are those byte values in the sequence ``b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'``. 
- -- Unlike :func:`str.swapcase()`, it is always the case that -+ Unlike :func:`str.swapcase`, it is always the case that - ``bin.swapcase().swapcase() == bin`` for the binary versions. Case - conversions are symmetrical in ASCII, even though that is not generally - true for arbitrary Unicode code points. -@@ -3648,19 +3653,19 @@ - +------------+-----------------------------------------------------+-------+ - | ``'X'`` | Signed hexadecimal (uppercase). | \(2) | - +------------+-----------------------------------------------------+-------+ --| ``'e'`` | Floating point exponential format (lowercase). | \(3) | -+| ``'e'`` | Floating-point exponential format (lowercase). | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'E'`` | Floating point exponential format (uppercase). | \(3) | -+| ``'E'`` | Floating-point exponential format (uppercase). | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'f'`` | Floating point decimal format. | \(3) | -+| ``'f'`` | Floating-point decimal format. | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'F'`` | Floating point decimal format. | \(3) | -+| ``'F'`` | Floating-point decimal format. | \(3) | - +------------+-----------------------------------------------------+-------+ --| ``'g'`` | Floating point format. Uses lowercase exponential | \(4) | -+| ``'g'`` | Floating-point format. Uses lowercase exponential | \(4) | - | | format if exponent is less than -4 or not less than | | - | | precision, decimal format otherwise. | | - +------------+-----------------------------------------------------+-------+ --| ``'G'`` | Floating point format. Uses uppercase exponential | \(4) | -+| ``'G'`` | Floating-point format. Uses uppercase exponential | \(4) | - | | format if exponent is less than -4 or not less than | | - | | precision, decimal format otherwise. | | - +------------+-----------------------------------------------------+-------+ -@@ -3882,7 +3887,7 @@ - >>> a == b - False - -- Note that, as with floating point numbers, ``v is w`` does *not* imply -+ Note that, as with floating-point numbers, ``v is w`` does *not* imply - ``v == w`` for memoryview objects. - - .. versionchanged:: 3.3 -@@ -3973,7 +3978,7 @@ - dangling resources) as soon as possible. - - After this method has been called, any further operation on the view -- raises a :class:`ValueError` (except :meth:`release()` itself which can -+ raises a :class:`ValueError` (except :meth:`release` itself which can - be called multiple times):: - - >>> m = memoryview(b'abc') -@@ -4556,7 +4561,7 @@ - - Return a shallow copy of the dictionary. - -- .. classmethod:: fromkeys(iterable, value=None) -+ .. classmethod:: fromkeys(iterable, value=None, /) - - Create a new dictionary with keys from *iterable* and values set to *value*. - -diff --git a/Doc/library/string.rst b/Doc/library/string.rst -index c3c0d732cf1..1f316307965 100644 ---- a/Doc/library/string.rst -+++ b/Doc/library/string.rst -@@ -418,7 +418,7 @@ - .. index:: single: _ (underscore); in string formatting - - The ``'_'`` option signals the use of an underscore for a thousands --separator for floating point presentation types and for integer -+separator for floating-point presentation types and for integer - presentation type ``'d'``. For integer presentation types ``'b'``, - ``'o'``, ``'x'``, and ``'X'``, underscores will be inserted every 4 - digits. 
For other presentation types, specifying this option is an -@@ -491,9 +491,9 @@ - +---------+----------------------------------------------------------+ - - In addition to the above presentation types, integers can be formatted --with the floating point presentation types listed below (except -+with the floating-point presentation types listed below (except - ``'n'`` and ``None``). When doing so, :func:`float` is used to convert the --integer to a floating point number before formatting. -+integer to a floating-point number before formatting. - - The available presentation types for :class:`float` and - :class:`~decimal.Decimal` values are: -diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst -index 346784d86e3..29bce521ee1 100644 ---- a/Doc/library/struct.rst -+++ b/Doc/library/struct.rst -@@ -275,9 +275,9 @@ - (1) - .. index:: single: ? (question mark); in struct format strings - -- The ``'?'`` conversion code corresponds to the :c:expr:`_Bool` type defined by -- C99. If this type is not available, it is simulated using a :c:expr:`char`. In -- standard mode, it is always represented by one byte. -+ The ``'?'`` conversion code corresponds to the :c:expr:`_Bool` type -+ defined by C standards since C99. In standard mode, it is -+ represented by one byte. - - (2) - When attempting to pack a non-integer using any of the integer conversion diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst -index 33f96a2f744..b03db8f3e0a 100644 +index 755ff4c6f0f..b03db8f3e0a 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -25,7 +25,7 @@ @@ -9166,128 +773,6 @@ index 33f96a2f744..b03db8f3e0a 100644 Using the :mod:`subprocess` Module ---------------------------------- -@@ -608,7 +608,7 @@ - - If *group* is not ``None``, the setregid() system call will be made in the - child process prior to the execution of the subprocess. If the provided -- value is a string, it will be looked up via :func:`grp.getgrnam()` and -+ value is a string, it will be looked up via :func:`grp.getgrnam` and - the value in ``gr_gid`` will be used. If the value is an integer, it - will be passed verbatim. (POSIX only) - -@@ -618,7 +618,7 @@ - If *extra_groups* is not ``None``, the setgroups() system call will be - made in the child process prior to the execution of the subprocess. - Strings provided in *extra_groups* will be looked up via -- :func:`grp.getgrnam()` and the values in ``gr_gid`` will be used. -+ :func:`grp.getgrnam` and the values in ``gr_gid`` will be used. - Integer values will be passed verbatim. (POSIX only) - - .. availability:: POSIX -@@ -626,7 +626,7 @@ - - If *user* is not ``None``, the setreuid() system call will be made in the - child process prior to the execution of the subprocess. If the provided -- value is a string, it will be looked up via :func:`pwd.getpwnam()` and -+ value is a string, it will be looked up via :func:`pwd.getpwnam` and - the value in ``pw_uid`` will be used. If the value is an integer, it will - be passed verbatim. (POSIX only) - -@@ -1110,7 +1110,7 @@ - .. data:: NORMAL_PRIORITY_CLASS - - A :class:`Popen` ``creationflags`` parameter to specify that a new process -- will have an normal priority. (default) -+ will have a normal priority. (default) - - .. versionadded:: 3.7 - -diff --git a/Doc/library/symtable.rst b/Doc/library/symtable.rst -index fc2d79b77cf..de9a961592a 100644 ---- a/Doc/library/symtable.rst -+++ b/Doc/library/symtable.rst -@@ -127,8 +127,39 @@ - - .. 
method:: get_methods() - -- Return a tuple containing the names of methods declared in the class. -- -+ Return a tuple containing the names of method-like functions declared -+ in the class. -+ -+ Here, the term 'method' designates *any* function defined in the class -+ body via :keyword:`def` or :keyword:`async def`. -+ -+ Functions defined in a deeper scope (e.g., in an inner class) are not -+ picked up by :meth:`get_methods`. -+ -+ For example: -+ -+ >>> import symtable -+ >>> st = symtable.symtable(''' -+ ... def outer(): pass -+ ... -+ ... class A: -+ ... def f(): -+ ... def w(): pass -+ ... -+ ... def g(self): pass -+ ... -+ ... @classmethod -+ ... async def h(cls): pass -+ ... -+ ... global outer -+ ... def outer(self): pass -+ ... ''', 'test', 'exec') -+ >>> class_A = st.get_children()[1] -+ >>> class_A.get_methods() -+ ('f', 'g', 'h') -+ -+ Although ``A().f()`` raises :exc:`TypeError` at runtime, ``A.f`` is still -+ considered as a method-like function. - - .. class:: Symbol - -diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst -index 03f1f309f65..40d0ef80a3f 100644 ---- a/Doc/library/sys.rst -+++ b/Doc/library/sys.rst -@@ -724,11 +724,11 @@ - regardless of their size. This function is mainly useful for tracking - and debugging memory leaks. Because of the interpreter's internal - caches, the result can vary from call to call; you may have to call -- :func:`_clear_type_cache()` and :func:`gc.collect()` to get more -+ :func:`_clear_type_cache` and :func:`gc.collect` to get more - predictable results. - - If a Python build or implementation cannot reasonably compute this -- information, :func:`getallocatedblocks()` is allowed to return 0 instead. -+ information, :func:`getallocatedblocks` is allowed to return 0 instead. - - .. versionadded:: 3.4 - -diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst -index 75672913943..aaccc0431d7 100644 ---- a/Doc/library/sysconfig.rst -+++ b/Doc/library/sysconfig.rst -@@ -305,7 +305,7 @@ - mix with those by the other. - - End users should not use this function, but :func:`get_default_scheme` and -- :func:`get_preferred_scheme()` instead. -+ :func:`get_preferred_scheme` instead. - - .. versionadded:: 3.10 - -@@ -376,7 +376,7 @@ - - This is used mainly to distinguish platform-specific build directories and - platform-specific built distributions. Typically includes the OS name and -- version and the architecture (as supplied by 'os.uname()'), although the -+ version and the architecture (as supplied by :func:`os.uname`), although the - exact information included depends on the OS; e.g., on Linux, the kernel - version isn't particularly important. - diff --git a/Doc/library/syslog.rst b/Doc/library/syslog.rst index 79b808ab63c..332b58413d3 100644 --- a/Doc/library/syslog.rst @@ -9301,498 +786,8 @@ index 79b808ab63c..332b58413d3 100644 This module wraps the system ``syslog`` family of routines. A pure Python library that can speak to a syslog server is available in the -diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst -index bd745c78823..0352cddb16e 100644 ---- a/Doc/library/tarfile.rst -+++ b/Doc/library/tarfile.rst -@@ -608,7 +608,7 @@ - it is best practice to only do so in top-level applications or - :mod:`site configuration `. - To set a global default this way, a filter function needs to be wrapped in -- :func:`staticmethod()` to prevent injection of a ``self`` argument. -+ :func:`staticmethod` to prevent injection of a ``self`` argument. - - .. 
method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None) - -diff --git a/Doc/library/test.rst b/Doc/library/test.rst -index 64bf8174478..b11bdb42907 100644 ---- a/Doc/library/test.rst -+++ b/Doc/library/test.rst -@@ -1695,7 +1695,7 @@ - - .. function:: check_warnings(*filters, quiet=True) - -- A convenience wrapper for :func:`warnings.catch_warnings()` that makes it -+ A convenience wrapper for :func:`warnings.catch_warnings` that makes it - easier to test that a warning was correctly raised. It is approximately - equivalent to calling ``warnings.catch_warnings(record=True)`` with - :meth:`warnings.simplefilter` set to ``always`` and with the option to -diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst -index c88dcabcd91..5d94a7760f9 100644 ---- a/Doc/library/threading.rst -+++ b/Doc/library/threading.rst -@@ -409,7 +409,7 @@ - timeout occurs. - - When the *timeout* argument is present and not ``None``, it should be a -- floating point number specifying a timeout for the operation in seconds -+ floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). As :meth:`~Thread.join` always returns ``None``, - you must call :meth:`~Thread.is_alive` after :meth:`~Thread.join` to - decide whether a timeout happened -- if the thread is still alive, the -@@ -790,7 +790,7 @@ - occurs. Once awakened or timed out, it re-acquires the lock and returns. - - When the *timeout* argument is present and not ``None``, it should be a -- floating point number specifying a timeout for the operation in seconds -+ floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). - - When the underlying lock is an :class:`RLock`, it is not released using -@@ -1014,10 +1014,10 @@ - has not expired. The return value represents the - reason that this blocking method returned; ``True`` if returning because - the internal flag is set to true, or ``False`` if a timeout is given and -- the the internal flag did not become true within the given wait time. -+ the internal flag did not become true within the given wait time. - - When the timeout argument is present and not ``None``, it should be a -- floating point number specifying a timeout for the operation in seconds, -+ floating-point number specifying a timeout for the operation in seconds, - or fractions thereof. - - .. versionchanged:: 3.1 -diff --git a/Doc/library/time.rst b/Doc/library/time.rst -index d792d5633dd..188dbca8fd1 100644 ---- a/Doc/library/time.rst -+++ b/Doc/library/time.rst -@@ -69,7 +69,7 @@ - systems, the clock "ticks" only 50 or 100 times a second. - - * On the other hand, the precision of :func:`.time` and :func:`sleep` is better -- than their Unix equivalents: times are expressed as floating point numbers, -+ than their Unix equivalents: times are expressed as floating-point numbers, - :func:`.time` returns the most accurate time available (using Unix - :c:func:`!gettimeofday` where available), and :func:`sleep` will accept a time - with a nonzero fraction (Unix :c:func:`!select` is used to implement this, where -@@ -273,7 +273,7 @@ - This is the inverse function of :func:`localtime`. Its argument is the - :class:`struct_time` or full 9-tuple (since the dst flag is needed; use ``-1`` - as the dst flag if it is unknown) which expresses the time in *local* time, not -- UTC. It returns a floating point number, for compatibility with :func:`.time`. -+ UTC. It returns a floating-point number, for compatibility with :func:`.time`. 
- If the input value cannot be represented as a valid time, either - :exc:`OverflowError` or :exc:`ValueError` will be raised (which depends on - whether the invalid value is caught by Python or the underlying C libraries). -@@ -358,7 +358,7 @@ - .. function:: sleep(secs) - - Suspend execution of the calling thread for the given number of seconds. -- The argument may be a floating point number to indicate a more precise sleep -+ The argument may be a floating-point number to indicate a more precise sleep - time. - - If the sleep is interrupted by a signal and no exception is raised by the -@@ -642,13 +642,13 @@ - - .. function:: time() -> float - -- Return the time in seconds since the epoch_ as a floating point -+ Return the time in seconds since the epoch_ as a floating-point - number. The handling of `leap seconds`_ is platform dependent. - On Windows and most Unix systems, the leap seconds are not counted towards - the time in seconds since the epoch_. This is commonly referred to as `Unix - time `_. - -- Note that even though the time is always returned as a floating point -+ Note that even though the time is always returned as a floating-point - number, not all systems provide time with a better precision than 1 second. - While this function normally returns non-decreasing values, it can return a - lower value than a previous call if the system clock has been set back -diff --git a/Doc/library/token.rst b/Doc/library/token.rst -index 9368ced97ab..e27a3b96d8f 100644 ---- a/Doc/library/token.rst -+++ b/Doc/library/token.rst -@@ -75,7 +75,7 @@ - :noindex: - - Token value indicating that a type comment was recognized. Such -- tokens are only produced when :func:`ast.parse()` is invoked with -+ tokens are only produced when :func:`ast.parse` is invoked with - ``type_comments=True``. - - -diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst -index 85dae82104a..d3f47b9e4fb 100644 ---- a/Doc/library/traceback.rst -+++ b/Doc/library/traceback.rst -@@ -42,6 +42,14 @@ - :term:`file ` or :term:`file-like object` to - receive the output. - -+ .. note:: -+ -+ The meaning of the *limit* parameter is different than the meaning -+ of :const:`sys.tracebacklimit`. A negative *limit* value corresponds to -+ a positive value of :const:`!sys.tracebacklimit`, whereas the behaviour of -+ a positive *limit* value cannot be achieved with -+ :const:`!sys.tracebacklimit`. -+ - .. versionchanged:: 3.5 - Added negative *limit* support. - -diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst -index 1a5c21d3c94..446ee6a0ec3 100644 ---- a/Doc/library/typing.rst -+++ b/Doc/library/typing.rst -@@ -27,12 +27,13 @@ - - Consider the function below:: - -- def moon_weight(earth_weight: float) -> str: -- return f'On the moon, you would weigh {earth_weight * 0.166} kilograms.' -+ def surface_area_of_cube(edge_length: float) -> str: -+ return f"The surface area of the cube is {6 * edge_length ** 2}." - --The function ``moon_weight`` takes an argument expected to be an instance of :class:`float`, --as indicated by the *type hint* ``earth_weight: float``. The function is expected to --return an instance of :class:`str`, as indicated by the ``-> str`` hint. -+The function ``surface_area_of_cube`` takes an argument expected to -+be an instance of :class:`float`, as indicated by the :term:`type hint` -+``edge_length: float``. The function is expected to return an instance -+of :class:`str`, as indicated by the ``-> str`` hint. 
- - While type hints can be simple classes like :class:`float` or :class:`str`, - they can also be more complex. The :mod:`typing` module provides a vocabulary of -@@ -97,8 +98,9 @@ - # The static type checker will treat the previous type signature as - # being exactly equivalent to this one. - def broadcast_message( -- message: str, -- servers: Sequence[tuple[tuple[str, int], dict[str, str]]]) -> None: -+ message: str, -+ servers: Sequence[tuple[tuple[str, int], dict[str, str]]] -+ ) -> None: - ... - - The :keyword:`type` statement is new in Python 3.12. For backwards -@@ -206,7 +208,7 @@ - =========================== - - Functions -- or other :term:`callable` objects -- can be annotated using --:class:`collections.abc.Callable` or :data:`typing.Callable`. -+:class:`collections.abc.Callable` or deprecated :data:`typing.Callable`. - ``Callable[[int], str]`` signifies a function that takes a single parameter - of type :class:`int` and returns a :class:`str`. - -@@ -399,7 +401,7 @@ - ========================= - - A variable annotated with ``C`` may accept a value of type ``C``. In --contrast, a variable annotated with ``type[C]`` (or -+contrast, a variable annotated with ``type[C]`` (or deprecated - :class:`typing.Type[C] `) may accept values that are classes - themselves -- specifically, it will accept the *class object* of ``C``. For - example:: -@@ -439,6 +441,72 @@ - ``type[Any]`` is equivalent to :class:`type`, which is the root of Python's - :ref:`metaclass hierarchy `. - -+ -+.. _annotating-generators-and-coroutines: -+ -+Annotating generators and coroutines -+==================================== -+ -+A generator can be annotated using the generic type -+:class:`Generator[YieldType, SendType, ReturnType] `. -+For example:: -+ -+ def echo_round() -> Generator[int, float, str]: -+ sent = yield 0 -+ while sent >= 0: -+ sent = yield round(sent) -+ return 'Done' -+ -+Note that unlike many other generic classes in the standard library, -+the ``SendType`` of :class:`~collections.abc.Generator` behaves -+contravariantly, not covariantly or invariantly. -+ -+If your generator will only yield values, set the ``SendType`` and -+``ReturnType`` to ``None``:: -+ -+ def infinite_stream(start: int) -> Generator[int, None, None]: -+ while True: -+ yield start -+ start += 1 -+ -+Alternatively, annotate your generator as having a return type of -+either ``Iterable[YieldType]`` or ``Iterator[YieldType]``:: -+ -+ def infinite_stream(start: int) -> Iterator[int]: -+ while True: -+ yield start -+ start += 1 -+ -+Async generators are handled in a similar fashion, but don't -+expect a ``ReturnType`` type argument -+(:class:`AsyncGenerator[YieldType, SendType] `):: -+ -+ async def infinite_stream(start: int) -> AsyncGenerator[int, None]: -+ while True: -+ yield start -+ start = await increment(start) -+ -+As in the synchronous case, -+:class:`AsyncIterable[YieldType] ` -+and :class:`AsyncIterator[YieldType] ` are -+available as well:: -+ -+ async def infinite_stream(start: int) -> AsyncIterator[int]: -+ while True: -+ yield start -+ start = await increment(start) -+ -+Coroutines can be annotated using -+:class:`Coroutine[YieldType, SendType, ReturnType] `. -+Generic arguments correspond to those of :class:`~collections.abc.Generator`, -+for example:: -+ -+ from collections.abc import Coroutine -+ c: Coroutine[list[str], str, int] # Some coroutine defined elsewhere -+ x = c.send('hi') # Inferred type of 'x' is list[str] -+ async def bar() -> None: -+ y = await c # Inferred type of 'y' is int -+ - .. 
_user-defined-generics: - - User-defined generic types -@@ -1394,8 +1462,8 @@ - print("Not a list of strings!") - - If ``is_str_list`` is a class or instance method, then the type in -- ``TypeGuard`` maps to the type of the second parameter after ``cls`` or -- ``self``. -+ ``TypeGuard`` maps to the type of the second parameter (after ``cls`` or -+ ``self``). - - In short, the form ``def foo(arg: TypeA) -> TypeGuard[TypeB]: ...``, - means that if ``foo(arg)`` returns ``True``, then ``arg`` narrows from -@@ -1749,8 +1817,8 @@ - of ``*args``:: - - def call_soon[*Ts]( -- callback: Callable[[*Ts], None], -- *args: *Ts -+ callback: Callable[[*Ts], None], -+ *args: *Ts - ) -> None: - ... - callback(*args) -@@ -3071,14 +3139,9 @@ - Deprecated alias to :class:`dict`. - - Note that to annotate arguments, it is preferred -- to use an abstract collection type such as :class:`Mapping` -+ to use an abstract collection type such as :class:`~collections.abc.Mapping` - rather than to use :class:`dict` or :class:`!typing.Dict`. - -- This type can be used as follows:: -- -- def count_words(text: str) -> Dict[str, int]: -- ... -- - .. deprecated:: 3.9 - :class:`builtins.dict ` now supports subscripting (``[]``). - See :pep:`585` and :ref:`types-genericalias`. -@@ -3088,16 +3151,9 @@ - Deprecated alias to :class:`list`. - - Note that to annotate arguments, it is preferred -- to use an abstract collection type such as :class:`Sequence` or -- :class:`Iterable` rather than to use :class:`list` or :class:`!typing.List`. -- -- This type may be used as follows:: -- -- def vec2[T: (int, float)](x: T, y: T) -> List[T]: -- return [x, y] -- -- def keep_positives[T: (int, float)](vector: Sequence[T]) -> List[T]: -- return [item for item in vector if item > 0] -+ to use an abstract collection type such as -+ :class:`~collections.abc.Sequence` or :class:`~collections.abc.Iterable` -+ rather than to use :class:`list` or :class:`!typing.List`. - - .. deprecated:: 3.9 - :class:`builtins.list ` now supports subscripting (``[]``). -@@ -3108,8 +3164,8 @@ - Deprecated alias to :class:`builtins.set `. - - Note that to annotate arguments, it is preferred -- to use an abstract collection type such as :class:`AbstractSet` -- rather than to use :class:`set` or :class:`!typing.Set`. -+ to use an abstract collection type such as :class:`collections.abc.Set` -+ rather than to use :class:`set` or :class:`typing.Set`. - - .. deprecated:: 3.9 - :class:`builtins.set ` now supports subscripting (``[]``). -@@ -3313,11 +3369,6 @@ - - Deprecated alias to :class:`collections.abc.Mapping`. - -- This type can be used as follows:: -- -- def get_position_in_index(word_list: Mapping[str, int], word: str) -> int: -- return word_list[word] -- - .. deprecated:: 3.9 - :class:`collections.abc.Mapping` now supports subscripting (``[]``). - See :pep:`585` and :ref:`types-genericalias`. -@@ -3381,14 +3432,9 @@ - - Deprecated alias to :class:`collections.abc.Coroutine`. - -- The variance and order of type variables -- correspond to those of :class:`Generator`, for example:: -- -- from collections.abc import Coroutine -- c: Coroutine[list[str], str, int] # Some coroutine defined elsewhere -- x = c.send('hi') # Inferred type of 'x' is list[str] -- async def bar() -> None: -- y = await c # Inferred type of 'y' is int -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`collections.abc.Coroutine` -+ and ``typing.Coroutine`` in type annotations. - - .. 
versionadded:: 3.5.3 - -@@ -3400,34 +3446,9 @@ - - Deprecated alias to :class:`collections.abc.AsyncGenerator`. - -- An async generator can be annotated by the generic type -- ``AsyncGenerator[YieldType, SendType]``. For example:: -- -- async def echo_round() -> AsyncGenerator[int, float]: -- sent = yield 0 -- while sent >= 0.0: -- rounded = await round(sent) -- sent = yield rounded -- -- Unlike normal generators, async generators cannot return a value, so there -- is no ``ReturnType`` type parameter. As with :class:`Generator`, the -- ``SendType`` behaves contravariantly. -- -- If your generator will only yield values, set the ``SendType`` to -- ``None``:: -- -- async def infinite_stream(start: int) -> AsyncGenerator[int, None]: -- while True: -- yield start -- start = await increment(start) -- -- Alternatively, annotate your generator as having a return type of -- either ``AsyncIterable[YieldType]`` or ``AsyncIterator[YieldType]``:: -- -- async def infinite_stream(start: int) -> AsyncIterator[int]: -- while True: -- yield start -- start = await increment(start) -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`collections.abc.AsyncGenerator` -+ and ``typing.AsyncGenerator`` in type annotations. - - .. versionadded:: 3.6.1 - -@@ -3506,34 +3527,9 @@ - - Deprecated alias to :class:`collections.abc.Generator`. - -- A generator can be annotated by the generic type -- ``Generator[YieldType, SendType, ReturnType]``. For example:: -- -- def echo_round() -> Generator[int, float, str]: -- sent = yield 0 -- while sent >= 0: -- sent = yield round(sent) -- return 'Done' -- -- Note that unlike many other generics in the typing module, the ``SendType`` -- of :class:`Generator` behaves contravariantly, not covariantly or -- invariantly. -- -- If your generator will only yield values, set the ``SendType`` and -- ``ReturnType`` to ``None``:: -- -- def infinite_stream(start: int) -> Generator[int, None, None]: -- while True: -- yield start -- start += 1 -- -- Alternatively, annotate your generator as having a return type of -- either ``Iterable[YieldType]`` or ``Iterator[YieldType]``:: -- -- def infinite_stream(start: int) -> Iterator[int]: -- while True: -- yield start -- start += 1 -+ See :ref:`annotating-generators-and-coroutines` -+ for details on using :class:`collections.abc.Generator` -+ and ``typing.Generator`` in type annotations. - - .. deprecated:: 3.9 - :class:`collections.abc.Generator` now supports subscripting (``[]``). -diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst -index 8dcb8c2aa54..1fbef02a2c6 100644 ---- a/Doc/library/unittest.mock.rst -+++ b/Doc/library/unittest.mock.rst -@@ -856,6 +856,20 @@ - 3 - >>> p.assert_called_once_with() - -+.. caution:: -+ -+ If an :exc:`AttributeError` is raised by :class:`PropertyMock`, -+ it will be interpreted as a missing descriptor and -+ :meth:`~object.__getattr__` will be called on the parent mock:: -+ -+ >>> m = MagicMock() -+ >>> no_attribute = PropertyMock(side_effect=AttributeError) -+ >>> type(m).my_property = no_attribute -+ >>> m.my_property -+ -+ -+ See :meth:`~object.__getattr__` for details. -+ - - .. 
class:: AsyncMock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, unsafe=False, **kwargs) - -diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst -index 68a8ddee0f2..54ea8bb40df 100644 ---- a/Doc/library/unittest.rst -+++ b/Doc/library/unittest.rst -@@ -2308,8 +2308,8 @@ - (see :ref:`Warning control `), - otherwise it will be set to ``'default'``. - -- Calling ``main`` actually returns an instance of the ``TestProgram`` class. -- This stores the result of the tests run as the ``result`` attribute. -+ Calling ``main`` returns an object with the ``result`` attribute that contains -+ the result of the tests run as a :class:`unittest.TestResult`. - - .. versionchanged:: 3.1 - The *exit* parameter was added. -@@ -2521,7 +2521,7 @@ - .. versionadded:: 3.2 - - The :option:`-c/--catch ` command-line option to unittest, --along with the ``catchbreak`` parameter to :func:`unittest.main()`, provide -+along with the ``catchbreak`` parameter to :func:`unittest.main`, provide - more friendly handling of control-C during a test run. With catch break - behavior enabled control-C will allow the currently running test to complete, - and the test run will then end and report all the results so far. A second diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst -index cd402e87a82..fb5353e1895 100644 +index 27909b763e9..fb5353e1895 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -22,11 +22,19 @@ @@ -9816,46 +811,6 @@ index cd402e87a82..fb5353e1895 100644 The :mod:`urllib.parse` module defines functions that fall into two broad categories: URL parsing and URL quoting. These are covered in detail in the following sections. -@@ -173,7 +181,7 @@ - Added IPv6 URL parsing capabilities. - - .. versionchanged:: 3.3 -- The fragment is now parsed for all URL schemes (unless *allow_fragment* is -+ The fragment is now parsed for all URL schemes (unless *allow_fragments* is - false), in accordance with :rfc:`3986`. Previously, an allowlist of - schemes that support fragments existed. - -diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst -index 8705adfb892..d7de8a16438 100644 ---- a/Doc/library/urllib.request.rst -+++ b/Doc/library/urllib.request.rst -@@ -252,7 +252,7 @@ - - *method* should be a string that indicates the HTTP request method that - will be used (e.g. ``'HEAD'``). If provided, its value is stored in the -- :attr:`~Request.method` attribute and is used by :meth:`get_method()`. -+ :attr:`~Request.method` attribute and is used by :meth:`get_method`. - The default is ``'GET'`` if *data* is ``None`` or ``'POST'`` otherwise. - Subclasses may indicate a different default method by setting the - :attr:`~Request.method` attribute in the class itself. -@@ -1103,7 +1103,7 @@ - - .. versionchanged:: 3.2 - This method is applicable only for local hostnames. When a remote -- hostname is given, an :exc:`~urllib.error.URLError` is raised. -+ hostname is given, a :exc:`~urllib.error.URLError` is raised. - - - .. _data-handler-objects: -@@ -1118,7 +1118,7 @@ - ignores white spaces in base64 encoded data URLs so the URL may be wrapped - in whatever source file it comes from. But even though some browsers don't - mind about a missing padding at the end of a base64 encoded data URL, this -- implementation will raise an :exc:`ValueError` in that case. -+ implementation will raise a :exc:`ValueError` in that case. - - - .. 
_ftp-handler-objects: diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 57b0ee7157c..c24945169e8 100644 --- a/Doc/library/venv.rst @@ -9869,36 +824,8 @@ index 57b0ee7157c..c24945169e8 100644 Creating virtual environments ----------------------------- -diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst -index ba0ed23b4e6..454f05419ab 100644 ---- a/Doc/library/wave.rst -+++ b/Doc/library/wave.rst -@@ -46,8 +46,8 @@ - the file object. - - The :func:`.open` function may be used in a :keyword:`with` statement. When -- the :keyword:`!with` block completes, the :meth:`Wave_read.close()` or -- :meth:`Wave_write.close()` method is called. -+ the :keyword:`!with` block completes, the :meth:`Wave_read.close` or -+ :meth:`Wave_write.close` method is called. - - .. versionchanged:: 3.4 - Added support for unseekable files. -diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst -index d6e062df945..2a25ed045c6 100644 ---- a/Doc/library/weakref.rst -+++ b/Doc/library/weakref.rst -@@ -197,7 +197,7 @@ - >>> del k1 # d = {k2: 2} - - .. versionchanged:: 3.9 -- Added support for ``|`` and ``|=`` operators, specified in :pep:`584`. -+ Added support for ``|`` and ``|=`` operators, as specified in :pep:`584`. - - :class:`WeakKeyDictionary` objects have an additional method that - exposes the internal references directly. The references are not guaranteed to diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst -index df22c5f5e1e..2fed2e817e8 100644 +index c34b2170f8f..2fed2e817e8 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -33,6 +33,13 @@ @@ -9915,33 +842,7 @@ index df22c5f5e1e..2fed2e817e8 100644 The script :program:`webbrowser` can be used as a command-line interface for the module. It accepts a URL as the argument. It accepts the following optional parameters: ``-n`` opens the URL in a new browser window, if possible; -@@ -62,6 +69,8 @@ - (note that under many window managers this will occur regardless of the - setting of this variable). - -+ Returns ``True`` if a browser was successfully launched, ``False`` otherwise. -+ - Note that on some platforms, trying to open a filename using this function, - may work and start the operating system's associated program. However, this - is neither supported nor portable. -@@ -74,11 +83,16 @@ - Open *url* in a new window of the default browser, if possible, otherwise, open - *url* in the only browser window. - -+ Returns ``True`` if a browser was successfully launched, ``False`` otherwise. -+ -+ - .. function:: open_new_tab(url) - - Open *url* in a new page ("tab") of the default browser, if possible, otherwise - equivalent to :func:`open_new`. - -+ Returns ``True`` if a browser was successfully launched, ``False`` otherwise. -+ - - .. function:: get(using=None) - -@@ -147,6 +161,8 @@ +@@ -154,6 +161,8 @@ +------------------------+-----------------------------------------+-------+ | ``'chromium-browser'`` | :class:`Chromium('chromium-browser')` | | +------------------------+-----------------------------------------+-------+ @@ -9950,7 +851,7 @@ index df22c5f5e1e..2fed2e817e8 100644 Notes: -@@ -161,7 +177,11 @@ +@@ -168,7 +177,11 @@ Only on Windows platforms. (3) @@ -9963,7 +864,7 @@ index df22c5f5e1e..2fed2e817e8 100644 .. versionadded:: 3.3 Support for Chrome/Chromium has been added. -@@ -174,6 +194,9 @@ +@@ -181,6 +194,9 @@ .. deprecated-removed:: 3.11 3.13 :class:`MacOSX` is deprecated, use :class:`MacOSXOSAScript` instead. 
@@ -9973,1839 +874,11 @@ index df22c5f5e1e..2fed2e817e8 100644 Here are some simple examples:: url = 'https://docs.python.org/' -diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst -index 2fedd99e8be..c3c577bc351 100644 ---- a/Doc/library/xml.etree.elementtree.rst -+++ b/Doc/library/xml.etree.elementtree.rst -@@ -508,7 +508,7 @@ - `C14N 2.0 `_ transformation function. - - Canonicalization is a way to normalise XML output in a way that allows -- byte-by-byte comparisons and digital signatures. It reduced the freedom -+ byte-by-byte comparisons and digital signatures. It reduces the freedom - that XML serializers have and instead generates a more constrained XML - representation. The main restrictions regard the placement of namespace - declarations, the ordering of attributes, and ignorable whitespace. -@@ -869,6 +869,7 @@ - - .. module:: xml.etree.ElementTree - :noindex: -+ :no-index: - - .. class:: Element(tag, attrib={}, **extra) - -@@ -965,7 +966,7 @@ - - .. method:: extend(subelements) - -- Appends *subelements* from a sequence object with zero or more elements. -+ Appends *subelements* from an iterable of elements. - Raises :exc:`TypeError` if a subelement is not an :class:`Element`. - - .. versionadded:: 3.2 -@@ -1053,9 +1054,10 @@ - :meth:`~object.__getitem__`, :meth:`~object.__setitem__`, - :meth:`~object.__len__`. - -- Caution: Elements with no subelements will test as ``False``. Testing the -- truth value of an Element is deprecated and will raise an exception in -- Python 3.14. Use specific ``len(elem)`` or ``elem is None`` test instead.:: -+ Caution: Elements with no subelements will test as ``False``. In a future -+ release of Python, all elements will test as ``True`` regardless of whether -+ subelements exist. Instead, prefer explicit ``len(elem)`` or -+ ``elem is not None`` tests.:: - - element = root.find('foo') - -diff --git a/Doc/library/zipapp.rst b/Doc/library/zipapp.rst -index cf561b454e9..cdaba07ab46 100644 ---- a/Doc/library/zipapp.rst -+++ b/Doc/library/zipapp.rst -@@ -332,7 +332,7 @@ - interpreter name, and then a newline (``b'\n'``) character. The interpreter - name can be anything acceptable to the OS "shebang" processing, or the Python - launcher on Windows. The interpreter should be encoded in UTF-8 on Windows, -- and in :func:`sys.getfilesystemencoding()` on POSIX. -+ and in :func:`sys.getfilesystemencoding` on POSIX. - 2. Standard zipfile data, as generated by the :mod:`zipfile` module. The - zipfile content *must* include a file called ``__main__.py`` (which must be - in the "root" of the zipfile - i.e., it cannot be in a subdirectory). The -diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst -index 374404bf33a..b757cf6bee3 100644 ---- a/Doc/reference/compound_stmts.rst -+++ b/Doc/reference/compound_stmts.rst -@@ -245,13 +245,12 @@ - until one is found that matches the exception. - An expression-less :keyword:`!except` clause, if present, must be last; - it matches any exception. --For an :keyword:`!except` clause with an expression, --that expression is evaluated, and the clause matches the exception --if the resulting object is "compatible" with the exception. An object is --compatible with an exception if the object is the class or a --:term:`non-virtual base class ` of the exception object, --or a tuple containing an item that is the class or a non-virtual base class --of the exception object. 
-+ -+For an :keyword:`!except` clause with an expression, the -+expression must evaluate to an exception type or a tuple of exception types. -+The raised exception matches an :keyword:`!except` clause whose expression evaluates -+to the class or a :term:`non-virtual base class ` of the exception object, -+or to a tuple that contains such a class. - - If no :keyword:`!except` clause matches the exception, - the search for an exception handler -@@ -378,8 +377,10 @@ - ... - ExceptionGroup('', (BlockingIOError())) - --An :keyword:`!except*` clause must have a matching type, --and this type cannot be a subclass of :exc:`BaseExceptionGroup`. -+An :keyword:`!except*` clause must have a matching expression; it cannot be ``except*:``. -+Furthermore, this expression cannot contain exception group types, because that would -+have ambiguous semantics. -+ - It is not possible to mix :keyword:`except` and :keyword:`!except*` - in the same :keyword:`try`. - :keyword:`break`, :keyword:`continue` and :keyword:`return` -@@ -840,7 +841,7 @@ - : | "None" - : | "True" - : | "False" -- : | `signed_number`: NUMBER | "-" NUMBER -+ signed_number: ["-"] NUMBER - - The rule ``strings`` and the token ``NUMBER`` are defined in the - :doc:`standard Python grammar <./grammar>`. Triple-quoted strings are -diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst -index 602014deeba..7cff9545dd2 100644 ---- a/Doc/reference/datamodel.rst -+++ b/Doc/reference/datamodel.rst -@@ -106,12 +106,16 @@ - Types affect almost all aspects of object behavior. Even the importance of - object identity is affected in some sense: for immutable types, operations that - compute new values may actually return a reference to any existing object with --the same type and value, while for mutable objects this is not allowed. E.g., --after ``a = 1; b = 1``, ``a`` and ``b`` may or may not refer to the same object --with the value one, depending on the implementation, but after ``c = []; d = --[]``, ``c`` and ``d`` are guaranteed to refer to two different, unique, newly --created empty lists. (Note that ``c = d = []`` assigns the same object to both --``c`` and ``d``.) -+the same type and value, while for mutable objects this is not allowed. -+For example, after ``a = 1; b = 1``, *a* and *b* may or may not refer to -+the same object with the value one, depending on the implementation. -+This is because :class:`int` is an immutable type, so the reference to ``1`` -+can be reused. This behaviour depends on the implementation used, so should -+not be relied upon, but is something to be aware of when making use of object -+identity tests. -+However, after ``c = []; d = []``, *c* and *d* are guaranteed to refer to two -+different, unique, newly created empty lists. (Note that ``e = f = []`` assigns -+the *same* object to both *e* and *f*.) - - - .. _types: -@@ -215,7 +219,7 @@ - - * A sign is shown only when the number is negative. - --Python distinguishes between integers, floating point numbers, and complex -+Python distinguishes between integers, floating-point numbers, and complex - numbers: - - -@@ -259,18 +263,18 @@ - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - .. index:: -- pair: object; floating point -- pair: floating point; number -+ pair: object; floating-point -+ pair: floating-point; number - pair: C; language - pair: Java; language - --These represent machine-level double precision floating point numbers. You are -+These represent machine-level double precision floating-point numbers. 
You are - at the mercy of the underlying machine architecture (and C or Java - implementation) for the accepted range and handling of overflow. Python does not --support single-precision floating point numbers; the savings in processor and -+support single-precision floating-point numbers; the savings in processor and - memory usage that are usually the reason for using these are dwarfed by the - overhead of using objects in Python, so there is no reason to complicate the --language with two kinds of floating point numbers. -+language with two kinds of floating-point numbers. - - - :class:`numbers.Complex` (:class:`complex`) -@@ -281,7 +285,7 @@ - pair: complex; number - - These represent complex numbers as a pair of machine-level double precision --floating point numbers. The same caveats apply as for floating point numbers. -+floating-point numbers. The same caveats apply as for floating-point numbers. - The real and imaginary parts of a complex number ``z`` can be retrieved through - the read-only attributes ``z.real`` and ``z.imag``. - -@@ -373,7 +377,7 @@ - - A bytes object is an immutable array. The items are 8-bit bytes, - represented by integers in the range 0 <= x < 256. Bytes literals -- (like ``b'abc'``) and the built-in :func:`bytes()` constructor -+ (like ``b'abc'``) and the built-in :func:`bytes` constructor - can be used to create bytes objects. Also, bytes objects can be - decoded to strings via the :meth:`~bytes.decode` method. - -@@ -492,7 +496,7 @@ - Replacing an existing key does not change the order, however removing a key - and re-inserting it will add it to the end instead of keeping its old place. - --Dictionaries are mutable; they can be created by the ``{...}`` notation (see -+Dictionaries are mutable; they can be created by the ``{}`` notation (see - section :ref:`dict`). - - .. index:: -@@ -727,14 +731,7 @@ - itself, so that calling either ``x.f(1)`` or ``C.f(1)`` is equivalent to - calling ``f(C,1)`` where ``f`` is the underlying function. - --Note that the transformation from :ref:`function object ` --to instance method --object happens each time the attribute is retrieved from the instance. In --some cases, a fruitful optimization is to assign the attribute to a local --variable and call that local variable. Also notice that this --transformation only happens for user-defined functions; other callable --objects (and all non-callable objects) are retrieved without --transformation. It is also important to note that user-defined functions -+It is important to note that user-defined functions - which are attributes of a class instance are not converted to bound - methods; this *only* happens when the function is an attribute of the - class. -@@ -1643,6 +1640,8 @@ - - It is not guaranteed that :meth:`__del__` methods are called for objects - that still exist when the interpreter exits. -+ :class:`weakref.finalize` provides a straightforward way to register -+ a cleanup function to be called when an object is garbage collected. - - .. 
note:: - -diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst -index 38f00ae2aa0..b38eadfaf30 100644 ---- a/Doc/reference/expressions.rst -+++ b/Doc/reference/expressions.rst -@@ -33,7 +33,7 @@ - - * If either argument is a complex number, the other is converted to complex; - --* otherwise, if either argument is a floating point number, the other is -+* otherwise, if either argument is a floating-point number, the other is - converted to floating point; - - * otherwise, both must be integers and no conversion is necessary. -@@ -83,18 +83,47 @@ - pair: name; mangling - pair: private; names - --**Private name mangling:** When an identifier that textually occurs in a class --definition begins with two or more underscore characters and does not end in two --or more underscores, it is considered a :dfn:`private name` of that class. --Private names are transformed to a longer form before code is generated for --them. The transformation inserts the class name, with leading underscores --removed and a single underscore inserted, in front of the name. For example, --the identifier ``__spam`` occurring in a class named ``Ham`` will be transformed --to ``_Ham__spam``. This transformation is independent of the syntactical --context in which the identifier is used. If the transformed name is extremely --long (longer than 255 characters), implementation defined truncation may happen. --If the class name consists only of underscores, no transformation is done. -+Private name mangling -+^^^^^^^^^^^^^^^^^^^^^ - -+When an identifier that textually occurs in a class definition begins with two -+or more underscore characters and does not end in two or more underscores, it -+is considered a :dfn:`private name` of that class. -+ -+.. seealso:: -+ -+ The :ref:`class specifications `. -+ -+More precisely, private names are transformed to a longer form before code is -+generated for them. If the transformed name is longer than 255 characters, -+implementation-defined truncation may happen. -+ -+The transformation is independent of the syntactical context in which the -+identifier is used but only the following private identifiers are mangled: -+ -+- Any name used as the name of a variable that is assigned or read or any -+ name of an attribute being accessed. -+ -+ The ``__name__`` attribute of nested functions, classes, and type aliases -+ is however not mangled. -+ -+- The name of imported modules, e.g., ``__spam`` in ``import __spam``. -+ If the module is part of a package (i.e., its name contains a dot), -+ the name is *not* mangled, e.g., the ``__foo`` in ``import __foo.bar`` -+ is not mangled. -+ -+- The name of an imported member, e.g., ``__f`` in ``from spam import __f``. -+ -+The transformation rule is defined as follows: -+ -+- The class name, with leading underscores removed and a single leading -+ underscore inserted, is inserted in front of the identifier, e.g., the -+ identifier ``__spam`` occurring in a class named ``Foo``, ``_Foo`` or -+ ``__Foo`` is transformed to ``_Foo__spam``. -+ -+- If the class name consists only of underscores, the transformation is the -+ identity, e.g., the identifier ``__spam`` occurring in a class named ``_`` -+ or ``__`` is left as is. - - .. _atom-literals: - -@@ -110,8 +139,8 @@ - : | `integer` | `floatnumber` | `imagnumber` - - Evaluation of a literal yields an object of the given type (string, bytes, --integer, floating point number, complex number) with the given value. 
The value --may be approximated in the case of floating point and imaginary (complex) -+integer, floating-point number, complex number) with the given value. The value -+may be approximated in the case of floating-point and imaginary (complex) - literals. See section :ref:`literals` for details. - - .. index:: -@@ -218,10 +247,12 @@ - :keyword:`!for` or :keyword:`!async for` clause following the leading - expression, may contain additional :keyword:`!for` or :keyword:`!async for` - clauses, and may also use :keyword:`await` expressions. --If a comprehension contains either :keyword:`!async for` clauses or --:keyword:`!await` expressions or other asynchronous comprehensions it is called --an :dfn:`asynchronous comprehension`. An asynchronous comprehension may --suspend the execution of the coroutine function in which it appears. -+ -+If a comprehension contains :keyword:`!async for` clauses, or if it contains -+:keyword:`!await` expressions or other asynchronous comprehensions anywhere except -+the iterable expression in the leftmost :keyword:`!for` clause, it is called an -+:dfn:`asynchronous comprehension`. An asynchronous comprehension may suspend the -+execution of the coroutine function in which it appears. - See also :pep:`530`. - - .. versionadded:: 3.6 -@@ -734,7 +765,7 @@ - .. coroutinemethod:: agen.asend(value) - - Returns an awaitable which when run resumes the execution of the -- asynchronous generator. As with the :meth:`~generator.send()` method for a -+ asynchronous generator. As with the :meth:`~generator.send` method for a - generator, this "sends" a value into the asynchronous generator function, - and the *value* argument becomes the result of the current yield expression. - The awaitable returned by the :meth:`asend` method will return the next -@@ -1204,7 +1235,8 @@ - Raising a negative number to a fractional power results in a :class:`complex` - number. (In earlier versions it raised a :exc:`ValueError`.) - --This operation can be customized using the special :meth:`~object.__pow__` method. -+This operation can be customized using the special :meth:`~object.__pow__` and -+:meth:`~object.__rpow__` methods. - - .. _unary: - -@@ -1292,6 +1324,9 @@ - The ``@`` (at) operator is intended to be used for matrix multiplication. No - builtin Python types implement this operator. - -+This operation can be customized using the special :meth:`~object.__matmul__` and -+:meth:`~object.__rmatmul__` methods. -+ - .. versionadded:: 3.5 - - .. index:: -@@ -1307,8 +1342,10 @@ - applied to the result. Division by zero raises the :exc:`ZeroDivisionError` - exception. - --This operation can be customized using the special :meth:`~object.__truediv__` and --:meth:`~object.__floordiv__` methods. -+The division operation can be customized using the special :meth:`~object.__truediv__` -+and :meth:`~object.__rtruediv__` methods. -+The floor division operation can be customized using the special -+:meth:`~object.__floordiv__` and :meth:`~object.__rfloordiv__` methods. - - .. index:: - single: modulo -@@ -1317,7 +1354,7 @@ - The ``%`` (modulo) operator yields the remainder from the division of the first - argument by the second. The numeric arguments are first converted to a common - type. A zero right argument raises the :exc:`ZeroDivisionError` exception. The --arguments may be floating point numbers, e.g., ``3.14%0.7`` equals ``0.34`` -+arguments may be floating-point numbers, e.g., ``3.14%0.7`` equals ``0.34`` - (since ``3.14`` equals ``4*0.7 + 0.34``.) 
The modulo operator always yields a - result with the same sign as its second operand (or zero); the absolute value of - the result is strictly smaller than the absolute value of the second operand -@@ -1333,11 +1370,12 @@ - known as interpolation). The syntax for string formatting is described in the - Python Library Reference, section :ref:`old-string-formatting`. - --The *modulo* operation can be customized using the special :meth:`~object.__mod__` method. -+The *modulo* operation can be customized using the special :meth:`~object.__mod__` -+and :meth:`~object.__rmod__` methods. - - The floor division operator, the modulo operator, and the :func:`divmod` --function are not defined for complex numbers. Instead, convert to a floating --point number using the :func:`abs` function if appropriate. -+function are not defined for complex numbers. Instead, convert to a -+floating-point number using the :func:`abs` function if appropriate. - - .. index:: - single: addition -@@ -1360,7 +1398,8 @@ - The ``-`` (subtraction) operator yields the difference of its arguments. The - numeric arguments are first converted to a common type. - --This operation can be customized using the special :meth:`~object.__sub__` method. -+This operation can be customized using the special :meth:`~object.__sub__` and -+:meth:`~object.__rsub__` methods. - - - .. _shifting: -@@ -1381,8 +1420,10 @@ - These operators accept integers as arguments. They shift the first argument to - the left or right by the number of bits given by the second argument. - --This operation can be customized using the special :meth:`~object.__lshift__` and --:meth:`~object.__rshift__` methods. -+The left shift operation can be customized using the special :meth:`~object.__lshift__` -+and :meth:`~object.__rlshift__` methods. -+The right shift operation can be customized using the special :meth:`~object.__rshift__` -+and :meth:`~object.__rrshift__` methods. - - .. index:: pair: exception; ValueError - -diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst -index f8c9724114d..7de995b1270 100644 ---- a/Doc/reference/import.rst -+++ b/Doc/reference/import.rst -@@ -281,7 +281,7 @@ - searches :data:`sys.meta_path`, which contains a list of meta path finder - objects. These finders are queried in order to see if they know how to handle - the named module. Meta path finders must implement a method called --:meth:`~importlib.abc.MetaPathFinder.find_spec()` which takes three arguments: -+:meth:`~importlib.abc.MetaPathFinder.find_spec` which takes three arguments: - a name, an import path, and (optionally) a target module. The meta path - finder can use any strategy it wants to determine whether it can handle - the named module or not. -@@ -292,7 +292,7 @@ - a spec, then a :exc:`ModuleNotFoundError` is raised. Any other exceptions - raised are simply propagated up, aborting the import process. - --The :meth:`~importlib.abc.MetaPathFinder.find_spec()` method of meta path -+The :meth:`~importlib.abc.MetaPathFinder.find_spec` method of meta path - finders is called with two or three arguments. The first is the fully - qualified name of the module being imported, for example ``foo.bar.baz``. - The second argument is the path entries to use for the module search. 
For -diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst -index 103d6ef05e4..cfae01ba97a 100644 ---- a/Doc/reference/lexical_analysis.rst -+++ b/Doc/reference/lexical_analysis.rst -@@ -503,11 +503,10 @@ - single: r"; raw string literal - - Both string and bytes literals may optionally be prefixed with a letter ``'r'`` --or ``'R'``; such strings are called :dfn:`raw strings` and treat backslashes as --literal characters. As a result, in string literals, ``'\U'`` and ``'\u'`` --escapes in raw strings are not treated specially. Given that Python 2.x's raw --unicode literals behave differently than Python 3.x's the ``'ur'`` syntax --is not supported. -+or ``'R'``; such constructs are called :dfn:`raw string literals` -+and :dfn:`raw bytes literals` respectively and treat backslashes as -+literal characters. As a result, in raw string literals, ``'\U'`` and ``'\u'`` -+escapes are not treated specially. - - .. versionadded:: 3.3 - The ``'rb'`` prefix of raw bytes literals has been added as a synonym -@@ -879,10 +878,10 @@ - ---------------- - - .. index:: number, numeric literal, integer literal -- floating point literal, hexadecimal literal -+ floating-point literal, hexadecimal literal - octal literal, binary literal, decimal literal, imaginary literal, complex literal - --There are three types of numeric literals: integers, floating point numbers, and -+There are three types of numeric literals: integers, floating-point numbers, and - imaginary numbers. There are no complex literals (complex numbers can be formed - by adding a real number and an imaginary number). - -@@ -943,10 +942,10 @@ - single: _ (underscore); in numeric literal - .. _floating: - --Floating point literals -+Floating-point literals - ----------------------- - --Floating point literals are described by the following lexical definitions: -+Floating-point literals are described by the following lexical definitions: - - .. productionlist:: python-grammar - floatnumber: `pointfloat` | `exponentfloat` -@@ -958,10 +957,10 @@ - - Note that the integer and exponent parts are always interpreted using radix 10. - For example, ``077e010`` is legal, and denotes the same number as ``77e10``. The --allowed range of floating point literals is implementation-dependent. As in -+allowed range of floating-point literals is implementation-dependent. As in - integer literals, underscores are supported for digit grouping. - --Some examples of floating point literals:: -+Some examples of floating-point literals:: - - 3.14 10. .001 1e100 3.14e-10 0e0 3.14_15_93 - -@@ -982,9 +981,9 @@ - imagnumber: (`floatnumber` | `digitpart`) ("j" | "J") - - An imaginary literal yields a complex number with a real part of 0.0. Complex --numbers are represented as a pair of floating point numbers and have the same -+numbers are represented as a pair of floating-point numbers and have the same - restrictions on their range. To create a complex number with a nonzero real --part, add a floating point number to it, e.g., ``(3+4j)``. Some examples of -+part, add a floating-point number to it, e.g., ``(3+4j)``. Some examples of - imaginary literals:: - - 3.14j 10.j 10j .001j 1e100j 3.14e-10j 3.14_15_93j -@@ -1019,9 +1018,9 @@ - .. code-block:: none - - ( ) [ ] { } -- , : . ; @ = -> -- += -= *= /= //= %= @= -- &= |= ^= >>= <<= **= -+ , : ! . ; @ = -+ -> += -= *= /= //= %= -+ @= &= |= ^= >>= <<= **= - - The period can also occur in floating-point and imaginary literals. 
A sequence - of three periods has a special meaning as an ellipsis literal. The second half -diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst -index a253482156d..618664b23f0 100644 ---- a/Doc/reference/simple_stmts.rst -+++ b/Doc/reference/simple_stmts.rst -@@ -293,7 +293,7 @@ - operation specific to the type of assignment on the two operands, and assigns - the result to the original target. The target is only evaluated once. - --An augmented assignment expression like ``x += 1`` can be rewritten as ``x = x + -+An augmented assignment statement like ``x += 1`` can be rewritten as ``x = x + - 1`` to achieve a similar, but not exactly equal effect. In the augmented - version, ``x`` is only evaluated once. Also, when possible, the actual operation - is performed *in-place*, meaning that rather than creating a new object and -@@ -333,7 +333,9 @@ - - The difference from normal :ref:`assignment` is that only a single target is allowed. - --For simple names as assignment targets, if in class or module scope, -+The assignment target is considered "simple" if it consists of a single -+name that is not enclosed in parentheses. -+For simple assignment targets, if in class or module scope, - the annotations are evaluated and stored in a special class or module - attribute :attr:`__annotations__` - that is a dictionary mapping from variable names (mangled if private) to -@@ -341,7 +343,8 @@ - created at the start of class or module body execution, if annotations - are found statically. - --For expressions as assignment targets, the annotations are evaluated if -+If the assignment target is not simple (an attribute, subscript node, or -+parenthesized name), the annotation is evaluated if - in class or module scope, but not stored. - - If a name is annotated in a function scope, then this name is local for -diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt -index 3ae65bc944d..068fe0cb426 100644 ---- a/Doc/requirements-oldest-sphinx.txt -+++ b/Doc/requirements-oldest-sphinx.txt -@@ -14,16 +14,16 @@ - - alabaster==0.7.16 - Babel==2.15.0 --certifi==2024.2.2 -+certifi==2024.7.4 - charset-normalizer==3.3.2 - docutils==0.19 - idna==3.7 - imagesize==1.4.1 - Jinja2==3.1.4 - MarkupSafe==2.1.5 --packaging==24.0 -+packaging==24.1 - Pygments==2.18.0 --requests==2.32.2 -+requests==2.32.3 - snowballstemmer==2.2.0 - Sphinx==6.2.1 - sphinxcontrib-applehelp==1.0.8 -@@ -32,4 +32,4 @@ - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.7 - sphinxcontrib-serializinghtml==1.1.10 --urllib3==2.2.1 -+urllib3==2.2.2 -diff --git a/Doc/requirements.txt b/Doc/requirements.txt -index b47a9d8a863..bf1028020b7 100644 ---- a/Doc/requirements.txt -+++ b/Doc/requirements.txt -@@ -6,12 +6,12 @@ - # Sphinx version is pinned so that new versions that introduce new warnings - # won't suddenly cause build failures. Updating the version is fine as long - # as no warnings are raised by doing so. --sphinx~=7.3.0 -+sphinx~=8.0.0 - - blurb - --sphinxext-opengraph==0.7.5 --sphinx-notfound-page==1.0.0 -+sphinxext-opengraph~=0.9.0 -+sphinx-notfound-page~=1.0.0 - - # The theme used by the documentation is stored separately, so we need - # to install that as well. -diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py -index c50b00636c3..8f92ab298eb 100644 ---- a/Doc/tools/check-warnings.py -+++ b/Doc/tools/check-warnings.py -@@ -2,6 +2,7 @@ - """ - Check the output of running Sphinx in nit-picky mode (missing references). 
- """ -+ - from __future__ import annotations - - import argparse -@@ -14,7 +15,7 @@ - from typing import TextIO - - # Fail if NEWS nit found before this line number --NEWS_NIT_THRESHOLD = 200 -+NEWS_NIT_THRESHOLD = 300 - - # Exclude these whether they're dirty or clean, - # because they trigger a rebuild of dirty files. -@@ -206,7 +207,9 @@ - - - def fail_if_regression( -- warnings: list[str], files_with_expected_nits: set[str], files_with_nits: set[str] -+ warnings: list[str], -+ files_with_expected_nits: set[str], -+ files_with_nits: set[str], - ) -> int: - """ - Ensure some files always pass Sphinx nit-picky mode (no missing references). -@@ -252,17 +255,11 @@ - """ - Ensure no warnings are found in the NEWS file before a given line number. - """ -- news_nits = ( -- warning -- for warning in warnings -- if "/build/NEWS:" in warning -- ) -+ news_nits = (warning for warning in warnings if "/build/NEWS:" in warning) - - # Nits found before the threshold line - new_news_nits = [ -- nit -- for nit in news_nits -- if int(nit.split(":")[1]) <= threshold -+ nit for nit in news_nits if int(nit.split(":")[1]) <= threshold - ] - - if new_news_nits: -@@ -311,7 +308,8 @@ - exit_code = 0 - - wrong_directory_msg = "Must run this script from the repo root" -- assert Path("Doc").exists() and Path("Doc").is_dir(), wrong_directory_msg -+ if not Path("Doc").exists() or not Path("Doc").is_dir(): -+ raise RuntimeError(wrong_directory_msg) - - with Path("Doc/sphinx-warnings.txt").open(encoding="UTF-8") as f: - warnings = f.read().splitlines() -@@ -339,7 +337,9 @@ - ) - - if args.fail_if_improved: -- exit_code += fail_if_improved(files_with_expected_nits, files_with_nits) -+ exit_code += fail_if_improved( -+ files_with_expected_nits, files_with_nits -+ ) - - if args.fail_if_new_news_nit: - exit_code += fail_if_new_news_nit(warnings, args.fail_if_new_news_nit) ---- /dev/null -+++ b/Doc/tools/extensions/audit_events.py -@@ -0,0 +1,264 @@ -+"""Support for documenting audit events.""" -+ -+from __future__ import annotations -+ -+import re -+from typing import TYPE_CHECKING -+ -+from docutils import nodes -+from sphinx.errors import NoUri -+from sphinx.locale import _ as sphinx_gettext -+from sphinx.transforms.post_transforms import SphinxPostTransform -+from sphinx.util import logging -+from sphinx.util.docutils import SphinxDirective -+ -+if TYPE_CHECKING: -+ from collections.abc import Iterator -+ -+ from sphinx.application import Sphinx -+ from sphinx.builders import Builder -+ from sphinx.environment import BuildEnvironment -+ -+logger = logging.getLogger(__name__) -+ -+# This list of sets are allowable synonyms for event argument names. -+# If two names are in the same set, they are treated as equal for the -+# purposes of warning. This won't help if the number of arguments is -+# different! 
-+_SYNONYMS = [ -+ frozenset({"file", "path", "fd"}), -+] -+ -+ -+class AuditEvents: -+ def __init__(self) -> None: -+ self.events: dict[str, list[str]] = {} -+ self.sources: dict[str, list[tuple[str, str]]] = {} -+ -+ def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]: -+ for name, args in self.events.items(): -+ for source in self.sources[name]: -+ yield name, args, source -+ -+ def add_event( -+ self, name, args: list[str], source: tuple[str, str] -+ ) -> None: -+ if name in self.events: -+ self._check_args_match(name, args) -+ else: -+ self.events[name] = args -+ self.sources.setdefault(name, []).append(source) -+ -+ def _check_args_match(self, name: str, args: list[str]) -> None: -+ current_args = self.events[name] -+ msg = ( -+ f"Mismatched arguments for audit-event {name}: " -+ f"{current_args!r} != {args!r}" -+ ) -+ if current_args == args: -+ return -+ if len(current_args) != len(args): -+ logger.warning(msg) -+ return -+ for a1, a2 in zip(current_args, args, strict=False): -+ if a1 == a2: -+ continue -+ if any(a1 in s and a2 in s for s in _SYNONYMS): -+ continue -+ logger.warning(msg) -+ return -+ -+ def id_for(self, name) -> str: -+ source_count = len(self.sources.get(name, ())) -+ name_clean = re.sub(r"\W", "_", name) -+ return f"audit_event_{name_clean}_{source_count}" -+ -+ def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]: -+ for name in sorted(self.events.keys()): -+ yield name, self.events[name], self.sources[name] -+ -+ -+def initialise_audit_events(app: Sphinx) -> None: -+ """Initialise the audit_events attribute on the environment.""" -+ if not hasattr(app.env, "audit_events"): -+ app.env.audit_events = AuditEvents() -+ -+ -+def audit_events_purge( -+ app: Sphinx, env: BuildEnvironment, docname: str -+) -> None: -+ """This is to remove traces of removed documents from env.audit_events.""" -+ fresh_audit_events = AuditEvents() -+ for name, args, (doc, target) in env.audit_events: -+ if doc != docname: -+ fresh_audit_events.add_event(name, args, (doc, target)) -+ -+ -+def audit_events_merge( -+ app: Sphinx, -+ env: BuildEnvironment, -+ docnames: list[str], -+ other: BuildEnvironment, -+) -> None: -+ """In Sphinx parallel builds, this merges audit_events from subprocesses.""" -+ for name, args, source in other.audit_events: -+ env.audit_events.add_event(name, args, source) -+ -+ -+class AuditEvent(SphinxDirective): -+ has_content = True -+ required_arguments = 1 -+ optional_arguments = 2 -+ final_argument_whitespace = True -+ -+ _label = [ -+ sphinx_gettext( -+ "Raises an :ref:`auditing event ` " -+ "{name} with no arguments." -+ ), -+ sphinx_gettext( -+ "Raises an :ref:`auditing event ` " -+ "{name} with argument {args}." -+ ), -+ sphinx_gettext( -+ "Raises an :ref:`auditing event ` " -+ "{name} with arguments {args}." 
-+ ), -+ ] -+ -+ def run(self) -> list[nodes.paragraph]: -+ name = self.arguments[0] -+ if len(self.arguments) >= 2 and self.arguments[1]: -+ args = [ -+ arg -+ for argument in self.arguments[1].strip("'\"").split(",") -+ if (arg := argument.strip()) -+ ] -+ else: -+ args = [] -+ ids = [] -+ try: -+ target = self.arguments[2].strip("\"'") -+ except (IndexError, TypeError): -+ target = None -+ if not target: -+ target = self.env.audit_events.id_for(name) -+ ids.append(target) -+ self.env.audit_events.add_event(name, args, (self.env.docname, target)) -+ -+ node = nodes.paragraph("", classes=["audit-hook"], ids=ids) -+ self.set_source_info(node) -+ if self.content: -+ node.rawsource = '\n'.join(self.content) # for gettext -+ self.state.nested_parse(self.content, self.content_offset, node) -+ else: -+ num_args = min(2, len(args)) -+ text = self._label[num_args].format( -+ name=f"``{name}``", -+ args=", ".join(f"``{a}``" for a in args), -+ ) -+ node.rawsource = text # for gettext -+ parsed, messages = self.state.inline_text(text, self.lineno) -+ node += parsed -+ node += messages -+ return [node] -+ -+ -+class audit_event_list(nodes.General, nodes.Element): # noqa: N801 -+ pass -+ -+ -+class AuditEventListDirective(SphinxDirective): -+ def run(self) -> list[audit_event_list]: -+ return [audit_event_list()] -+ -+ -+class AuditEventListTransform(SphinxPostTransform): -+ default_priority = 500 -+ -+ def run(self) -> None: -+ if self.document.next_node(audit_event_list) is None: -+ return -+ -+ table = self._make_table(self.app.builder, self.env.docname) -+ for node in self.document.findall(audit_event_list): -+ node.replace_self(table) -+ -+ def _make_table(self, builder: Builder, docname: str) -> nodes.table: -+ table = nodes.table(cols=3) -+ group = nodes.tgroup( -+ "", -+ nodes.colspec(colwidth=30), -+ nodes.colspec(colwidth=55), -+ nodes.colspec(colwidth=15), -+ cols=3, -+ ) -+ head = nodes.thead() -+ body = nodes.tbody() -+ -+ table += group -+ group += head -+ group += body -+ -+ head += nodes.row( -+ "", -+ nodes.entry("", nodes.paragraph("", "Audit event")), -+ nodes.entry("", nodes.paragraph("", "Arguments")), -+ nodes.entry("", nodes.paragraph("", "References")), -+ ) -+ -+ for name, args, sources in builder.env.audit_events.rows(): -+ body += self._make_row(builder, docname, name, args, sources) -+ -+ return table -+ -+ @staticmethod -+ def _make_row( -+ builder: Builder, -+ docname: str, -+ name: str, -+ args: list[str], -+ sources: list[tuple[str, str]], -+ ) -> nodes.row: -+ row = nodes.row() -+ name_node = nodes.paragraph("", nodes.Text(name)) -+ row += nodes.entry("", name_node) -+ -+ args_node = nodes.paragraph() -+ for arg in args: -+ args_node += nodes.literal(arg, arg) -+ args_node += nodes.Text(", ") -+ if len(args_node.children) > 0: -+ args_node.children.pop() # remove trailing comma -+ row += nodes.entry("", args_node) -+ -+ backlinks_node = nodes.paragraph() -+ backlinks = enumerate(sorted(set(sources)), start=1) -+ for i, (doc, label) in backlinks: -+ if isinstance(label, str): -+ ref = nodes.reference("", f"[{i}]", internal=True) -+ try: -+ target = ( -+ f"{builder.get_relative_uri(docname, doc)}#{label}" -+ ) -+ except NoUri: -+ continue -+ else: -+ ref["refuri"] = target -+ backlinks_node += ref -+ row += nodes.entry("", backlinks_node) -+ return row -+ -+ -+def setup(app: Sphinx): -+ app.add_directive("audit-event", AuditEvent) -+ app.add_directive("audit-event-table", AuditEventListDirective) -+ app.add_post_transform(AuditEventListTransform) -+ 
app.connect("builder-inited", initialise_audit_events) -+ app.connect("env-purge-doc", audit_events_purge) -+ app.connect("env-merge-info", audit_events_merge) -+ return { -+ "version": "1.0", -+ "parallel_read_safe": True, -+ "parallel_write_safe": True, -+ } -diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py -index 7916b178f1c..a65cf71e4af 100644 ---- a/Doc/tools/extensions/c_annotations.py -+++ b/Doc/tools/extensions/c_annotations.py -@@ -1,226 +1,305 @@ --""" -- c_annotations.py -- ~~~~~~~~~~~~~~~~ -- -- Supports annotations for C API elements: -+"""Support annotations for C API elements. - -- * reference count annotations for C API functions. Based on -- refcount.py and anno-api.py in the old Python documentation tools. -+* Reference count annotations for C API functions. -+* Stable ABI annotations -+* Limited API annotations - -- * stable API annotations -+Configuration: -+* Set ``refcount_file`` to the path to the reference count data file. -+* Set ``stable_abi_file`` to the path to stable ABI list. -+""" - -- Usage: -- * Set the `refcount_file` config value to the path to the reference -- count data file. -- * Set the `stable_abi_file` config value to the path to stable ABI list. -+from __future__ import annotations - -- :copyright: Copyright 2007-2014 by Georg Brandl. -- :license: Python license. --""" -+import csv -+import dataclasses -+from pathlib import Path -+from typing import TYPE_CHECKING - --from os import path -+import sphinx - from docutils import nodes --from docutils.parsers.rst import directives --from docutils.parsers.rst import Directive - from docutils.statemachine import StringList --from sphinx.locale import _ as sphinx_gettext --import csv -- - from sphinx import addnodes --from sphinx.domains.c import CObject -+from sphinx.locale import _ as sphinx_gettext -+from sphinx.util.docutils import SphinxDirective - -+if TYPE_CHECKING: -+ from sphinx.application import Sphinx -+ from sphinx.util.typing import ExtensionMetadata - --REST_ROLE_MAP = { -- 'function': 'func', -- 'macro': 'macro', -- 'member': 'member', -- 'type': 'type', -- 'var': 'data', -+ROLE_TO_OBJECT_TYPE = { -+ "func": "function", -+ "macro": "macro", -+ "member": "member", -+ "type": "type", -+ "data": "var", - } - - --class RCEntry: -- def __init__(self, name): -- self.name = name -- self.args = [] -- self.result_type = '' -- self.result_refs = None -- -- --class Annotations: -- def __init__(self, refcount_filename, stable_abi_file): -- self.refcount_data = {} -- with open(refcount_filename, encoding='utf8') as fp: -- for line in fp: -- line = line.strip() -- if line[:1] in ("", "#"): -- # blank lines and comments -- continue -- parts = line.split(":", 4) -- if len(parts) != 5: -- raise ValueError(f"Wrong field count in {line!r}") -- function, type, arg, refcount, comment = parts -- # Get the entry, creating it if needed: -- try: -- entry = self.refcount_data[function] -- except KeyError: -- entry = self.refcount_data[function] = RCEntry(function) -- if not refcount or refcount == "null": -- refcount = None -- else: -- refcount = int(refcount) -- # Update the entry with the new parameter or the result -- # information. 
-- if arg: -- entry.args.append((arg, type, refcount)) -- else: -- entry.result_type = type -- entry.result_refs = refcount -- -- self.stable_abi_data = {} -- with open(stable_abi_file, encoding='utf8') as fp: -- for record in csv.DictReader(fp): -- name = record['name'] -- self.stable_abi_data[name] = record -- -- def add_annotations(self, app, doctree): -- for node in doctree.findall(addnodes.desc_content): -- par = node.parent -- if par['domain'] != 'c': -- continue -- if not par[0].has_key('ids') or not par[0]['ids']: -- continue -- name = par[0]['ids'][0] -- if name.startswith("c."): -- name = name[2:] -- -- objtype = par['objtype'] -- -- # Stable ABI annotation. These have two forms: -- # Part of the [Stable ABI](link). -- # Part of the [Stable ABI](link) since version X.Y. -- # For structs, there's some more info in the message: -- # Part of the [Limited API](link) (as an opaque struct). -- # Part of the [Stable ABI](link) (including all members). -- # Part of the [Limited API](link) (Only some members are part -- # of the stable ABI.). -- # ... all of which can have "since version X.Y" appended. -- record = self.stable_abi_data.get(name) -- if record: -- if record['role'] != objtype: -- raise ValueError( -- f"Object type mismatch in limited API annotation " -- f"for {name}: {record['role']!r} != {objtype!r}") -- stable_added = record['added'] -- message = sphinx_gettext('Part of the') -- message = message.center(len(message) + 2) -- emph_node = nodes.emphasis(message, message, -- classes=['stableabi']) -- ref_node = addnodes.pending_xref( -- 'Stable ABI', refdomain="std", reftarget='stable', -- reftype='ref', refexplicit="False") -- struct_abi_kind = record['struct_abi_kind'] -- if struct_abi_kind in {'opaque', 'members'}: -- ref_node += nodes.Text(sphinx_gettext('Limited API')) -- else: -- ref_node += nodes.Text(sphinx_gettext('Stable ABI')) -- emph_node += ref_node -- if struct_abi_kind == 'opaque': -- emph_node += nodes.Text(' ' + sphinx_gettext('(as an opaque struct)')) -- elif struct_abi_kind == 'full-abi': -- emph_node += nodes.Text(' ' + sphinx_gettext('(including all members)')) -- if record['ifdef_note']: -- emph_node += nodes.Text(' ' + record['ifdef_note']) -- if stable_added == '3.2': -- # Stable ABI was introduced in 3.2. -- pass -- else: -- emph_node += nodes.Text(' ' + sphinx_gettext('since version %s') % stable_added) -- emph_node += nodes.Text('.') -- if struct_abi_kind == 'members': -- emph_node += nodes.Text( -- ' ' + sphinx_gettext('(Only some members are part of the stable ABI.)')) -- node.insert(0, emph_node) -- -- # Unstable API annotation. -- if name.startswith('PyUnstable'): -- warn_node = nodes.admonition( -- classes=['unstable-c-api', 'warning']) -- message = sphinx_gettext('This is') + ' ' -- emph_node = nodes.emphasis(message, message) -- ref_node = addnodes.pending_xref( -- 'Unstable API', refdomain="std", -- reftarget='unstable-c-api', -- reftype='ref', refexplicit="False") -- ref_node += nodes.Text(sphinx_gettext('Unstable API')) -- emph_node += ref_node -- emph_node += nodes.Text(sphinx_gettext('. 
It may change without warning in minor releases.')) -- warn_node += emph_node -- node.insert(0, warn_node) -- -- # Return value annotation -- if objtype != 'function': -- continue -- entry = self.refcount_data.get(name) -- if not entry: -- continue -- elif not entry.result_type.endswith("Object*"): -- continue -- classes = ['refcount'] -- if entry.result_refs is None: -- rc = sphinx_gettext('Return value: Always NULL.') -- classes.append('return_null') -- elif entry.result_refs: -- rc = sphinx_gettext('Return value: New reference.') -- classes.append('return_new_ref') -- else: -- rc = sphinx_gettext('Return value: Borrowed reference.') -- classes.append('return_borrowed_ref') -- node.insert(0, nodes.emphasis(rc, rc, classes=classes)) -- -- --def init_annotations(app): -- annotations = Annotations( -- path.join(app.srcdir, app.config.refcount_file), -- path.join(app.srcdir, app.config.stable_abi_file), -+@dataclasses.dataclass(slots=True) -+class RefCountEntry: -+ # Name of the function. -+ name: str -+ # List of (argument name, type, refcount effect) tuples. -+ # (Currently not used. If it was, a dataclass might work better.) -+ args: list = dataclasses.field(default_factory=list) -+ # Return type of the function. -+ result_type: str = "" -+ # Reference count effect for the return value. -+ result_refs: int | None = None -+ -+ -+@dataclasses.dataclass(frozen=True, slots=True) -+class StableABIEntry: -+ # Role of the object. -+ # Source: Each [item_kind] in stable_abi.toml is mapped to a C Domain role. -+ role: str -+ # Name of the object. -+ # Source: [.*] in stable_abi.toml. -+ name: str -+ # Version when the object was added to the stable ABI. -+ # (Source: [.*.added] in stable_abi.toml. -+ added: str -+ # An explananatory blurb for the ifdef. -+ # Source: ``feature_macro.*.doc`` in stable_abi.toml. -+ ifdef_note: str -+ # Defines how much of the struct is exposed. Only relevant for structs. -+ # Source: [.*.struct_abi_kind] in stable_abi.toml. -+ struct_abi_kind: str -+ -+ -+def read_refcount_data(refcount_filename: Path) -> dict[str, RefCountEntry]: -+ refcount_data = {} -+ refcounts = refcount_filename.read_text(encoding="utf8") -+ for line in refcounts.splitlines(): -+ line = line.strip() -+ if not line or line.startswith("#"): -+ # blank lines and comments -+ continue -+ -+ # Each line is of the form -+ # function ':' type ':' [param name] ':' [refcount effect] ':' [comment] -+ parts = line.split(":", 4) -+ if len(parts) != 5: -+ raise ValueError(f"Wrong field count in {line!r}") -+ function, type, arg, refcount, _comment = parts -+ -+ # Get the entry, creating it if needed: -+ try: -+ entry = refcount_data[function] -+ except KeyError: -+ entry = refcount_data[function] = RefCountEntry(function) -+ if not refcount or refcount == "null": -+ refcount = None -+ else: -+ refcount = int(refcount) -+ # Update the entry with the new parameter -+ # or the result information. 
-+ if arg: -+ entry.args.append((arg, type, refcount)) -+ else: -+ entry.result_type = type -+ entry.result_refs = refcount -+ -+ return refcount_data -+ -+ -+def read_stable_abi_data(stable_abi_file: Path) -> dict[str, StableABIEntry]: -+ stable_abi_data = {} -+ with open(stable_abi_file, encoding="utf8") as fp: -+ for record in csv.DictReader(fp): -+ name = record["name"] -+ stable_abi_data[name] = StableABIEntry(**record) -+ -+ return stable_abi_data -+ -+ -+def add_annotations(app: Sphinx, doctree: nodes.document) -> None: -+ state = app.env.domaindata["c_annotations"] -+ refcount_data = state["refcount_data"] -+ stable_abi_data = state["stable_abi_data"] -+ for node in doctree.findall(addnodes.desc_content): -+ par = node.parent -+ if par["domain"] != "c": -+ continue -+ if not par[0].get("ids", None): -+ continue -+ name = par[0]["ids"][0] -+ if name.startswith("c."): -+ name = name[2:] -+ -+ objtype = par["objtype"] -+ -+ # Stable ABI annotation. -+ if record := stable_abi_data.get(name): -+ if ROLE_TO_OBJECT_TYPE[record.role] != objtype: -+ msg = ( -+ f"Object type mismatch in limited API annotation for {name}: " -+ f"{ROLE_TO_OBJECT_TYPE[record.role]!r} != {objtype!r}" -+ ) -+ raise ValueError(msg) -+ annotation = _stable_abi_annotation(record) -+ node.insert(0, annotation) -+ -+ # Unstable API annotation. -+ if name.startswith("PyUnstable"): -+ annotation = _unstable_api_annotation() -+ node.insert(0, annotation) -+ -+ # Return value annotation -+ if objtype != "function": -+ continue -+ if name not in refcount_data: -+ continue -+ entry = refcount_data[name] -+ if not entry.result_type.endswith("Object*"): -+ continue -+ annotation = _return_value_annotation(entry.result_refs) -+ node.insert(0, annotation) -+ -+ -+def _stable_abi_annotation(record: StableABIEntry) -> nodes.emphasis: -+ """Create the Stable ABI annotation. -+ -+ These have two forms: -+ Part of the `Stable ABI `_. -+ Part of the `Stable ABI `_ since version X.Y. -+ For structs, there's some more info in the message: -+ Part of the `Limited API `_ (as an opaque struct). -+ Part of the `Stable ABI `_ (including all members). -+ Part of the `Limited API `_ (Only some members are part -+ of the stable ABI.). -+ ... all of which can have "since version X.Y" appended. -+ """ -+ stable_added = record.added -+ message = sphinx_gettext("Part of the") -+ message = message.center(len(message) + 2) -+ emph_node = nodes.emphasis(message, message, classes=["stableabi"]) -+ ref_node = addnodes.pending_xref( -+ "Stable ABI", -+ refdomain="std", -+ reftarget="stable", -+ reftype="ref", -+ refexplicit="False", -+ ) -+ struct_abi_kind = record.struct_abi_kind -+ if struct_abi_kind in {"opaque", "members"}: -+ ref_node += nodes.Text(sphinx_gettext("Limited API")) -+ else: -+ ref_node += nodes.Text(sphinx_gettext("Stable ABI")) -+ emph_node += ref_node -+ if struct_abi_kind == "opaque": -+ emph_node += nodes.Text(" " + sphinx_gettext("(as an opaque struct)")) -+ elif struct_abi_kind == "full-abi": -+ emph_node += nodes.Text( -+ " " + sphinx_gettext("(including all members)") -+ ) -+ if record.ifdef_note: -+ emph_node += nodes.Text(f" {record.ifdef_note}") -+ if stable_added == "3.2": -+ # Stable ABI was introduced in 3.2. 
-+ pass -+ else: -+ emph_node += nodes.Text( -+ " " + sphinx_gettext("since version %s") % stable_added -+ ) -+ emph_node += nodes.Text(".") -+ if struct_abi_kind == "members": -+ msg = " " + sphinx_gettext( -+ "(Only some members are part of the stable ABI.)" -+ ) -+ emph_node += nodes.Text(msg) -+ return emph_node -+ -+ -+def _unstable_api_annotation() -> nodes.admonition: -+ ref_node = addnodes.pending_xref( -+ "Unstable API", -+ nodes.Text(sphinx_gettext("Unstable API")), -+ refdomain="std", -+ reftarget="unstable-c-api", -+ reftype="ref", -+ refexplicit="False", -+ ) -+ emph_node = nodes.emphasis( -+ "This is ", -+ sphinx_gettext("This is") + " ", -+ ref_node, -+ nodes.Text( -+ sphinx_gettext( -+ ". It may change without warning in minor releases." -+ ) -+ ), -+ ) -+ return nodes.admonition( -+ "", -+ emph_node, -+ classes=["unstable-c-api", "warning"], - ) -- app.connect('doctree-read', annotations.add_annotations) - -- class LimitedAPIList(Directive): - -- has_content = False -- required_arguments = 0 -- optional_arguments = 0 -- final_argument_whitespace = True -+def _return_value_annotation(result_refs: int | None) -> nodes.emphasis: -+ classes = ["refcount"] -+ if result_refs is None: -+ rc = sphinx_gettext("Return value: Always NULL.") -+ classes.append("return_null") -+ elif result_refs: -+ rc = sphinx_gettext("Return value: New reference.") -+ classes.append("return_new_ref") -+ else: -+ rc = sphinx_gettext("Return value: Borrowed reference.") -+ classes.append("return_borrowed_ref") -+ return nodes.emphasis(rc, rc, classes=classes) -+ -+ -+class LimitedAPIList(SphinxDirective): -+ has_content = False -+ required_arguments = 0 -+ optional_arguments = 0 -+ final_argument_whitespace = True - -- def run(self): -- content = [] -- for record in annotations.stable_abi_data.values(): -- role = REST_ROLE_MAP[record['role']] -- name = record['name'] -- content.append(f'* :c:{role}:`{name}`') -+ def run(self) -> list[nodes.Node]: -+ state = self.env.domaindata["c_annotations"] -+ content = [ -+ f"* :c:{record.role}:`{record.name}`" -+ for record in state["stable_abi_data"].values() -+ ] -+ node = nodes.paragraph() -+ self.state.nested_parse(StringList(content), 0, node) -+ return [node] - -- pnode = nodes.paragraph() -- self.state.nested_parse(StringList(content), 0, pnode) -- return [pnode] - -- app.add_directive('limited-api-list', LimitedAPIList) -+def init_annotations(app: Sphinx) -> None: -+ # Using domaindata is a bit hack-ish, -+ # but allows storing state without a global variable or closure. -+ app.env.domaindata["c_annotations"] = state = {} -+ state["refcount_data"] = read_refcount_data( -+ Path(app.srcdir, app.config.refcount_file) -+ ) -+ state["stable_abi_data"] = read_stable_abi_data( -+ Path(app.srcdir, app.config.stable_abi_file) -+ ) - - --def setup(app): -- app.add_config_value('refcount_file', '', True) -- app.add_config_value('stable_abi_file', '', True) -- app.connect('builder-inited', init_annotations) -+def setup(app: Sphinx) -> ExtensionMetadata: -+ app.add_config_value("refcount_file", "", "env", types={str}) -+ app.add_config_value("stable_abi_file", "", "env", types={str}) -+ app.add_directive("limited-api-list", LimitedAPIList) -+ app.connect("builder-inited", init_annotations) -+ app.connect("doctree-read", add_annotations) - -- # monkey-patch C object... 
-- CObject.option_spec = { -- 'noindex': directives.flag, -- 'stableabi': directives.flag, -- } -- old_handle_signature = CObject.handle_signature -+ if sphinx.version_info[:2] < (7, 2): -+ from docutils.parsers.rst import directives -+ from sphinx.domains.c import CObject - -- def new_handle_signature(self, sig, signode): -- signode.parent['stableabi'] = 'stableabi' in self.options -- return old_handle_signature(self, sig, signode) -- CObject.handle_signature = new_handle_signature -- return {'version': '1.0', 'parallel_read_safe': True} -+ # monkey-patch C object... -+ CObject.option_spec |= { -+ "no-index-entry": directives.flag, -+ "no-contents-entry": directives.flag, -+ } -+ -+ return { -+ "version": "1.0", -+ "parallel_read_safe": True, -+ "parallel_write_safe": True, -+ } -diff --git a/Doc/tools/extensions/glossary_search.py b/Doc/tools/extensions/glossary_search.py -index 7c93b1e4990..502b6cd95bc 100644 ---- a/Doc/tools/extensions/glossary_search.py -+++ b/Doc/tools/extensions/glossary_search.py -@@ -1,63 +1,63 @@ --# -*- coding: utf-8 -*- --""" -- glossary_search.py -- ~~~~~~~~~~~~~~~~ -+"""Feature search results for glossary items prominently.""" - -- Feature search results for glossary items prominently. -+from __future__ import annotations - -- :license: Python license. --""" - import json --import os.path --from docutils.nodes import definition_list_item -+from pathlib import Path -+from typing import TYPE_CHECKING -+ -+from docutils import nodes - from sphinx.addnodes import glossary - from sphinx.util import logging - -+if TYPE_CHECKING: -+ from sphinx.application import Sphinx -+ from sphinx.util.typing import ExtensionMetadata - - logger = logging.getLogger(__name__) --STATIC_DIR = '_static' --JSON = 'glossary.json' - - --def process_glossary_nodes(app, doctree, fromdocname): -+def process_glossary_nodes( -+ app: Sphinx, -+ doctree: nodes.document, -+ _docname: str, -+) -> None: - if app.builder.format != 'html' or app.builder.embedded: - return - -- terms = {} -+ if hasattr(app.env, 'glossary_terms'): -+ terms = app.env.glossary_terms -+ else: -+ terms = app.env.glossary_terms = {} - - for node in doctree.findall(glossary): -- for glossary_item in node.findall(definition_list_item): -- term = glossary_item[0].astext().lower() -- definition = glossary_item[1] -+ for glossary_item in node.findall(nodes.definition_list_item): -+ term = glossary_item[0].astext() -+ definition = glossary_item[-1] - - rendered = app.builder.render_partial(definition) -- terms[term] = { -- 'title': glossary_item[0].astext(), -- 'body': rendered['html_body'] -+ terms[term.lower()] = { -+ 'title': term, -+ 'body': rendered['html_body'], - } - -- if hasattr(app.env, 'glossary_terms'): -- app.env.glossary_terms.update(terms) -- else: -- app.env.glossary_terms = terms - --def on_build_finish(app, exc): -- if not hasattr(app.env, 'glossary_terms'): -- return -- if not app.env.glossary_terms: -+def write_glossary_json(app: Sphinx, _exc: Exception) -> None: -+ if not getattr(app.env, 'glossary_terms', None): - return - -- logger.info(f'Writing {JSON}', color='green') -- -- dest_dir = os.path.join(app.outdir, STATIC_DIR) -- os.makedirs(dest_dir, exist_ok=True) -- -- with open(os.path.join(dest_dir, JSON), 'w') as f: -- json.dump(app.env.glossary_terms, f) -+ logger.info('Writing glossary.json', color='green') -+ dest = Path(app.outdir, '_static', 'glossary.json') -+ dest.parent.mkdir(exist_ok=True) -+ dest.write_text(json.dumps(app.env.glossary_terms), encoding='utf-8') - - --def setup(app): -+def 
setup(app: Sphinx) -> ExtensionMetadata: - app.connect('doctree-resolved', process_glossary_nodes) -- app.connect('build-finished', on_build_finish) -+ app.connect('build-finished', write_glossary_json) - -- return {'version': '0.1', 'parallel_read_safe': True} -+ return { -+ 'version': '1.0', -+ 'parallel_read_safe': True, -+ 'parallel_write_safe': True, -+ } ---- /dev/null -+++ b/Doc/tools/extensions/lexers/__init__.py -@@ -0,0 +1,15 @@ -+from .asdl_lexer import ASDLLexer -+from .peg_lexer import PEGLexer -+ -+ -+def setup(app): -+ # Used for highlighting Parser/Python.asdl in library/ast.rst -+ app.add_lexer("asdl", ASDLLexer) -+ # Used for highlighting Grammar/python.gram in reference/grammar.rst -+ app.add_lexer("peg", PEGLexer) -+ -+ return { -+ "version": "1.0", -+ "parallel_read_safe": True, -+ "parallel_write_safe": True, -+ } -diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/lexers/asdl_lexer.py -similarity index 62% -rename from Doc/tools/extensions/asdl_highlight.py -rename to Doc/tools/extensions/lexers/asdl_lexer.py -index 42863a4b3bc..3a74174a1f7 100644 ---- a/Doc/tools/extensions/asdl_highlight.py -+++ b/Doc/tools/extensions/lexers/asdl_lexer.py -@@ -1,15 +1,6 @@ --import sys --from pathlib import Path -+from pygments.lexer import RegexLexer, bygroups, include -+from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text - --CPYTHON_ROOT = Path(__file__).resolve().parent.parent.parent.parent --sys.path.append(str(CPYTHON_ROOT / "Parser")) -- --from pygments.lexer import RegexLexer, bygroups, include, words --from pygments.token import (Comment, Keyword, Name, Operator, -- Punctuation, Text) -- --from asdl import builtin_types --from sphinx.highlighting import lexers - - class ASDLLexer(RegexLexer): - name = "ASDL" -@@ -34,7 +25,10 @@ - r"(\w+)(\*\s|\?\s|\s)(\w+)", - bygroups(Name.Builtin.Pseudo, Operator, Name), - ), -- (words(builtin_types), Name.Builtin), -+ # Keep in line with ``builtin_types`` from Parser/asdl.py. 
-+ # ASDL's 4 builtin types are -+ # constant, identifier, int, string -+ ("constant|identifier|int|string", Name.Builtin), - (r"attributes", Name.Builtin), - ( - _name + _text_ws + "(=)", -@@ -46,8 +40,3 @@ - (r".", Text), - ], - } -- -- --def setup(app): -- lexers["asdl"] = ASDLLexer() -- return {'version': '1.0', 'parallel_read_safe': True} -diff --git a/Doc/tools/extensions/peg_highlight.py b/Doc/tools/extensions/lexers/peg_lexer.py -similarity index 94% -rename from Doc/tools/extensions/peg_highlight.py -rename to Doc/tools/extensions/lexers/peg_lexer.py -index 5ab5530d269..06f9f8eb312 100644 ---- a/Doc/tools/extensions/peg_highlight.py -+++ b/Doc/tools/extensions/lexers/peg_lexer.py -@@ -1,8 +1,6 @@ - from pygments.lexer import RegexLexer, bygroups, include - from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text - --from sphinx.highlighting import lexers -- - - class PEGLexer(RegexLexer): - """Pygments Lexer for PEG grammar (.gram) files -@@ -81,8 +79,3 @@ - (r".", Text), - ], - } -- -- --def setup(app): -- lexers["peg"] = PEGLexer() -- return {"version": "1.0", "parallel_read_safe": True} -diff --git a/Doc/tools/extensions/patchlevel.py b/Doc/tools/extensions/patchlevel.py -index 617f28c2527..f2df6db47a2 100644 ---- a/Doc/tools/extensions/patchlevel.py -+++ b/Doc/tools/extensions/patchlevel.py -@@ -1,68 +1,77 @@ --# -*- coding: utf-8 -*- --""" -- patchlevel.py -- ~~~~~~~~~~~~~ -+"""Extract version information from Include/patchlevel.h.""" - -- Extract version info from Include/patchlevel.h. -- Adapted from Doc/tools/getversioninfo. -+import re -+import sys -+from pathlib import Path -+from typing import Literal, NamedTuple - -- :copyright: 2007-2008 by Georg Brandl. -- :license: Python license. --""" -+CPYTHON_ROOT = Path( -+ __file__, # cpython/Doc/tools/extensions/patchlevel.py -+ "..", # cpython/Doc/tools/extensions -+ "..", # cpython/Doc/tools -+ "..", # cpython/Doc -+ "..", # cpython -+).resolve() -+PATCHLEVEL_H = CPYTHON_ROOT / "Include" / "patchlevel.h" - --from __future__ import print_function -+RELEASE_LEVELS = { -+ "PY_RELEASE_LEVEL_ALPHA": "alpha", -+ "PY_RELEASE_LEVEL_BETA": "beta", -+ "PY_RELEASE_LEVEL_GAMMA": "candidate", -+ "PY_RELEASE_LEVEL_FINAL": "final", -+} - --import os --import re --import sys - --def get_header_version_info(srcdir): -- patchlevel_h = os.path.join(srcdir, '..', 'Include', 'patchlevel.h') -+class version_info(NamedTuple): # noqa: N801 -+ major: int #: Major release number -+ minor: int #: Minor release number -+ micro: int #: Patch release number -+ releaselevel: Literal["alpha", "beta", "candidate", "final"] -+ serial: int #: Serial release number - -- # This won't pick out all #defines, but it will pick up the ones we -- # care about. -- rx = re.compile(r'\s*#define\s+([a-zA-Z][a-zA-Z_0-9]*)\s+([a-zA-Z_0-9]+)') - -- d = {} -- with open(patchlevel_h) as f: -- for line in f: -- m = rx.match(line) -- if m is not None: -- name, value = m.group(1, 2) -- d[name] = value -+def get_header_version_info() -> version_info: -+ # Capture PY_ prefixed #defines. -+ pat = re.compile(r"\s*#define\s+(PY_\w*)\s+(\w+)", re.ASCII) - -- release = version = '%s.%s' % (d['PY_MAJOR_VERSION'], d['PY_MINOR_VERSION']) -- micro = int(d['PY_MICRO_VERSION']) -- release += '.' 
+ str(micro) -+ defines = {} -+ patchlevel_h = PATCHLEVEL_H.read_text(encoding="utf-8") -+ for line in patchlevel_h.splitlines(): -+ if (m := pat.match(line)) is not None: -+ name, value = m.groups() -+ defines[name] = value - -- level = d['PY_RELEASE_LEVEL'] -- suffixes = { -- 'PY_RELEASE_LEVEL_ALPHA': 'a', -- 'PY_RELEASE_LEVEL_BETA': 'b', -- 'PY_RELEASE_LEVEL_GAMMA': 'rc', -- } -- if level != 'PY_RELEASE_LEVEL_FINAL': -- release += suffixes[level] + str(int(d['PY_RELEASE_SERIAL'])) -- return version, release -+ return version_info( -+ major=int(defines["PY_MAJOR_VERSION"]), -+ minor=int(defines["PY_MINOR_VERSION"]), -+ micro=int(defines["PY_MICRO_VERSION"]), -+ releaselevel=RELEASE_LEVELS[defines["PY_RELEASE_LEVEL"]], -+ serial=int(defines["PY_RELEASE_SERIAL"]), -+ ) - - --def get_sys_version_info(): -- major, minor, micro, level, serial = sys.version_info -- release = version = '%s.%s' % (major, minor) -- release += '.%s' % micro -- if level != 'final': -- release += '%s%s' % (level[0], serial) -+def format_version_info(info: version_info) -> tuple[str, str]: -+ version = f"{info.major}.{info.minor}" -+ release = f"{info.major}.{info.minor}.{info.micro}" -+ if info.releaselevel != "final": -+ suffix = {"alpha": "a", "beta": "b", "candidate": "rc"} -+ release += f"{suffix[info.releaselevel]}{info.serial}" - return version, release - - - def get_version_info(): - try: -- return get_header_version_info('.') -- except (IOError, OSError): -- version, release = get_sys_version_info() -- print('Can\'t get version info from Include/patchlevel.h, ' \ -- 'using version of this interpreter (%s).' % release, file=sys.stderr) -+ info = get_header_version_info() -+ return format_version_info(info) -+ except OSError: -+ version, release = format_version_info(sys.version_info) -+ print( -+ f"Failed to get version info from Include/patchlevel.h, " -+ f"using version of this interpreter ({release}).", -+ file=sys.stderr, -+ ) - return version, release - --if __name__ == '__main__': -- print(get_header_version_info('.')[1]) -+ -+if __name__ == "__main__": -+ print(format_version_info(get_header_version_info())[1]) diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py -index caf145997fa..7cd9155634b 100644 +index 96a4f24fad3..7cd9155634b 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py -@@ -15,14 +15,14 @@ - from time import asctime - from pprint import pformat - --from docutils import nodes, utils -+from docutils import nodes - from docutils.io import StringOutput --from docutils.parsers.rst import Directive --from docutils.utils import new_document -+from docutils.parsers.rst import directives -+from docutils.utils import new_document, unescape - from sphinx import addnodes - from sphinx.builders import Builder --from sphinx.domains.python import PyFunction, PyMethod --from sphinx.errors import NoUri -+from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes -+from sphinx.domains.python import PyFunction, PyMethod, PyModule - from sphinx.locale import _ as sphinx_gettext - from sphinx.util import logging - from sphinx.util.docutils import SphinxDirective -@@ -48,11 +48,14 @@ - - std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`') - -+# backport :no-index: -+PyModule.option_spec['no-index'] = directives.flag -+ - - # Support for marking up and linking to bugs.python.org issues - - def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): -- issue = utils.unescape(text) -+ issue = 
unescape(text) - # sanity check: there are no bpo issues within these two values - if 47261 < int(issue) < 400000: - msg = inliner.reporter.error(f'The BPO ID {text!r} seems too high -- ' -@@ -67,7 +70,7 @@ - # Support for marking up and linking to GitHub issues - - def gh_issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): -- issue = utils.unescape(text) -+ issue = unescape(text) - # sanity check: all GitHub issues have ID >= 32426 - # even though some of them are also valid BPO IDs - if int(issue) < 32426: -@@ -82,7 +85,7 @@ - - # Support for marking up implementation details - --class ImplementationDetail(Directive): -+class ImplementationDetail(SphinxDirective): - - has_content = True - final_argument_whitespace = True -@@ -118,7 +121,7 @@ +@@ -121,7 +121,7 @@ known_platforms = frozenset({ "AIX", "Android", "BSD", "DragonFlyBSD", "Emscripten", "FreeBSD", "Linux", "NetBSD", "OpenBSD", "POSIX", "Solaris", "Unix", "VxWorks", @@ -11814,7 +887,7 @@ index caf145997fa..7cd9155634b 100644 # libc "BSD libc", "glibc", "musl", # POSIX platforms with pthreads -@@ -149,7 +152,7 @@ +@@ -152,7 +152,7 @@ Example:: @@ -11823,1050 +896,10 @@ index caf145997fa..7cd9155634b 100644 Arguments like "Linux >= 3.17 with glibc >= 2.27" are currently not parsed into separate tokens. -@@ -179,143 +182,6 @@ - return platforms - - --# Support for documenting audit event -- --def audit_events_purge(app, env, docname): -- """This is to remove from env.all_audit_events old traces of removed -- documents. -- """ -- if not hasattr(env, 'all_audit_events'): -- return -- fresh_all_audit_events = {} -- for name, event in env.all_audit_events.items(): -- event["source"] = [(d, t) for d, t in event["source"] if d != docname] -- if event["source"]: -- # Only keep audit_events that have at least one source. -- fresh_all_audit_events[name] = event -- env.all_audit_events = fresh_all_audit_events -- -- --def audit_events_merge(app, env, docnames, other): -- """In Sphinx parallel builds, this merges env.all_audit_events from -- subprocesses. -- -- all_audit_events is a dict of names, with values like: -- {'source': [(docname, target), ...], 'args': args} -- """ -- if not hasattr(other, 'all_audit_events'): -- return -- if not hasattr(env, 'all_audit_events'): -- env.all_audit_events = {} -- for name, value in other.all_audit_events.items(): -- if name in env.all_audit_events: -- env.all_audit_events[name]["source"].extend(value["source"]) -- else: -- env.all_audit_events[name] = value -- -- --class AuditEvent(Directive): -- -- has_content = True -- required_arguments = 1 -- optional_arguments = 2 -- final_argument_whitespace = True -- -- _label = [ -- sphinx_gettext("Raises an :ref:`auditing event ` {name} with no arguments."), -- sphinx_gettext("Raises an :ref:`auditing event ` {name} with argument {args}."), -- sphinx_gettext("Raises an :ref:`auditing event ` {name} with arguments {args}."), -- ] -- -- @property -- def logger(self): -- cls = type(self) -- return logging.getLogger(cls.__module__ + "." 
+ cls.__name__) -- -- def run(self): -- name = self.arguments[0] -- if len(self.arguments) >= 2 and self.arguments[1]: -- args = (a.strip() for a in self.arguments[1].strip("'\"").split(",")) -- args = [a for a in args if a] -- else: -- args = [] -- -- label = self._label[min(2, len(args))] -- text = label.format(name="``{}``".format(name), -- args=", ".join("``{}``".format(a) for a in args if a)) -- -- env = self.state.document.settings.env -- if not hasattr(env, 'all_audit_events'): -- env.all_audit_events = {} -- -- new_info = { -- 'source': [], -- 'args': args -- } -- info = env.all_audit_events.setdefault(name, new_info) -- if info is not new_info: -- if not self._do_args_match(info['args'], new_info['args']): -- self.logger.warning( -- "Mismatched arguments for audit-event {}: {!r} != {!r}" -- .format(name, info['args'], new_info['args']) -- ) -- -- ids = [] -- try: -- target = self.arguments[2].strip("\"'") -- except (IndexError, TypeError): -- target = None -- if not target: -- target = "audit_event_{}_{}".format( -- re.sub(r'\W', '_', name), -- len(info['source']), -- ) -- ids.append(target) -- -- info['source'].append((env.docname, target)) -- -- pnode = nodes.paragraph(text, classes=["audit-hook"], ids=ids) -- pnode.line = self.lineno -- if self.content: -- self.state.nested_parse(self.content, self.content_offset, pnode) -- else: -- n, m = self.state.inline_text(text, self.lineno) -- pnode.extend(n + m) -- -- return [pnode] -- -- # This list of sets are allowable synonyms for event argument names. -- # If two names are in the same set, they are treated as equal for the -- # purposes of warning. This won't help if number of arguments is -- # different! -- _SYNONYMS = [ -- {"file", "path", "fd"}, -- ] -- -- def _do_args_match(self, args1, args2): -- if args1 == args2: -- return True -- if len(args1) != len(args2): -- return False -- for a1, a2 in zip(args1, args2): -- if a1 == a2: -- continue -- if any(a1 in s and a2 in s for s in self._SYNONYMS): -- continue -- return False -- return True -- -- --class audit_event_list(nodes.General, nodes.Element): -- pass -- -- --class AuditEventListDirective(Directive): -- -- def run(self): -- return [audit_event_list('')] -- -- - # Support for documenting decorators - - class PyDecoratorMixin(object): -@@ -395,58 +261,34 @@ - - # Support for documenting version of removal in deprecations - --class DeprecatedRemoved(Directive): -- has_content = True -+class DeprecatedRemoved(VersionChange): - required_arguments = 2 -- optional_arguments = 1 -- final_argument_whitespace = True -- option_spec = {} - -- _deprecated_label = sphinx_gettext('Deprecated since version {deprecated}, will be removed in version {removed}') -- _removed_label = sphinx_gettext('Deprecated since version {deprecated}, removed in version {removed}') -+ _deprecated_label = sphinx_gettext('Deprecated since version %s, will be removed in version %s') -+ _removed_label = sphinx_gettext('Deprecated since version %s, removed in version %s') - - def run(self): -- node = addnodes.versionmodified() -- node.document = self.state.document -- node['type'] = 'deprecated-removed' -- version = (self.arguments[0], self.arguments[1]) -- node['version'] = version -- env = self.state.document.settings.env -- current_version = tuple(int(e) for e in env.config.version.split('.')) -- removed_version = tuple(int(e) for e in self.arguments[1].split('.')) -+ # Replace the first two arguments (deprecated version and removed version) -+ # with a single tuple of both versions. 
-+ version_deprecated = self.arguments[0] -+ version_removed = self.arguments.pop(1) -+ self.arguments[0] = version_deprecated, version_removed -+ -+ # Set the label based on if we have reached the removal version -+ current_version = tuple(map(int, self.config.version.split('.'))) -+ removed_version = tuple(map(int, version_removed.split('.'))) - if current_version < removed_version: -- label = self._deprecated_label -- else: -- label = self._removed_label -- -- text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) -- if len(self.arguments) == 3: -- inodes, messages = self.state.inline_text(self.arguments[2], -- self.lineno+1) -- para = nodes.paragraph(self.arguments[2], '', *inodes, translatable=False) -- node.append(para) -+ versionlabels[self.name] = self._deprecated_label -+ versionlabel_classes[self.name] = 'deprecated' - else: -- messages = [] -- if self.content: -- self.state.nested_parse(self.content, self.content_offset, node) -- if len(node): -- if isinstance(node[0], nodes.paragraph) and node[0].rawsource: -- content = nodes.inline(node[0].rawsource, translatable=True) -- content.source = node[0].source -- content.line = node[0].line -- content += node[0].children -- node[0].replace_self(nodes.paragraph('', '', content, translatable=False)) -- node[0].insert(0, nodes.inline('', '%s: ' % text, -- classes=['versionmodified'])) -- else: -- para = nodes.paragraph('', '', -- nodes.inline('', '%s.' % text, -- classes=['versionmodified']), -- translatable=False) -- node.append(para) -- env = self.state.document.settings.env -- env.get_domain('changeset').note_changeset(node) -- return [node] + messages -+ versionlabels[self.name] = self._removed_label -+ versionlabel_classes[self.name] = 'removed' -+ try: -+ return super().run() -+ finally: -+ # reset versionlabels and versionlabel_classes -+ versionlabels[self.name] = '' -+ versionlabel_classes[self.name] = '' - - - # Support for including Misc/NEWS -@@ -456,7 +298,7 @@ - whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$") - - --class MiscNews(Directive): -+class MiscNews(SphinxDirective): - has_content = False - required_arguments = 1 - optional_arguments = 0 -@@ -471,7 +313,7 @@ - if not source_dir: - source_dir = path.dirname(path.abspath(source)) - fpath = path.join(source_dir, fname) -- self.state.document.settings.record_dependencies.add(fpath) -+ self.env.note_dependency(path.abspath(fpath)) - try: - with io.open(fpath, encoding='utf-8') as fp: - content = fp.read() -@@ -603,70 +445,6 @@ - return sig - - --def process_audit_events(app, doctree, fromdocname): -- for node in doctree.findall(audit_event_list): -- break -- else: -- return -- -- env = app.builder.env -- -- table = nodes.table(cols=3) -- group = nodes.tgroup( -- '', -- nodes.colspec(colwidth=30), -- nodes.colspec(colwidth=55), -- nodes.colspec(colwidth=15), -- cols=3, -- ) -- head = nodes.thead() -- body = nodes.tbody() -- -- table += group -- group += head -- group += body -- -- row = nodes.row() -- row += nodes.entry('', nodes.paragraph('', nodes.Text('Audit event'))) -- row += nodes.entry('', nodes.paragraph('', nodes.Text('Arguments'))) -- row += nodes.entry('', nodes.paragraph('', nodes.Text('References'))) -- head += row -- -- for name in sorted(getattr(env, "all_audit_events", ())): -- audit_event = env.all_audit_events[name] -- -- row = nodes.row() -- node = nodes.paragraph('', nodes.Text(name)) -- row += nodes.entry('', node) -- -- node = nodes.paragraph() -- for i, a in enumerate(audit_event['args']): -- if i: -- node += 
nodes.Text(", ") -- node += nodes.literal(a, nodes.Text(a)) -- row += nodes.entry('', node) -- -- node = nodes.paragraph() -- backlinks = enumerate(sorted(set(audit_event['source'])), start=1) -- for i, (doc, label) in backlinks: -- if isinstance(label, str): -- ref = nodes.reference("", nodes.Text("[{}]".format(i)), internal=True) -- try: -- ref['refuri'] = "{}#{}".format( -- app.builder.get_relative_uri(fromdocname, doc), -- label, -- ) -- except NoUri: -- continue -- node += ref -- row += nodes.entry('', node) -- -- body += row -- -- for node in doctree.findall(audit_event_list): -- node.replace_self(table) -- -- - def patch_pairindextypes(app, _env) -> None: - """Remove all entries from ``pairindextypes`` before writing POT files. - -@@ -696,8 +474,6 @@ - app.add_role('gh', gh_issue_role) - app.add_directive('impl-detail', ImplementationDetail) - app.add_directive('availability', Availability) -- app.add_directive('audit-event', AuditEvent) -- app.add_directive('audit-event-table', AuditEventListDirective) - app.add_directive('deprecated-removed', DeprecatedRemoved) - app.add_builder(PydocTopicsBuilder) - app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) -@@ -713,7 +489,4 @@ - app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) - app.add_directive('miscnews', MiscNews) - app.connect('env-check-consistency', patch_pairindextypes) -- app.connect('doctree-resolved', process_audit_events) -- app.connect('env-merge-info', audit_events_merge) -- app.connect('env-purge-doc', audit_events_purge) - return {'version': '1.0', 'parallel_read_safe': True} ---- /dev/null -+++ b/Doc/tools/static/glossary_search.js -@@ -0,0 +1,47 @@ -+"use strict"; -+ -+const GLOSSARY_PAGE = "glossary.html"; -+ -+const glossary_search = async () => { -+ const response = await fetch("_static/glossary.json"); -+ if (!response.ok) { -+ throw new Error("Failed to fetch glossary.json"); -+ } -+ const glossary = await response.json(); -+ -+ const params = new URLSearchParams(document.location.search).get("q"); -+ if (!params) { -+ return; -+ } -+ -+ const searchParam = params.toLowerCase(); -+ const glossaryItem = glossary[searchParam]; -+ if (!glossaryItem) { -+ return; -+ } -+ -+ // set up the title text with a link to the glossary page -+ const glossaryTitle = document.getElementById("glossary-title"); -+ glossaryTitle.textContent = "Glossary: " + glossaryItem.title; -+ const linkTarget = searchParam.replace(/ /g, "-"); -+ glossaryTitle.href = GLOSSARY_PAGE + "#term-" + linkTarget; -+ -+ // rewrite any anchor links (to other glossary terms) -+ // to have a full reference to the glossary page -+ const glossaryBody = document.getElementById("glossary-body"); -+ glossaryBody.innerHTML = glossaryItem.body; -+ const anchorLinks = glossaryBody.querySelectorAll('a[href^="#"]'); -+ anchorLinks.forEach(function (link) { -+ const currentUrl = link.getAttribute("href"); -+ link.href = GLOSSARY_PAGE + currentUrl; -+ }); -+ -+ const glossaryResult = document.getElementById("glossary-result"); -+ glossaryResult.style.display = ""; -+}; -+ -+if (document.readyState !== "loading") { -+ glossary_search().catch(console.error); -+} else { -+ document.addEventListener("DOMContentLoaded", glossary_search); -+} ---- /dev/null -+++ b/Doc/tools/static/rtd_switcher.js -@@ -0,0 +1,55 @@ -+ function onSwitch(event) { -+ const option = event.target.selectedIndex; -+ const item = event.target.options[option]; -+ window.location.href = item.dataset.url; -+ } -+ -+ 
document.addEventListener("readthedocs-addons-data-ready", function(event) { -+ const config = event.detail.data() -+ const versionSelect = ` -+ -+ `; -+ -+ // Prepend the current language to the options on the selector -+ let languages = config.projects.translations.concat(config.projects.current); -+ languages = languages.sort((a, b) => a.language.name.localeCompare(b.language.name)); -+ -+ const languageSelect = ` -+ -+ `; -+ -+ // Query all the placeholders because there are different ones for Desktop/Mobile -+ const versionPlaceholders = document.querySelectorAll(".version_switcher_placeholder"); -+ for (placeholder of versionPlaceholders) { -+ placeholder.innerHTML = versionSelect; -+ let selectElement = placeholder.querySelector("select"); -+ selectElement.addEventListener("change", onSwitch); -+ } -+ -+ const languagePlaceholders = document.querySelectorAll(".language_switcher_placeholder"); -+ for (placeholder of languagePlaceholders) { -+ placeholder.innerHTML = languageSelect; -+ let selectElement = placeholder.querySelector("select"); -+ selectElement.addEventListener("change", onSwitch); -+ } -+ }); -diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html -index b5353d6fb77..9f99eea6f3c 100644 ---- a/Doc/tools/templates/download.html -+++ b/Doc/tools/templates/download.html -@@ -1,5 +1,5 @@ - {% extends "layout.html" %} --{% set title = 'Download' %} -+{% set title = _('Download') %} - {% if daily is defined %} - {% set dlbase = pathto('archives', 1) %} - {% else %} -@@ -11,54 +11,68 @@ - {% endif %} - - {% block body %} --

--<h1>Download Python {{ release }} Documentation</h1>
-+<h1>{% trans %}Download Python {{ release }} Documentation{% endtrans %}</h1>
- 
--{% if last_updated %}<p><b>Last updated on: {{ last_updated }}.</b></p>{% endif %}
-+{% if last_updated %}<p><b>{% trans %}Last updated on: {{ last_updated }}.{% endtrans %}</b></p>{% endif %}
- 
--<p>To download an archive containing all the documents for this version of
--Python in one of various formats, follow one of links in this table.</p>
-+<p>{% trans %}To download an archive containing all the documents for this version of
-+Python in one of various formats, follow one of links in this table.{% endtrans %}</p>
- - -- -- -- -- -+ -+ -+ -+ - -- -- -- -+ -+ -+ -+ - -- -- -- -+ -+ -+ -+ - -- -- -- -+ -+ -+ -+ - -- -- -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ - - -
FormatPacked as .zipPacked as .tar.bz2
PDF (US-Letter paper size)Download (ca. 13 MiB)Download (ca. 13 MiB)
{% trans %}Format{% endtrans %}{% trans %}Packed as .zip{% endtrans %}{% trans %}Packed as .tar.bz2{% endtrans %}
PDF (A4 paper size)Download (ca. 13 MiB)Download (ca. 13 MiB)
{% trans %}PDF (US-Letter paper size){% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}
HTMLDownload (ca. 9 MiB)Download (ca. 6 MiB)
{% trans %}PDF (A4 paper size){% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}
Plain TextDownload (ca. 3 MiB)Download (ca. 2 MiB)
{% trans %}HTML{% endtrans %}{% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %}
EPUBDownload (ca. 5 MiB)
{% trans %}Plain text{% endtrans %}{% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %}
{% trans %}Texinfo{% endtrans %}{% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %}
{% trans %}EPUB{% endtrans %}{% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %}
- --

--<p>These archives contain all the content in the documentation.</p>
-+<p>{% trans %}These archives contain all the content in the documentation.{% endtrans %}</p>
- 
--<h2>Unpacking</h2>
-+<h2>{% trans %}Unpacking{% endtrans %}</h2>
- --

--<p>Unix users should download the .tar.bz2 archives; these are bzipped tar
-+<p>{% trans %}Unix users should download the .tar.bz2 archives; these are bzipped tar
- archives and can be handled in the usual way using tar and the bzip2
- program. The Info-ZIP unzip program can be
- used to handle the ZIP archives if desired. The .tar.bz2 archives provide the
--best compression and fastest download times.</p>
-+best compression and fastest download times.{% endtrans %}</p>
- 
--<p>Windows users can use the ZIP archives since those are customary on that
--platform. These are created on Unix using the Info-ZIP zip program.</p>
-+<p>{% trans %}Windows users can use the ZIP archives since those are customary on that
-+platform. These are created on Unix using the Info-ZIP zip program.{% endtrans %}</p>
- - --

--<h2>Problems</h2>
-+<h2>{% trans %}Problems{% endtrans %}</h2>
- 
--<p>If you have comments or suggestions for the Python documentation, please send
--email to docs@python.org.</p>
-+<p>{% trans %}If you have comments or suggestions for the Python documentation, please send
-+email to docs@python.org.{% endtrans %}</p>
- {% endblock %} -diff --git a/Doc/tools/templates/indexcontent.html b/Doc/tools/templates/indexcontent.html -index 6f854e86ab8..f2e9fbb0106 100644 ---- a/Doc/tools/templates/indexcontent.html -+++ b/Doc/tools/templates/indexcontent.html -@@ -33,6 +33,8 @@ - {% trans %}C API reference{% endtrans %}

- -+ - - - -diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html -index e931147813a..b09fd21a8dd 100644 ---- a/Doc/tools/templates/layout.html -+++ b/Doc/tools/templates/layout.html -@@ -26,6 +26,9 @@ - {% endblock %} - - {% block extrahead %} -+ {% if builder == "html" and enable_analytics %} -+ -+ {% endif %} - - {% if builder != "htmlhelp" %} - {% if pagename == 'whatsnew/changelog' and not embedded %} -@@ -43,90 +46,7 @@ - {{ super() }} - - {%- if not embedded %} -- -- -+ -+ - {%- endif %} - {% endblock %} -diff --git a/Doc/tools/templates/search.html b/Doc/tools/templates/search.html -index 85297446138..6ddac5f828b 100644 ---- a/Doc/tools/templates/search.html -+++ b/Doc/tools/templates/search.html -@@ -2,61 +2,16 @@ - {% block extrahead %} - {{ super() }} - -- -+ -+{% endblock %} -+{% block searchresults %} -+
-+ {# For glossary_search.js #} -+ -+
- {% endblock %} -diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst -index 1b64741c349..675faa8c524 100644 ---- a/Doc/tutorial/classes.rst -+++ b/Doc/tutorial/classes.rst -@@ -688,6 +688,11 @@ - without regard to the syntactic position of the identifier, as long as it - occurs within the definition of a class. - -+.. seealso:: -+ -+ The :ref:`private name mangling specifications ` -+ for details and special cases. -+ - Name mangling is helpful for letting subclasses override methods without - breaking intraclass method calls. For example:: - -diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst -index 77444f9cb83..677d7ca02c3 100644 ---- a/Doc/tutorial/controlflow.rst -+++ b/Doc/tutorial/controlflow.rst -@@ -61,7 +61,7 @@ - :: - - >>> # Measure some strings: -- ... words = ['cat', 'window', 'defenestrate'] -+ >>> words = ['cat', 'window', 'defenestrate'] - >>> for w in words: - ... print(w, len(w)) - ... -@@ -445,7 +445,7 @@ - ... print() - ... - >>> # Now call the function we just defined: -- ... fib(2000) -+ >>> fib(2000) - 0 1 1 2 3 5 8 13 21 34 55 89 144 233 377 610 987 1597 - - .. index:: -diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst -index a1492298bdb..73f17adeea7 100644 ---- a/Doc/tutorial/datastructures.rst -+++ b/Doc/tutorial/datastructures.rst -@@ -383,16 +383,16 @@ - >>> t - (12345, 54321, 'hello!') - >>> # Tuples may be nested: -- ... u = t, (1, 2, 3, 4, 5) -+ >>> u = t, (1, 2, 3, 4, 5) - >>> u - ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5)) - >>> # Tuples are immutable: -- ... t[0] = 88888 -+ >>> t[0] = 88888 - Traceback (most recent call last): - File "", line 1, in - TypeError: 'tuple' object does not support item assignment - >>> # but they can contain mutable objects: -- ... v = ([1, 2, 3], [3, 2, 1]) -+ >>> v = ([1, 2, 3], [3, 2, 1]) - >>> v - ([1, 2, 3], [3, 2, 1]) - -@@ -465,7 +465,7 @@ - False - - >>> # Demonstrate set operations on unique letters from two words -- ... -+ >>> - >>> a = set('abracadabra') - >>> b = set('alacazam') - >>> a # unique letters in a -diff --git a/Doc/tutorial/floatingpoint.rst b/Doc/tutorial/floatingpoint.rst -index 0795e2fef98..dfe2d1d3a83 100644 ---- a/Doc/tutorial/floatingpoint.rst -+++ b/Doc/tutorial/floatingpoint.rst -@@ -6,7 +6,7 @@ - .. _tut-fp-issues: - - ************************************************** --Floating Point Arithmetic: Issues and Limitations -+Floating-Point Arithmetic: Issues and Limitations - ************************************************** - - .. sectionauthor:: Tim Peters -@@ -88,7 +88,7 @@ - Python 3.1, Python (on most systems) is now able to choose the shortest of - these and simply display ``0.1``. - --Note that this is in the very nature of binary floating-point: this is not a bug -+Note that this is in the very nature of binary floating point: this is not a bug - in Python, and it is not a bug in your code either. You'll see the same kind of - thing in all languages that support your hardware's floating-point arithmetic - (although some languages may not *display* the difference by default, or in all -@@ -148,13 +148,13 @@ - with "0.1" is explained in precise detail below, in the "Representation Error" - section. See `Examples of Floating Point Problems - `_ for --a pleasant summary of how binary floating-point works and the kinds of -+a pleasant summary of how binary floating point works and the kinds of - problems commonly encountered in practice. 
Also see - `The Perils of Floating Point `_ - for a more complete account of other common surprises. - - As that says near the end, "there are no easy answers." Still, don't be unduly --wary of floating-point! The errors in Python float operations are inherited -+wary of floating point! The errors in Python float operations are inherited - from the floating-point hardware, and on most machines are on the order of no - more than 1 part in 2\*\*53 per operation. That's more than adequate for most - tasks, but you do need to keep in mind that it's not decimal arithmetic and -@@ -230,7 +230,7 @@ - >>> sum([0.1] * 10) == 1.0 - True - --The :func:`math.fsum()` goes further and tracks all of the "lost digits" -+The :func:`math.fsum` goes further and tracks all of the "lost digits" - as values are added onto a running total so that the result has only a - single rounding. This is slower than :func:`sum` but will be more - accurate in uncommon cases where large magnitude inputs mostly cancel -diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst -index 857068a51ab..2e6fd419b21 100644 ---- a/Doc/tutorial/inputoutput.rst -+++ b/Doc/tutorial/inputoutput.rst -@@ -87,12 +87,12 @@ - >>> print(s) - The value of x is 32.5, and y is 40000... - >>> # The repr() of a string adds string quotes and backslashes: -- ... hello = 'hello, world\n' -+ >>> hello = 'hello, world\n' - >>> hellos = repr(hello) - >>> print(hellos) - 'hello, world\n' - >>> # The argument to repr() may be any Python object: -- ... repr((x, y, ('spam', 'eggs'))) -+ >>> repr((x, y, ('spam', 'eggs'))) - "(32.5, 40000, ('spam', 'eggs'))" - - The :mod:`string` module contains a :class:`~string.Template` class that offers -@@ -279,9 +279,11 @@ - Old string formatting - --------------------- - --The % operator (modulo) can also be used for string formatting. Given ``'string' --% values``, instances of ``%`` in ``string`` are replaced with zero or more --elements of ``values``. This operation is commonly known as string -+The % operator (modulo) can also be used for string formatting. -+Given ``format % values`` (where *format* is a string), -+``%`` conversion specifications in *format* are replaced with -+zero or more elements of *values*. -+This operation is commonly known as string - interpolation. For example:: - - >>> import math -diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst -index 0f16dae8b14..054bac59c95 100644 ---- a/Doc/tutorial/introduction.rst -+++ b/Doc/tutorial/introduction.rst -@@ -62,7 +62,7 @@ - 20 - >>> (50 - 5*6) / 4 - 5.0 -- >>> 8 / 5 # division always returns a floating point number -+ >>> 8 / 5 # division always returns a floating-point number - 1.6 - - The integer numbers (e.g. ``2``, ``4``, ``20``) have type :class:`int`, -@@ -501,8 +501,8 @@ - as follows:: - - >>> # Fibonacci series: -- ... # the sum of two elements defines the next -- ... a, b = 0, 1 -+ >>> # the sum of two elements defines the next -+ >>> a, b = 0, 1 - >>> while a < 10: - ... print(a) - ... a, b = b, a+b -@@ -544,7 +544,7 @@ - * The :func:`print` function writes the value of the argument(s) it is given. - It differs from just writing the expression you want to write (as we did - earlier in the calculator examples) in the way it handles multiple arguments, -- floating point quantities, and strings. Strings are printed without quotes, -+ floating-point quantities, and strings. 
Strings are printed without quotes, - and a space is inserted between items, so you can format things nicely, like - this:: - -diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst -index 6bae279c5e9..ca9dadb4cb8 100644 ---- a/Doc/tutorial/stdlib.rst -+++ b/Doc/tutorial/stdlib.rst -@@ -138,7 +138,7 @@ - =========== - - The :mod:`math` module gives access to the underlying C library functions for --floating point math:: -+floating-point math:: - - >>> import math - >>> math.cos(math.pi / 4) -diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst -index 33f311db3a2..b6d36365a5b 100644 ---- a/Doc/tutorial/stdlib2.rst -+++ b/Doc/tutorial/stdlib2.rst -@@ -293,7 +293,7 @@ - sometimes there is a need for alternative implementations with different - performance trade-offs. - --The :mod:`array` module provides an :class:`~array.array()` object that is like -+The :mod:`array` module provides an :class:`~array.array` object that is like - a list that stores only homogeneous data and stores it more compactly. The - following example shows an array of numbers stored as two byte unsigned binary - numbers (typecode ``"H"``) rather than the usual 16 bytes per entry for regular -@@ -306,7 +306,7 @@ - >>> a[1:3] - array('H', [10, 700]) - --The :mod:`collections` module provides a :class:`~collections.deque()` object -+The :mod:`collections` module provides a :class:`~collections.deque` object - that is like a list with faster appends and pops from the left side but slower - lookups in the middle. These objects are well suited for implementing queues - and breadth first tree searches:: -@@ -352,11 +352,11 @@ - - .. _tut-decimal-fp: - --Decimal Floating Point Arithmetic -+Decimal Floating-Point Arithmetic - ================================= - - The :mod:`decimal` module offers a :class:`~decimal.Decimal` datatype for --decimal floating point arithmetic. Compared to the built-in :class:`float` -+decimal floating-point arithmetic. Compared to the built-in :class:`float` - implementation of binary floating point, the class is especially helpful for - - * financial applications and other uses which require exact decimal -diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst -index 82fbc82b5da..50f6eb8cdfb 100644 ---- a/Doc/using/cmdline.rst -+++ b/Doc/using/cmdline.rst -@@ -24,7 +24,7 @@ - - When invoking Python, you may specify any of these options:: - -- python [-bBdEhiIOqsSuvVWx?] [-c command | -m module-name | script | - ] [args] -+ python [-bBdEhiIOPqRsSuvVWx?] [-c command | -m module-name | script | - ] [args] - - The most common use case is, of course, a simple invocation of a script:: - -@@ -440,6 +440,7 @@ - -Wdefault # Warn once per call location - -Werror # Convert to exceptions - -Walways # Warn every time -+ -Wall # Same as -Walways - -Wmodule # Warn once per calling module - -Wonce # Warn once per Python process - -Wignore # Never warn -@@ -719,6 +720,11 @@ - This variable can also be modified by Python code using :data:`os.environ` - to force inspect mode on program termination. - -+ .. audit-event:: cpython.run_stdin "" "" -+ -+ .. versionchanged:: 3.12.5 (also 3.11.10, 3.10.15, 3.9.20, and 3.8.20) -+ Emits audit events. -+ - - .. 
envvar:: PYTHONUNBUFFERED - -@@ -842,6 +848,7 @@ - PYTHONWARNINGS=default # Warn once per call location - PYTHONWARNINGS=error # Convert to exceptions - PYTHONWARNINGS=always # Warn every time -+ PYTHONWARNINGS=all # Same as PYTHONWARNINGS=always - PYTHONWARNINGS=module # Warn once per calling module - PYTHONWARNINGS=once # Warn once per Python process - PYTHONWARNINGS=ignore # Never warn -@@ -943,7 +950,7 @@ - 'surrogatepass' are used. - - This may also be enabled at runtime with -- :func:`sys._enablelegacywindowsfsencoding()`. -+ :func:`sys._enablelegacywindowsfsencoding`. - - .. availability:: Windows. - diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst -index 0e605b38d42..78ff18683a2 100644 +index 51af4e3b7ad..78ff18683a2 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst -@@ -14,8 +14,8 @@ - `_ - are not required. - --* Support for `IEEE 754 `_ floating -- point numbers and `floating point Not-a-Number (NaN) -+* Support for `IEEE 754 `_ -+ floating-point numbers and `floating-point Not-a-Number (NaN) - `_. - - * Support for threads. @@ -638,7 +638,7 @@ macOS Options ------------- @@ -13297,1148 +1330,6 @@ index 31d37aad2a7..44fb00de373 100644 Other Resources =============== -diff --git a/Doc/whatsnew/2.1.rst b/Doc/whatsnew/2.1.rst -index b4002f06e92..8eafb48461a 100644 ---- a/Doc/whatsnew/2.1.rst -+++ b/Doc/whatsnew/2.1.rst -@@ -644,9 +644,9 @@ - lists the function arguments and the local variables for each frame. - - * Various functions in the :mod:`time` module, such as :func:`~time.asctime` and -- :func:`~time.localtime`, require a floating point argument containing the time in -+ :func:`~time.localtime`, require a floating-point argument containing the time in - seconds since the epoch. The most common use of these functions is to work with -- the current time, so the floating point argument has been made optional; when a -+ the current time, so the floating-point argument has been made optional; when a - value isn't provided, the current time will be used. For example, log file - entries usually need a string containing the current time; in Python 2.1, - ``time.asctime()`` can be used, instead of the lengthier -diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst -index d4dbe0570fb..5db34fa08c6 100644 ---- a/Doc/whatsnew/2.2.rst -+++ b/Doc/whatsnew/2.2.rst -@@ -1249,7 +1249,7 @@ - - * The :func:`pow` built-in function no longer supports 3 arguments when - floating-point numbers are supplied. ``pow(x, y, z)`` returns ``(x**y) % z``, -- but this is never useful for floating point numbers, and the final result varies -+ but this is never useful for floating-point numbers, and the final result varies - unpredictably depending on the platform. A call such as ``pow(2.0, 8.0, 7.0)`` - will now raise a :exc:`TypeError` exception. - -diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst -index e2f1bbf7a29..f6350967a8d 100644 ---- a/Doc/whatsnew/2.3.rst -+++ b/Doc/whatsnew/2.3.rst -@@ -1382,7 +1382,7 @@ - In Python 2.4, the default will change to always returning floats. - - Application developers should enable this feature only if all their libraries -- work properly when confronted with floating point time stamps, or if they use -+ work properly when confronted with floating-point time stamps, or if they use - the tuple API. If used, the feature should be activated on an application level - instead of trying to enable it on a per-use basis. 
- -diff --git a/Doc/whatsnew/2.5.rst b/Doc/whatsnew/2.5.rst -index 6aa3e459f91..802c3a90519 100644 ---- a/Doc/whatsnew/2.5.rst -+++ b/Doc/whatsnew/2.5.rst -@@ -1724,7 +1724,7 @@ - :mod:`ctypes` also provides a wrapper for Python's C API as the - ``ctypes.pythonapi`` object. This object does *not* release the global - interpreter lock before calling a function, because the lock must be held when --calling into the interpreter's code. There's a :class:`py_object()` type -+calling into the interpreter's code. There's a :class:`~ctypes.py_object` type - constructor that will create a :c:expr:`PyObject *` pointer. A simple usage:: - - import ctypes -@@ -1734,7 +1734,7 @@ - ctypes.py_object("abc"), ctypes.py_object(1)) - # d is now {'abc', 1}. - --Don't forget to use :class:`py_object()`; if it's omitted you end up with a -+Don't forget to use :func:`~ctypes.py_object`; if it's omitted you end up with a - segmentation fault. - - :mod:`ctypes` has been around for a while, but people still write and -diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst -index d2a82d5c45e..7e575e82523 100644 ---- a/Doc/whatsnew/2.6.rst -+++ b/Doc/whatsnew/2.6.rst -@@ -1453,7 +1453,7 @@ - Fraction(5, 3) - - For converting floating-point numbers to rationals, --the float type now has an :meth:`as_integer_ratio()` method that returns -+the float type now has an :meth:`as_integer_ratio` method that returns - the numerator and denominator for a fraction that evaluates to the same - floating-point value:: - -@@ -2273,7 +2273,7 @@ - (Contributed by Guido van Rossum from work for Google App Engine; - :issue:`3487`.) - --* The :mod:`rlcompleter` module's :meth:`Completer.complete()` method -+* The :mod:`rlcompleter` module's :meth:`Completer.complete` method - will now ignore exceptions triggered while evaluating a name. - (Fixed by Lorenz Quack; :issue:`2250`.) - -@@ -2566,7 +2566,7 @@ - :meth:`tracer`, and :meth:`speed` methods. - * The ability to set new shapes for the turtle, and to - define a new coordinate system. -- * Turtles now have an :meth:`undo()` method that can roll back actions. -+ * Turtles now have an :meth:`undo` method that can roll back actions. - * Simple support for reacting to input events such as mouse and keyboard - activity, making it possible to write simple games. - * A :file:`turtle.cfg` file can be used to customize the starting appearance -@@ -3051,7 +3051,7 @@ - - * Several functions return information about the platform's - floating-point support. :c:func:`PyFloat_GetMax` returns -- the maximum representable floating point value, -+ the maximum representable floating-point value, - and :c:func:`PyFloat_GetMin` returns the minimum - positive value. :c:func:`PyFloat_GetInfo` returns an object - containing more information from the :file:`float.h` file, such as -diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst -index 585c704af1f..86ce2224250 100644 ---- a/Doc/whatsnew/2.7.rst -+++ b/Doc/whatsnew/2.7.rst -@@ -291,7 +291,7 @@ - configuration files can now be read, modified, and then written back - in their original order. - --* The :meth:`~collections.somenamedtuple._asdict()` method for -+* The :meth:`~collections.somenamedtuple._asdict` method for - :func:`collections.namedtuple` now returns an ordered dictionary with the - values appearing in the same order as the underlying tuple indices. - -@@ -1198,7 +1198,7 @@ - of the operands. Previously such comparisons would fall back to - Python's default rules for comparing objects, which produced arbitrary - results based on their type. 
Note that you still cannot combine -- :class:`!Decimal` and floating-point in other operations such as addition, -+ :class:`!Decimal` and floating point in other operations such as addition, - since you should be explicitly choosing how to convert between float and - :class:`!Decimal`. (Fixed by Mark Dickinson; :issue:`2531`.) - -diff --git a/Doc/whatsnew/3.1.rst b/Doc/whatsnew/3.1.rst -index 7ecc34abb7c..fd427421ebf 100644 ---- a/Doc/whatsnew/3.1.rst -+++ b/Doc/whatsnew/3.1.rst -@@ -205,9 +205,9 @@ - - (Contributed by Mark Dickinson; :issue:`4707`.) - --* Python now uses David Gay's algorithm for finding the shortest floating -- point representation that doesn't change its value. This should help -- mitigate some of the confusion surrounding binary floating point -+* Python now uses David Gay's algorithm for finding the shortest floating-point -+ representation that doesn't change its value. This should help -+ mitigate some of the confusion surrounding binary floating-point - numbers. - - The significance is easily seen with a number like ``1.1`` which does not -@@ -215,7 +215,7 @@ - equivalent, an expression like ``float('1.1')`` evaluates to the nearest - representable value which is ``0x1.199999999999ap+0`` in hex or - ``1.100000000000000088817841970012523233890533447265625`` in decimal. That -- nearest value was and still is used in subsequent floating point -+ nearest value was and still is used in subsequent floating-point - calculations. - - What is new is how the number gets displayed. Formerly, Python used a -@@ -224,7 +224,7 @@ - using 17 digits was that it relied on IEEE-754 guarantees to assure that - ``eval(repr(1.1))`` would round-trip exactly to its original value. The - disadvantage is that many people found the output to be confusing (mistaking -- intrinsic limitations of binary floating point representation as being a -+ intrinsic limitations of binary floating-point representation as being a - problem with Python itself). - - The new algorithm for ``repr(1.1)`` is smarter and returns ``'1.1'``. -@@ -236,8 +236,8 @@ - it does not change the underlying values. So, it is still the case that - ``1.1 + 2.2 != 3.3`` even though the representations may suggest otherwise. - -- The new algorithm depends on certain features in the underlying floating -- point implementation. If the required features are not found, the old -+ The new algorithm depends on certain features in the underlying floating-point -+ implementation. If the required features are not found, the old - algorithm will continue to be used. Also, the text pickle protocols - assure cross-platform portability by using the old algorithm. - -@@ -550,7 +550,7 @@ - This section lists previously described changes and other bugfixes - that may require changes to your code: - --* The new floating point string representations can break existing doctests. -+* The new floating-point string representations can break existing doctests. - For example:: - - def e(): -diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst -index 30b39aad86a..9c647972d3a 100644 ---- a/Doc/whatsnew/3.10.rst -+++ b/Doc/whatsnew/3.10.rst -@@ -1233,7 +1233,7 @@ - itertools - --------- - --Add :func:`itertools.pairwise()`. -+Add :func:`itertools.pairwise`. - (Contributed by Raymond Hettinger in :issue:`38200`.) - - linecache -@@ -1245,14 +1245,14 @@ - os - -- - --Add :func:`os.cpu_count()` support for VxWorks RTOS. -+Add :func:`os.cpu_count` support for VxWorks RTOS. - (Contributed by Peixing Xin in :issue:`41440`.) 
- - Add a new function :func:`os.eventfd` and related helpers to wrap the - ``eventfd2`` syscall on Linux. - (Contributed by Christian Heimes in :issue:`41001`.) - --Add :func:`os.splice()` that allows to move data between two file -+Add :func:`os.splice` that allows to move data between two file - descriptors without copying between kernel address space and user - address space, where one of the file descriptors must refer to a - pipe. (Contributed by Pablo Galindo in :issue:`41625`.) -@@ -1292,7 +1292,7 @@ - platform - -------- - --Add :func:`platform.freedesktop_os_release()` to retrieve operation system -+Add :func:`platform.freedesktop_os_release` to retrieve operation system - identification from `freedesktop.org os-release - `_ standard file. - (Contributed by Christian Heimes in :issue:`28468`.) -diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst -index 5e272478ce8..84805f93ca7 100644 ---- a/Doc/whatsnew/3.11.rst -+++ b/Doc/whatsnew/3.11.rst -@@ -768,6 +768,21 @@ - (Contributed by Yurii Karabas in :issue:`46014`.) - - -+.. _whatsnew311-gzip: -+ -+gzip -+---- -+ -+* The :func:`gzip.compress` function is now faster when used with the -+ **mtime=0** argument as it delegates the compression entirely to a single -+ :func:`zlib.compress` operation. There is one side effect of this change: The -+ gzip file header contains an "OS" byte in its header. That was traditionally -+ always set to a value of 255 representing "unknown" by the :mod:`gzip` -+ module. Now, when using :func:`~gzip.compress` with **mtime=0**, it may be -+ set to a different value by the underlying zlib C library Python was linked -+ against. -+ (See :gh:`112346` for details on the side effect.) -+ - .. _whatsnew311-hashlib: - - hashlib -@@ -2017,8 +2032,8 @@ - It was introduced in Python 3.4 but has been broken since Python 3.7. - (Contributed by Inada Naoki in :issue:`23882`.) - --* Removed the undocumented private :meth:`!float.__set_format__()` method, -- previously known as :meth:`!float.__setformat__()` in Python 3.7. -+* Removed the undocumented private :meth:`!float.__set_format__` method, -+ previously known as :meth:`!float.__setformat__` in Python 3.7. - Its docstring said: "You probably don't want to use this function. - It exists mainly to be used in Python's test suite." - (Contributed by Victor Stinner in :issue:`46852`.) -@@ -2123,7 +2138,7 @@ - :issue:`45440` and :issue:`46640`.) - - * Support for `IEEE 754 `_ -- floating point numbers. -+ floating-point numbers. - (Contributed by Victor Stinner in :issue:`46917`.) - - * The :c:macro:`!Py_NO_NAN` macro has been removed. -diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst -index 6ba04c6227b..3ecfdd873fb 100644 ---- a/Doc/whatsnew/3.12.rst -+++ b/Doc/whatsnew/3.12.rst -@@ -739,7 +739,7 @@ - itertools - --------- - --* Add :class:`itertools.batched()` for collecting into even-sized -+* Add :func:`itertools.batched` for collecting into even-sized - tuples where the last batch may be shorter than the rest. - (Contributed by Raymond Hettinger in :gh:`98363`.) - -@@ -1330,7 +1330,7 @@ - (Contributed by Brett Cannon in :gh:`65961`.) - - * The bitwise inversion operator (``~``) on bool is deprecated. It will throw an -- error in Python 3.14. Use ``not`` for logical negation of bools instead. -+ error in Python 3.16. Use ``not`` for logical negation of bools instead. - In the rare case that you really need the bitwise inversion of the underlying - ``int``, convert to int explicitly: ``~int(x)``. (Contributed by Tim Hoffmann - in :gh:`103487`.) 
-@@ -1341,152 +1341,15 @@ - therefore it will be removed in 3.14. - (Contributed by Nikita Sobolev in :gh:`101866`.) - --Pending Removal in Python 3.13 -------------------------------- -- --The following modules and APIs have been deprecated in earlier Python releases, --and will be removed in Python 3.13. -- --Modules (see :pep:`594`): -- --* :mod:`aifc` --* :mod:`audioop` --* :mod:`cgi` --* :mod:`cgitb` --* :mod:`chunk` --* :mod:`crypt` --* :mod:`imghdr` --* :mod:`mailcap` --* :mod:`msilib` --* :mod:`nis` --* :mod:`nntplib` --* :mod:`ossaudiodev` --* :mod:`pipes` --* :mod:`sndhdr` --* :mod:`spwd` --* :mod:`sunau` --* :mod:`telnetlib` --* :mod:`uu` --* :mod:`xdrlib` -- --Other modules: -- --* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`) -- --APIs: -- --* :class:`!configparser.LegacyInterpolation` (:gh:`90765`) --* ``locale.resetlocale()`` (:gh:`90817`) --* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`) --* :func:`!unittest.findTestCases` (:gh:`50096`) --* :func:`!unittest.getTestCaseNames` (:gh:`50096`) --* :func:`!unittest.makeSuite` (:gh:`50096`) --* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`) --* :class:`!webbrowser.MacOSX` (:gh:`86421`) --* :class:`classmethod` descriptor chaining (:gh:`89519`) --* :mod:`importlib.resources` deprecated methods: -- -- * ``contents()`` -- * ``is_resource()`` -- * ``open_binary()`` -- * ``open_text()`` -- * ``path()`` -- * ``read_binary()`` -- * ``read_text()`` -- -- Use :func:`importlib.resources.files()` instead. Refer to `importlib-resources: Migrating from Legacy -- `_ (:gh:`106531`) -- --Pending Removal in Python 3.14 -------------------------------- -- --The following APIs have been deprecated --and will be removed in Python 3.14. -- --* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters -- of :class:`!argparse.BooleanOptionalAction` -- --* :mod:`ast`: -- -- * :class:`!ast.Num` -- * :class:`!ast.Str` -- * :class:`!ast.Bytes` -- * :class:`!ast.NameConstant` -- * :class:`!ast.Ellipsis` -- --* :mod:`asyncio`: -- -- * :class:`!asyncio.MultiLoopChildWatcher` -- * :class:`!asyncio.FastChildWatcher` -- * :class:`!asyncio.AbstractChildWatcher` -- * :class:`!asyncio.SafeChildWatcher` -- * :func:`!asyncio.set_child_watcher` -- * :func:`!asyncio.get_child_watcher`, -- * :meth:`!asyncio.AbstractEventLoopPolicy.set_child_watcher` -- * :meth:`!asyncio.AbstractEventLoopPolicy.get_child_watcher` -- --* :mod:`collections.abc`: :class:`!collections.abc.ByteString`. -- --* :mod:`email`: the *isdst* parameter in :func:`email.utils.localtime`. -- --* :mod:`importlib.abc`: -- -- * :class:`!importlib.abc.ResourceReader` -- * :class:`!importlib.abc.Traversable` -- * :class:`!importlib.abc.TraversableResources` -- --* :mod:`itertools`: Support for copy, deepcopy, and pickle operations. -- --* :mod:`pkgutil`: -- -- * :func:`!pkgutil.find_loader` -- * :func:`!pkgutil.get_loader`. -- --* :mod:`pty`: -- -- * :func:`!pty.master_open` -- * :func:`!pty.slave_open` -- --* :mod:`shutil`: The *onerror* argument of :func:`shutil.rmtree` -- --* :mod:`typing`: :class:`!typing.ByteString` -- --* :mod:`xml.etree.ElementTree`: Testing the truth value of an :class:`xml.etree.ElementTree.Element`. -- --* The ``__package__`` and ``__cached__`` attributes on module objects. -- --* The :attr:`~codeobject.co_lnotab` attribute of code objects. -- --Pending Removal in Python 3.15 -------------------------------- -- --The following APIs have been deprecated --and will be removed in Python 3.15. 
-- --APIs: -- --* :func:`locale.getdefaultlocale` (:gh:`90817`) -- -- --Pending Removal in Future Versions ------------------------------------ -- --The following APIs were deprecated in earlier Python versions and will be removed, --although there is currently no date scheduled for their removal. -+.. include:: ../deprecations/pending-removal-in-3.13.rst - --* :mod:`array`'s ``'u'`` format code (:gh:`57281`) -+.. include:: ../deprecations/pending-removal-in-3.14.rst - --* :class:`typing.Text` (:gh:`92332`) -+.. include:: ../deprecations/pending-removal-in-3.15.rst - --* Currently Python accepts numeric literals immediately followed by keywords, -- for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing -- and ambiguous expressions like ``[0x1for x in y]`` (which can be -- interpreted as ``[0x1 for x in y]`` or ``[0x1f or x in y]``). -- A syntax warning is raised if the numeric literal is -- immediately followed by one of keywords :keyword:`and`, :keyword:`else`, -- :keyword:`for`, :keyword:`if`, :keyword:`in`, :keyword:`is` and :keyword:`or`. -- In a future release it will be changed to a syntax error. (:gh:`87999`) -+.. include:: ../deprecations/pending-removal-in-3.16.rst - -+.. include:: ../deprecations/pending-removal-in-future.rst - - Removed - ======= -@@ -1570,9 +1433,9 @@ - ------- - - * Remove the pure Python implementation of :mod:`hashlib`'s -- :func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and -+ :func:`hashlib.pbkdf2_hmac`, deprecated in Python 3.10. Python 3.10 and - newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides -- a C implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. -+ a C implementation of :func:`~hashlib.pbkdf2_hmac` which is faster. - (Contributed by Victor Stinner in :gh:`94199`.) - - importlib -@@ -1581,7 +1444,7 @@ - * Many previously deprecated cleanups in :mod:`importlib` have now been - completed: - -- * References to, and support for :meth:`!module_repr()` has been removed. -+ * References to, and support for :meth:`!module_repr` has been removed. - (Contributed by Barry Warsaw in :gh:`97850`.) - - * ``importlib.util.set_package``, ``importlib.util.set_loader`` and -@@ -2358,92 +2221,13 @@ - overrides :c:member:`~PyTypeObject.tp_new` is deprecated. - Call the metaclass instead. - --Pending Removal in Python 3.14 --^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -+.. Add deprecations above alphabetically, not here at the end. - --* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules -- (:pep:`699`; :gh:`101193`). -+.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst - --* Global configuration variables: -+.. 
include:: ../deprecations/c-api-pending-removal-in-3.15.rst - -- * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` -- * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` -- * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` -- * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` -- * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` -- * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` -- * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` -- * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` -- * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` -- * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` -- * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` -- * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` -- * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` -- * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` -- and :c:member:`PyConfig.hash_seed` -- * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` -- * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` -- * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` -- * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` -- * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` -- * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` -- * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) -- -- The :c:func:`Py_InitializeFromConfig` API should be used with -- :c:type:`PyConfig` instead. -- --* Creating :c:data:`immutable types ` with mutable -- bases (:gh:`95388`). -- --Pending Removal in Python 3.15 --^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -- --* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` --* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` --* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` --* Python initialization functions: -- -- * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and -- :data:`!warnings.filters` -- * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` -- * :c:func:`Py_GetPath`: get :data:`sys.path` -- * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` -- * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` -- * :c:func:`Py_GetProgramName`: get :data:`sys.executable` -- * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or -- the :envvar:`PYTHONHOME` environment variable -- --Pending Removal in Future Versions --^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -- --The following APIs are deprecated and will be removed, --although there is currently no date scheduled for their removal. 
-- --* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8 --* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` --* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` --* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` --* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` --* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` --* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` --* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` --* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` --* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` --* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` --* :c:func:`PyUnicode_READY`: unneeded since Python 3.12 --* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` --* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` --* :c:member:`!PyBytesObject.ob_shash` member: -- call :c:func:`PyObject_Hash` instead --* :c:member:`!PyDictObject.ma_version_tag` member --* Thread Local Storage (TLS) API: -- -- * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` -- * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` -- * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` -- * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` -- * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` -- * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7 -+.. include:: ../deprecations/c-api-pending-removal-in-future.rst - - Removed - ------- -@@ -2476,3 +2260,37 @@ - - * Fixed ``is_global`` and ``is_private`` behavior in ``IPv4Address``, - ``IPv6Address``, ``IPv4Network`` and ``IPv6Network``. -+ -+ -+Notable changes in 3.12.5 -+========================= -+ -+email -+----- -+ -+ -+* Headers with embedded newlines are now quoted on output. -+ -+ The :mod:`~email.generator` will now refuse to serialize (write) headers -+ that are improperly folded or delimited, such that they would be parsed as -+ multiple headers or joined with adjacent data. -+ If you need to turn this safety feature off, -+ set :attr:`~email.policy.Policy.verify_generated_headers`. -+ (Contributed by Bas Bloemsaat and Petr Viktorin in :gh:`121650`.) -+ -+ -+Notable changes in 3.12.6 -+========================= -+ -+email -+----- -+ -+* :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return -+ ``('', '')`` 2-tuples in more situations where invalid email addresses are -+ encountered, instead of potentially inaccurate values. -+ An optional *strict* parameter was added to these two functions: -+ use ``strict=False`` to get the old behavior, accepting malformed inputs. -+ ``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to -+ check if the *strict* paramater is available. -+ (Contributed by Thomas Dwyer and Victor Stinner for :gh:`102988` to improve -+ the CVE-2023-27043 fix.) -diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst -index 1300b759eca..e75066bcf2e 100644 ---- a/Doc/whatsnew/3.2.rst -+++ b/Doc/whatsnew/3.2.rst -@@ -531,7 +531,7 @@ - - (Proposed and implemented by Mark Dickinson; :issue:`9337`.) - --* :class:`memoryview` objects now have a :meth:`~memoryview.release()` method -+* :class:`memoryview` objects now have a :meth:`~memoryview.release` method - and they also now support the context management protocol. 
This allows timely - release of any resources that were acquired when requesting a buffer from the - original object. -@@ -1312,7 +1312,7 @@ - been relaxed. It is still unsupported (and ill-advised) to have implicit - mixing in arithmetic expressions such as ``Decimal('1.1') + float('1.1')`` - because the latter loses information in the process of constructing the binary --float. However, since existing floating point value can be converted losslessly -+float. However, since existing floating-point value can be converted losslessly - to either a decimal or rational representation, it makes sense to add them to - the constructor and to support mixed-type comparisons. - -@@ -1325,7 +1325,7 @@ - and :class:`fractions.Fraction` (:issue:`2531` and :issue:`8188`). - - Similar changes were made to :class:`fractions.Fraction` so that the --:meth:`~fractions.Fraction.from_float()` and :meth:`~fractions.Fraction.from_decimal` -+:meth:`~fractions.Fraction.from_float` and :meth:`~fractions.Fraction.from_decimal` - methods are no longer needed (:issue:`8294`): - - >>> from decimal import Decimal -@@ -1622,7 +1622,7 @@ - - The :mod:`socket` module has two new improvements. - --* Socket objects now have a :meth:`~socket.socket.detach()` method which puts -+* Socket objects now have a :meth:`~socket.socket.detach` method which puts - the socket into closed state without actually closing the underlying file - descriptor. The latter can then be reused for other purposes. - (Added by Antoine Pitrou; :issue:`8524`.) -@@ -1859,11 +1859,11 @@ - -------- - - :class:`!asyncore.dispatcher` now provides a --:meth:`!handle_accepted()` method -+:meth:`!handle_accepted` method - returning a ``(sock, addr)`` pair which is called when a connection has actually - been established with a new remote endpoint. This is supposed to be used as a --replacement for old :meth:`!handle_accept()` and avoids --the user to call :meth:`!accept()` directly. -+replacement for old :meth:`!handle_accept` and avoids -+the user to call :meth:`!accept` directly. - - (Contributed by Giampaolo Rodolà; :issue:`6706`.) - -@@ -2321,7 +2321,7 @@ - intervals and reduced overhead due to lock contention and the number of - ensuing system calls. The notion of a "check interval" to allow thread - switches has been abandoned and replaced by an absolute duration expressed in -- seconds. This parameter is tunable through :func:`sys.setswitchinterval()`. -+ seconds. This parameter is tunable through :func:`sys.setswitchinterval`. - It currently defaults to 5 milliseconds. - - Additional details about the implementation can be read from a `python-dev -diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst -index 0f810c17cb9..de19c6c12de 100644 ---- a/Doc/whatsnew/3.3.rst -+++ b/Doc/whatsnew/3.3.rst -@@ -779,8 +779,8 @@ - Some smaller changes made to the core Python language are: - - * Added support for Unicode name aliases and named sequences. -- Both :func:`unicodedata.lookup()` and ``'\N{...}'`` now resolve name aliases, -- and :func:`unicodedata.lookup()` resolves named sequences too. -+ Both :func:`unicodedata.lookup` and ``'\N{...}'`` now resolve name aliases, -+ and :func:`unicodedata.lookup` resolves named sequences too. - - (Contributed by Ezio Melotti in :issue:`12753`.) - -@@ -1097,12 +1097,12 @@ - C-module and libmpdec written by Stefan Krah. 
- - The new C version of the decimal module integrates the high speed libmpdec --library for arbitrary precision correctly rounded decimal floating point -+library for arbitrary precision correctly rounded decimal floating-point - arithmetic. libmpdec conforms to IBM's General Decimal Arithmetic Specification. - - Performance gains range from 10x for database applications to 100x for - numerically intensive applications. These numbers are expected gains --for standard precisions used in decimal floating point arithmetic. Since -+for standard precisions used in decimal floating-point arithmetic. Since - the precision is user configurable, the exact figures may vary. For example, - in integer bignum arithmetic the differences can be significantly higher. - -diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst -index 33635c6db93..9aa04e84e48 100644 ---- a/Doc/whatsnew/3.4.rst -+++ b/Doc/whatsnew/3.4.rst -@@ -1495,7 +1495,7 @@ - stat - ---- - --The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C -+The :mod:`stat` module is now backed by a C implementation in :mod:`!_stat`. A C - implementation is required as most of the values aren't standardized and - are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) - -@@ -1967,7 +1967,7 @@ - - * The ``-R`` option to the :ref:`python regression test suite ` now - also checks for memory allocation leaks, using -- :func:`sys.getallocatedblocks()`. (Contributed by Antoine Pitrou in -+ :func:`sys.getallocatedblocks`. (Contributed by Antoine Pitrou in - :issue:`13390`.) - - * ``python -m`` now works with namespace packages. -diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst -index 06bcd354338..ba1432625dd 100644 ---- a/Doc/whatsnew/3.5.rst -+++ b/Doc/whatsnew/3.5.rst -@@ -1667,7 +1667,7 @@ - accept a *decode_data* keyword argument to determine if the ``DATA`` portion of - the SMTP transaction is decoded using the ``"utf-8"`` codec or is instead - provided to the --:meth:`!SMTPServer.process_message()` -+:meth:`!SMTPServer.process_message` - method as a byte string. The default is ``True`` for backward compatibility - reasons, but will change to ``False`` in Python 3.6. If *decode_data* is set - to ``False``, the ``process_message`` method must be prepared to accept keyword -@@ -1677,14 +1677,14 @@ - The :class:`!SMTPServer` class now advertises the ``8BITMIME`` extension - (:rfc:`6152`) if *decode_data* has been set ``True``. If the client - specifies ``BODY=8BITMIME`` on the ``MAIL`` command, it is passed to --:meth:`!SMTPServer.process_message()` -+:meth:`!SMTPServer.process_message` - via the *mail_options* keyword. - (Contributed by Milan Oberkirch and R. David Murray in :issue:`21795`.) - - The :class:`!SMTPServer` class now also supports the ``SMTPUTF8`` - extension (:rfc:`6531`: Internationalized Email). If the client specified - ``SMTPUTF8 BODY=8BITMIME`` on the ``MAIL`` command, they are passed to --:meth:`!SMTPServer.process_message()` -+:meth:`!SMTPServer.process_message` - via the *mail_options* keyword. It is the responsibility of the - ``process_message`` method to correctly handle the ``SMTPUTF8`` data. - (Contributed by Milan Oberkirch in :issue:`21725`.) 
-@@ -1935,8 +1935,8 @@ - tkinter - ------- - --The :mod:`tkinter._fix` module used for setting up the Tcl/Tk environment --on Windows has been replaced by a private function in the :mod:`_tkinter` -+The :mod:`!tkinter._fix` module used for setting up the Tcl/Tk environment -+on Windows has been replaced by a private function in the :mod:`!_tkinter` - module which makes no permanent changes to environment variables. - (Contributed by Zachary Ware in :issue:`20035`.) - -@@ -2405,7 +2405,7 @@ - error-prone and has been removed in Python 3.5. See :issue:`13936` for full - details. - --* The :meth:`ssl.SSLSocket.send()` method now raises either -+* The :meth:`ssl.SSLSocket.send` method now raises either - :exc:`ssl.SSLWantReadError` or :exc:`ssl.SSLWantWriteError` - on a non-blocking socket if the operation would block. Previously, - it would return ``0``. (Contributed by Nikolaus Rath in :issue:`20951`.) -diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst -index e91e6dc11b9..cfd6d497d5a 100644 ---- a/Doc/whatsnew/3.6.rst -+++ b/Doc/whatsnew/3.6.rst -@@ -261,7 +261,7 @@ - - The :ref:`string formatting ` language also now has support - for the ``'_'`` option to signal the use of an underscore for a thousands --separator for floating point presentation types and for integer -+separator for floating-point presentation types and for integer - presentation type ``'d'``. For integer presentation types ``'b'``, - ``'o'``, ``'x'``, and ``'X'``, underscores will be inserted every 4 - digits:: -@@ -511,10 +511,10 @@ - Prior to Python 3.6, data loss could result when using bytes paths on Windows. - With this change, using bytes to represent paths is now supported on Windows, - provided those bytes are encoded with the encoding returned by --:func:`sys.getfilesystemencoding()`, which now defaults to ``'utf-8'``. -+:func:`sys.getfilesystemencoding`, which now defaults to ``'utf-8'``. - - Applications that do not use str to represent paths should use --:func:`os.fsencode()` and :func:`os.fsdecode()` to ensure their bytes are -+:func:`os.fsencode` and :func:`os.fsdecode` to ensure their bytes are - correctly encoded. To revert to the previous behaviour, set - :envvar:`PYTHONLEGACYWINDOWSFSENCODING` or call - :func:`sys._enablelegacywindowsfsencoding`. -@@ -780,7 +780,7 @@ - - Note that the pseudo-random generators in the :mod:`random` module - should *NOT* be used for security purposes. Use :mod:`secrets` -- on Python 3.6+ and :func:`os.urandom()` on Python 3.5 and earlier. -+ on Python 3.6+ and :func:`os.urandom` on Python 3.5 and earlier. - - .. seealso:: - -@@ -1316,7 +1316,7 @@ - pickletools - ----------- - --:func:`pickletools.dis()` now outputs the implicit memo index for the -+:func:`pickletools.dis` now outputs the implicit memo index for the - ``MEMOIZE`` opcode. - (Contributed by Serhiy Storchaka in :issue:`25382`.) - -diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst -index 71831fc697d..6806ac205b2 100644 ---- a/Doc/whatsnew/3.7.rst -+++ b/Doc/whatsnew/3.7.rst -@@ -339,7 +339,7 @@ - ------------------------------------------------------ - - The resolution of clocks in modern systems can exceed the limited precision --of a floating point number returned by the :func:`time.time` function -+of a floating-point number returned by the :func:`time.time` function - and its variants. 
To avoid loss of precision, :pep:`564` adds six new - "nanosecond" variants of the existing timer functions to the :mod:`time` - module: -@@ -603,7 +603,7 @@ - new ABC for access to, opening, and reading *resources* inside packages. - Resources are roughly similar to files inside packages, but they needn't - be actual files on the physical file system. Module loaders can provide a --:meth:`get_resource_reader()` function which returns -+:meth:`get_resource_reader` function which returns - a :class:`importlib.abc.ResourceReader` instance to support this - new API. Built-in file path loaders and zip file loaders both support this. - -@@ -2017,11 +2017,11 @@ - --------- - - Methods --:meth:`!MetaPathFinder.find_module()` -+:meth:`!MetaPathFinder.find_module` - (replaced by - :meth:`MetaPathFinder.find_spec() `) - and --:meth:`!PathEntryFinder.find_loader()` -+:meth:`!PathEntryFinder.find_loader` - (replaced by - :meth:`PathEntryFinder.find_spec() `) - both deprecated in Python 3.4 now emit :exc:`DeprecationWarning`. -@@ -2048,7 +2048,7 @@ - threading - --------- - --:mod:`dummy_threading` and :mod:`_dummy_thread` have been deprecated. It is -+:mod:`!dummy_threading` and :mod:`!_dummy_thread` have been deprecated. It is - no longer possible to build Python with threading disabled. - Use :mod:`threading` instead. - (Contributed by Antoine Pitrou in :issue:`31370`.) -@@ -2184,7 +2184,7 @@ - ``socket.socketpair`` on Python 3.5 and newer. - - * :mod:`asyncio` no longer exports the :mod:`selectors` and -- :mod:`_overlapped` modules as ``asyncio.selectors`` and -+ :mod:`!_overlapped` modules as ``asyncio.selectors`` and - ``asyncio._overlapped``. Replace ``from asyncio import selectors`` with - ``import selectors``. - -@@ -2366,7 +2366,7 @@ - positions 2--3. To match only blank lines, the pattern should be rewritten - as ``r'(?m)^[^\S\n]*$'``. - -- :func:`re.sub()` now replaces empty matches adjacent to a previous -+ :func:`re.sub` now replaces empty matches adjacent to a previous - non-empty match. For example ``re.sub('x*', '-', 'abxd')`` returns now - ``'-a-b--d-'`` instead of ``'-a-b-d-'`` (the first minus between 'b' and - 'd' replaces 'x', and the second minus replaces an empty string between -@@ -2425,7 +2425,7 @@ - to :meth:`ArgumentParser.add_subparsers() `. - (Contributed by Anthony Sottile in :issue:`26510`.) - --* :meth:`ast.literal_eval()` is now stricter. Addition and subtraction of -+* :meth:`ast.literal_eval` is now stricter. Addition and subtraction of - arbitrary numbers are no longer allowed. - (Contributed by Serhiy Storchaka in :issue:`31778`.) - -diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst -index ae049a31014..9f1408de8e5 100644 ---- a/Doc/whatsnew/3.8.rst -+++ b/Doc/whatsnew/3.8.rst -@@ -936,7 +936,7 @@ - logging - ------- - --Added a *force* keyword argument to :func:`logging.basicConfig()` -+Added a *force* keyword argument to :func:`logging.basicConfig` - When set to true, any existing handlers attached - to the root logger are removed and closed before carrying out the - configuration specified by the other arguments. 
-@@ -1077,16 +1077,16 @@ - ------- - - :mod:`pathlib.Path` methods that return a boolean result like --:meth:`~pathlib.Path.exists()`, :meth:`~pathlib.Path.is_dir()`, --:meth:`~pathlib.Path.is_file()`, :meth:`~pathlib.Path.is_mount()`, --:meth:`~pathlib.Path.is_symlink()`, :meth:`~pathlib.Path.is_block_device()`, --:meth:`~pathlib.Path.is_char_device()`, :meth:`~pathlib.Path.is_fifo()`, --:meth:`~pathlib.Path.is_socket()` now return ``False`` instead of raising -+:meth:`~pathlib.Path.exists`, :meth:`~pathlib.Path.is_dir`, -+:meth:`~pathlib.Path.is_file`, :meth:`~pathlib.Path.is_mount`, -+:meth:`~pathlib.Path.is_symlink`, :meth:`~pathlib.Path.is_block_device`, -+:meth:`~pathlib.Path.is_char_device`, :meth:`~pathlib.Path.is_fifo`, -+:meth:`~pathlib.Path.is_socket` now return ``False`` instead of raising - :exc:`ValueError` or its subclass :exc:`UnicodeEncodeError` for paths that - contain characters unrepresentable at the OS level. - (Contributed by Serhiy Storchaka in :issue:`33721`.) - --Added :meth:`!pathlib.Path.link_to()` which creates a hard link pointing -+Added :meth:`!pathlib.Path.link_to` which creates a hard link pointing - to a path. - (Contributed by Joannah Nanjekye in :issue:`26978`) - Note that ``link_to`` was deprecated in 3.10 and removed in 3.12 in -@@ -1170,13 +1170,13 @@ - socket - ------ - --Added :meth:`~socket.create_server()` and :meth:`~socket.has_dualstack_ipv6()` -+Added :meth:`~socket.create_server` and :meth:`~socket.has_dualstack_ipv6` - convenience functions to automate the necessary tasks usually involved when - creating a server socket, including accepting both IPv4 and IPv6 connections - on the same socket. (Contributed by Giampaolo Rodolà in :issue:`17561`.) - --The :func:`socket.if_nameindex()`, :func:`socket.if_nametoindex()`, and --:func:`socket.if_indextoname()` functions have been implemented on Windows. -+The :func:`socket.if_nameindex`, :func:`socket.if_nametoindex`, and -+:func:`socket.if_indextoname` functions have been implemented on Windows. - (Contributed by Zackery Spytz in :issue:`37007`.) - - -@@ -1192,11 +1192,11 @@ - statistics - ---------- - --Added :func:`statistics.fmean` as a faster, floating point variant of --:func:`statistics.mean()`. (Contributed by Raymond Hettinger and -+Added :func:`statistics.fmean` as a faster, floating-point variant of -+:func:`statistics.mean`. (Contributed by Raymond Hettinger and - Steven D'Aprano in :issue:`35904`.) - --Added :func:`statistics.geometric_mean()` -+Added :func:`statistics.geometric_mean` - (Contributed by Raymond Hettinger in :issue:`27181`.) - - Added :func:`statistics.multimode` that returns a list of the most -@@ -1367,10 +1367,10 @@ - have been added as well. - (Contributed by Lisa Roach in :issue:`26467`). - --Added :func:`~unittest.addModuleCleanup()` and --:meth:`~unittest.TestCase.addClassCleanup()` to unittest to support --cleanups for :func:`~unittest.setUpModule()` and --:meth:`~unittest.TestCase.setUpClass()`. -+Added :func:`~unittest.addModuleCleanup` and -+:meth:`~unittest.TestCase.addClassCleanup` to unittest to support -+cleanups for :func:`~unittest.setUpModule` and -+:meth:`~unittest.TestCase.setUpClass`. - (Contributed by Lisa Roach in :issue:`24412`.) - - Several mock assert functions now also print a list of actual calls upon -@@ -1432,7 +1432,7 @@ - (Contributed by Stefan Behnel in :issue:`28238`.) - - The :mod:`xml.etree.ElementTree` module provides a new function --:func:`–xml.etree.ElementTree.canonicalize()` that implements C14N 2.0. 
-+:func:`–xml.etree.ElementTree.canonicalize` that implements C14N 2.0. - (Contributed by Stefan Behnel in :issue:`13611`.) - - The target object of :class:`xml.etree.ElementTree.XMLParser` can -@@ -1712,7 +1712,7 @@ - the ``l*gettext()`` functions. - (Contributed by Serhiy Storchaka in :issue:`33710`.) - --* The :meth:`~threading.Thread.isAlive()` method of :class:`threading.Thread` -+* The :meth:`~threading.Thread.isAlive` method of :class:`threading.Thread` - has been deprecated. - (Contributed by Donghee Na in :issue:`35283`.) - -diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst -index f32224aefc0..747bcfa84bb 100644 ---- a/Doc/whatsnew/3.9.rst -+++ b/Doc/whatsnew/3.9.rst -@@ -408,8 +408,8 @@ - - datetime - -------- --The :meth:`~datetime.date.isocalendar()` of :class:`datetime.date` --and :meth:`~datetime.datetime.isocalendar()` of :class:`datetime.datetime` -+The :meth:`~datetime.date.isocalendar` of :class:`datetime.date` -+and :meth:`~datetime.datetime.isocalendar` of :class:`datetime.datetime` - methods now returns a :func:`~collections.namedtuple` instead of a :class:`tuple`. - (Contributed by Donghee Na in :issue:`24416`.) - -@@ -610,7 +610,7 @@ - pathlib - ------- - --Added :meth:`pathlib.Path.readlink()` which acts similarly to -+Added :meth:`pathlib.Path.readlink` which acts similarly to - :func:`os.readlink`. - (Contributed by Girts Folkmanis in :issue:`30618`) - -@@ -983,13 +983,13 @@ - (Contributed by Victor Stinner in :issue:`37312`.) - - * ``aifc.openfp()`` alias to ``aifc.open()``, ``sunau.openfp()`` alias to -- ``sunau.open()``, and ``wave.openfp()`` alias to :func:`wave.open()` have been -+ ``sunau.open()``, and ``wave.openfp()`` alias to :func:`wave.open` have been - removed. They were deprecated since Python 3.7. - (Contributed by Victor Stinner in :issue:`37320`.) - --* The :meth:`!isAlive()` method of :class:`threading.Thread` -+* The :meth:`!isAlive` method of :class:`threading.Thread` - has been removed. It was deprecated since Python 3.8. -- Use :meth:`~threading.Thread.is_alive()` instead. -+ Use :meth:`~threading.Thread.is_alive` instead. - (Contributed by Donghee Na in :issue:`37804`.) - - * Methods ``getchildren()`` and ``getiterator()`` of classes -diff --git a/Include/floatobject.h b/Include/floatobject.h -index 999441ac536..8963c16832a 100644 ---- a/Include/floatobject.h -+++ b/Include/floatobject.h -@@ -2,7 +2,7 @@ - /* Float object interface */ - - /* --PyFloatObject represents a (double precision) floating point number. -+PyFloatObject represents a (double precision) floating-point number. 
- */ - - #ifndef Py_FLOATOBJECT_H -diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h -index 4d9681d59a6..899d413b05e 100644 ---- a/Include/internal/pycore_dtoa.h -+++ b/Include/internal/pycore_dtoa.h -@@ -11,8 +11,6 @@ - #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR - - --#if _PY_SHORT_FLOAT_REPR == 1 -- - typedef uint32_t ULong; - - struct -@@ -22,15 +20,15 @@ - ULong x[1]; - }; - --#ifdef Py_USING_MEMORY_DEBUGGER -+#if defined(Py_USING_MEMORY_DEBUGGER) || _PY_SHORT_FLOAT_REPR == 0 - - struct _dtoa_state { - int _not_used; - }; --#define _dtoa_interp_state_INIT(INTERP) \ -+#define _dtoa_state_INIT(INTERP) \ - {0} - --#else // !Py_USING_MEMORY_DEBUGGER -+#else // !Py_USING_MEMORY_DEBUGGER && _PY_SHORT_FLOAT_REPR != 0 - - /* The size of the Bigint freelist */ - #define Bigint_Kmax 7 -@@ -65,8 +63,6 @@ - int *decpt, int *sign, char **rve); - PyAPI_FUNC(void) _Py_dg_freedtoa(char *s); - --#endif // _PY_SHORT_FLOAT_REPR == 1 -- - #ifdef __cplusplus - } - #endif -diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h -index ad7d74c5dd2..4d355b2bc8d 100644 ---- a/Include/internal/pycore_frame.h -+++ b/Include/internal/pycore_frame.h -@@ -200,7 +200,7 @@ - - /* Gets the PyFrameObject for this frame, lazily - * creating it if necessary. -- * Returns a borrowed referennce */ -+ * Returns a borrowed reference */ - static inline PyFrameObject * - _PyFrame_GetFrameObject(_PyInterpreterFrame *frame) - { -@@ -213,9 +213,6 @@ - return _PyFrame_MakeAndSetFrameObject(frame); - } - --void --_PyFrame_ClearLocals(_PyInterpreterFrame *frame); -- - /* Clears all references in the frame. - * If take is non-zero, then the _PyInterpreterFrame frame - * may be transferred to the frame object it references -diff --git a/Include/patchlevel.h b/Include/patchlevel.h -index 07e77187d06..704c1c442de 100644 ---- a/Include/patchlevel.h -+++ b/Include/patchlevel.h -@@ -18,12 +18,12 @@ - /*--start constants--*/ - #define PY_MAJOR_VERSION 3 - #define PY_MINOR_VERSION 12 --#define PY_MICRO_VERSION 4 -+#define PY_MICRO_VERSION 6 - #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL - #define PY_RELEASE_SERIAL 0 - - /* Version as a string */ --#define PY_VERSION "3.12.4" -+#define PY_VERSION "3.12.6" - /*--end constants--*/ - - /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. -diff --git a/Include/pymacro.h b/Include/pymacro.h -index 342d2a7b844..d5700dc3893 100644 ---- a/Include/pymacro.h -+++ b/Include/pymacro.h -@@ -15,11 +15,11 @@ - // MSVC makes static_assert a keyword in C11-17, contrary to the standards. - // - // In C++11 and C2x, static_assert is a keyword, redefining is undefined --// behaviour. So only define if building as C (if __STDC_VERSION__ is defined), --// not C++, and only for C11-17. -+// behaviour. So only define if building as C, not C++ (if __cplusplus is -+// not defined), and only for C11-17. 
- #if !defined(static_assert) && (defined(__GNUC__) || defined(__clang__)) \ -- && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \ -- && __STDC_VERSION__ <= 201710L -+ && !defined(__cplusplus) && defined(__STDC_VERSION__) \ -+ && __STDC_VERSION__ >= 201112L && __STDC_VERSION__ <= 201710L - # define static_assert _Static_assert - #endif - --- /dev/null +++ b/Lib/_ios_support.py @@ -0,0 +1,71 @@ @@ -14513,310 +1404,6 @@ index 342d2a7b844..d5700dc3893 100644 + model = objc.objc_msgSend(device_model, SEL_UTF8String).decode() + + return system, release, model, is_simulator -diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py -index cd0ea900bfb..ad6292e1e41 100644 ---- a/Lib/_pydatetime.py -+++ b/Lib/_pydatetime.py -@@ -970,6 +970,8 @@ - @classmethod - def fromtimestamp(cls, t): - "Construct a date from a POSIX timestamp (like time.time())." -+ if t is None: -+ raise TypeError("'NoneType' object cannot be interpreted as an integer") - y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t) - return cls(y, m, d) - -diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py -index 613123ec7b4..75df3db2624 100644 ---- a/Lib/_pydecimal.py -+++ b/Lib/_pydecimal.py -@@ -424,7 +424,7 @@ - # numbers.py for more detail. - - class Decimal(object): -- """Floating point class for decimal arithmetic.""" -+ """Floating-point class for decimal arithmetic.""" - - __slots__ = ('_exp','_int','_sign', '_is_special') - # Generally, the value of the Decimal instance is given by -diff --git a/Lib/argparse.py b/Lib/argparse.py -index 120cb6c8458..e4892955e4f 100644 ---- a/Lib/argparse.py -+++ b/Lib/argparse.py -@@ -1843,7 +1843,7 @@ - # ================================== - def add_subparsers(self, **kwargs): - if self._subparsers is not None: -- self.error(_('cannot have multiple subparser arguments')) -+ raise ArgumentError(None, _('cannot have multiple subparser arguments')) - - # add the parser class to the arguments if it's not present - kwargs.setdefault('parser_class', type(self)) -@@ -1895,8 +1895,11 @@ - def parse_args(self, args=None, namespace=None): - args, argv = self.parse_known_args(args, namespace) - if argv: -- msg = _('unrecognized arguments: %s') -- self.error(msg % ' '.join(argv)) -+ msg = _('unrecognized arguments: %s') % ' '.join(argv) -+ if self.exit_on_error: -+ self.error(msg) -+ else: -+ raise ArgumentError(None, msg) - return args - - def parse_known_args(self, args=None, namespace=None): -@@ -2175,7 +2178,7 @@ - self._get_value(action, action.default)) - - if required_actions: -- self.error(_('the following arguments are required: %s') % -+ raise ArgumentError(None, _('the following arguments are required: %s') % - ', '.join(required_actions)) - - # make sure all required groups had one option present -@@ -2191,7 +2194,7 @@ - for action in group._group_actions - if action.help is not SUPPRESS] - msg = _('one of the arguments %s is required') -- self.error(msg % ' '.join(names)) -+ raise ArgumentError(None, msg % ' '.join(names)) - - # return the updated namespace and the extra arguments - return namespace, extras -@@ -2218,7 +2221,7 @@ - arg_strings = self._read_args_from_files(arg_strings) - new_arg_strings.extend(arg_strings) - except OSError as err: -- self.error(str(err)) -+ raise ArgumentError(None, str(err)) - - # return the modified argument list - return new_arg_strings -@@ -2298,7 +2301,7 @@ - for action, option_string, sep, explicit_arg in option_tuples]) - args = {'option': arg_string, 'matches': options} - msg = _('ambiguous option: %(option)s could match %(matches)s') 
-- self.error(msg % args) -+ raise ArgumentError(None, msg % args) - - # if exactly one action matched, this segmentation is good, - # so return the parsed action -@@ -2358,7 +2361,7 @@ - - # shouldn't ever get here - else: -- self.error(_('unexpected option string: %s') % option_string) -+ raise ArgumentError(None, _('unexpected option string: %s') % option_string) - - # return the collected option tuples - return result -@@ -2415,8 +2418,11 @@ - def parse_intermixed_args(self, args=None, namespace=None): - args, argv = self.parse_known_intermixed_args(args, namespace) - if argv: -- msg = _('unrecognized arguments: %s') -- self.error(msg % ' '.join(argv)) -+ msg = _('unrecognized arguments: %s') % ' '.join(argv) -+ if self.exit_on_error: -+ self.error(msg) -+ else: -+ raise ArgumentError(None, msg) - return args - - def parse_known_intermixed_args(self, args=None, namespace=None): -diff --git a/Lib/asyncio/__main__.py b/Lib/asyncio/__main__.py -index c39a31d7b3d..04655801151 100644 ---- a/Lib/asyncio/__main__.py -+++ b/Lib/asyncio/__main__.py -@@ -89,6 +89,8 @@ - - - if __name__ == '__main__': -+ sys.audit("cpython.run_stdin") -+ - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - -diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py -index 29eff0499cb..cb037fd472c 100644 ---- a/Lib/asyncio/base_events.py -+++ b/Lib/asyncio/base_events.py -@@ -994,8 +994,7 @@ - except OSError as exc: - msg = ( - f'error while attempting to bind on ' -- f'address {laddr!r}: ' -- f'{exc.strerror.lower()}' -+ f'address {laddr!r}: {str(exc).lower()}' - ) - exc = OSError(exc.errno, msg) - my_exceptions.append(exc) -@@ -1561,7 +1560,7 @@ - except OSError as err: - msg = ('error while attempting ' - 'to bind on address %r: %s' -- % (sa, err.strerror.lower())) -+ % (sa, str(err).lower())) - if err.errno == errno.EADDRNOTAVAIL: - # Assume the family is not enabled (bpo-30945) - sockets.pop() -diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py -index 97fc4e3fcb6..fd486f02c67 100644 ---- a/Lib/asyncio/futures.py -+++ b/Lib/asyncio/futures.py -@@ -272,9 +272,13 @@ - raise exceptions.InvalidStateError(f'{self._state}: {self!r}') - if isinstance(exception, type): - exception = exception() -- if type(exception) is StopIteration: -- raise TypeError("StopIteration interacts badly with generators " -- "and cannot be raised into a Future") -+ if isinstance(exception, StopIteration): -+ new_exc = RuntimeError("StopIteration interacts badly with " -+ "generators and cannot be raised into a " -+ "Future") -+ new_exc.__cause__ = exception -+ new_exc.__context__ = exception -+ exception = new_exc - self._exception = exception - self._exception_tb = exception.__traceback__ - self._state = _FINISHED -diff --git a/Lib/calendar.py b/Lib/calendar.py -index 97d7cab3365..ee3ec838c96 100644 ---- a/Lib/calendar.py -+++ b/Lib/calendar.py -@@ -159,8 +159,8 @@ - - - def monthrange(year, month): -- """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for -- year, month.""" -+ """Return weekday of first day of month (0-6 ~ Mon-Sun) -+ and number of days (28-31) for year, month.""" - if not 1 <= month <= 12: - raise IllegalMonthError(month) - day1 = weekday(year, month, 1) -diff --git a/Lib/code.py b/Lib/code.py -index 2bd5fa3e795..cb7dd44b0a3 100644 ---- a/Lib/code.py -+++ b/Lib/code.py -@@ -105,29 +105,21 @@ - The output is written by self.write(), below. 
- - """ -- type, value, tb = sys.exc_info() -- sys.last_exc = value -- sys.last_type = type -- sys.last_value = value -- sys.last_traceback = tb -- if filename and type is SyntaxError: -- # Work hard to stuff the correct filename in the exception -- try: -- msg, (dummy_filename, lineno, offset, line) = value.args -- except ValueError: -- # Not the format we expect; leave it alone -- pass -- else: -- # Stuff in the right filename -- value = SyntaxError(msg, (filename, lineno, offset, line)) -- sys.last_exc = sys.last_value = value -- if sys.excepthook is sys.__excepthook__: -- lines = traceback.format_exception_only(type, value) -- self.write(''.join(lines)) -- else: -- # If someone has set sys.excepthook, we let that take precedence -- # over self.write -- sys.excepthook(type, value, tb) -+ try: -+ typ, value, tb = sys.exc_info() -+ if filename and typ is SyntaxError: -+ # Work hard to stuff the correct filename in the exception -+ try: -+ msg, (dummy_filename, lineno, offset, line) = value.args -+ except ValueError: -+ # Not the format we expect; leave it alone -+ pass -+ else: -+ # Stuff in the right filename -+ value = SyntaxError(msg, (filename, lineno, offset, line)) -+ self._showtraceback(typ, value, None) -+ finally: -+ typ = value = tb = None - - def showtraceback(self): - """Display the exception that just occurred. -@@ -137,19 +129,34 @@ - The output is written by self.write(), below. - - """ -- sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() -- sys.last_traceback = last_tb -- sys.last_exc = ei[1] - try: -- lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) -- if sys.excepthook is sys.__excepthook__: -- self.write(''.join(lines)) -- else: -- # If someone has set sys.excepthook, we let that take precedence -- # over self.write -- sys.excepthook(ei[0], ei[1], last_tb) -+ typ, value, tb = sys.exc_info() -+ self._showtraceback(typ, value, tb.tb_next) - finally: -- last_tb = ei = None -+ typ = value = tb = None -+ -+ def _showtraceback(self, typ, value, tb): -+ sys.last_type = typ -+ sys.last_traceback = tb -+ sys.last_exc = sys.last_value = value = value.with_traceback(tb) -+ if sys.excepthook is sys.__excepthook__: -+ lines = traceback.format_exception(typ, value, tb) -+ self.write(''.join(lines)) -+ else: -+ # If someone has set sys.excepthook, we let that take precedence -+ # over self.write -+ try: -+ sys.excepthook(typ, value, tb) -+ except SystemExit: -+ raise -+ except BaseException as e: -+ e.__context__ = None -+ e = e.with_traceback(e.__traceback__.tb_next) -+ print('Error in sys.excepthook:', file=sys.stderr) -+ sys.__excepthook__(type(e), e, e.__traceback__) -+ print(file=sys.stderr) -+ print('Original exception was:', file=sys.stderr) -+ sys.__excepthook__(typ, value, tb) - - def write(self, data): - """Write a string. 
-diff --git a/Lib/colorsys.py b/Lib/colorsys.py -index bc897bd0f99..e97f91718a3 100644 ---- a/Lib/colorsys.py -+++ b/Lib/colorsys.py -@@ -24,7 +24,7 @@ - __all__ = ["rgb_to_yiq","yiq_to_rgb","rgb_to_hls","hls_to_rgb", - "rgb_to_hsv","hsv_to_rgb"] - --# Some floating point constants -+# Some floating-point constants - - ONE_THIRD = 1.0/3.0 - ONE_SIXTH = 1.0/6.0 -diff --git a/Lib/concurrent/futures/__init__.py b/Lib/concurrent/futures/__init__.py -index 292e886d5a8..72de617a5b6 100644 ---- a/Lib/concurrent/futures/__init__.py -+++ b/Lib/concurrent/futures/__init__.py -@@ -23,6 +23,7 @@ - 'ALL_COMPLETED', - 'CancelledError', - 'TimeoutError', -+ 'InvalidStateError', - 'BrokenExecutor', - 'Future', - 'Executor', diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index 6cedee74236..0e88cffc74f 100644 --- a/Lib/ctypes/__init__.py @@ -14852,757 +1439,6 @@ index c550883e7c7..12d7428fe9a 100644 from ctypes.macholib.dyld import dyld_find as _dyld_find def find_library(name): possible = ['lib%s.dylib' % name, -diff --git a/Lib/decimal.py b/Lib/decimal.py -index d61e374b9f9..4d8e15cb68f 100644 ---- a/Lib/decimal.py -+++ b/Lib/decimal.py -@@ -1,6 +1,6 @@ --"""Decimal fixed point and floating point arithmetic. -+"""Decimal fixed-point and floating-point arithmetic. - --This is an implementation of decimal floating point arithmetic based on -+This is an implementation of decimal floating-point arithmetic based on - the General Decimal Arithmetic Specification: - - http://speleotrove.com/decimal/decarith.html -diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py -index ab3c3031ef5..ec2215a5e5f 100644 ---- a/Lib/email/_header_value_parser.py -+++ b/Lib/email/_header_value_parser.py -@@ -92,6 +92,8 @@ - ASPECIALS = TSPECIALS | set("*'%") - ATTRIBUTE_ENDS = ASPECIALS | WSP - EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%') -+NLSET = {'\n', '\r'} -+SPECIALSNL = SPECIALS | NLSET - - def quote_string(value): - return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"' -@@ -2802,9 +2804,13 @@ - wrap_as_ew_blocked -= 1 - continue - tstr = str(part) -- if part.token_type == 'ptext' and set(tstr) & SPECIALS: -- # Encode if tstr contains special characters. -- want_encoding = True -+ if not want_encoding: -+ if part.token_type == 'ptext': -+ # Encode if tstr contains special characters. -+ want_encoding = not SPECIALSNL.isdisjoint(tstr) -+ else: -+ # Encode if tstr contains newlines. -+ want_encoding = not NLSET.isdisjoint(tstr) - try: - tstr.encode(encoding) - charset = encoding -@@ -2988,6 +2994,7 @@ - excess = len(encoded_word) - remaining_space - lines[-1] += encoded_word - to_encode = to_encode[len(to_encode_word):] -+ leading_whitespace = '' - - if to_encode: - lines.append(' ') -diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py -index 2ec54fbabae..5f9aa9fb091 100644 ---- a/Lib/email/_policybase.py -+++ b/Lib/email/_policybase.py -@@ -157,6 +157,13 @@ - message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - -+ verify_generated_headers -+ -- if true, the generator verifies that each header -+ they are properly folded, so that a parser won't -+ treat it as multiple headers, start-of-body, or -+ part of another header. -+ This is a check against custom Header & fold() -+ implementations. 
- """ - - raise_on_defect = False -@@ -165,6 +172,7 @@ - max_line_length = 78 - mangle_from_ = False - message_factory = None -+ verify_generated_headers = True - - def handle_defect(self, obj, defect): - """Based on policy, either raise defect or call register_defect. -diff --git a/Lib/email/errors.py b/Lib/email/errors.py -index 3ad00565549..02aa5eced6a 100644 ---- a/Lib/email/errors.py -+++ b/Lib/email/errors.py -@@ -29,6 +29,10 @@ - """An illegal charset was given.""" - - -+class HeaderWriteError(MessageError): -+ """Error while writing headers.""" -+ -+ - # These are parsing defects which the parser was able to work around. - class MessageDefect(ValueError): - """Base class for a message defect.""" -diff --git a/Lib/email/generator.py b/Lib/email/generator.py -index c8056ad47ba..47b9df8f4e6 100644 ---- a/Lib/email/generator.py -+++ b/Lib/email/generator.py -@@ -14,12 +14,14 @@ - from copy import deepcopy - from io import StringIO, BytesIO - from email.utils import _has_surrogates -+from email.errors import HeaderWriteError - - UNDERSCORE = '_' - NL = '\n' # XXX: no longer used by the code below. - - NLCRE = re.compile(r'\r\n|\r|\n') - fcre = re.compile(r'^From ', re.MULTILINE) -+NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]') - - - class Generator: -@@ -222,7 +224,16 @@ - - def _write_headers(self, msg): - for h, v in msg.raw_items(): -- self.write(self.policy.fold(h, v)) -+ folded = self.policy.fold(h, v) -+ if self.policy.verify_generated_headers: -+ linesep = self.policy.linesep -+ if not folded.endswith(self.policy.linesep): -+ raise HeaderWriteError( -+ f'folded header does not end with {linesep!r}: {folded!r}') -+ if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)): -+ raise HeaderWriteError( -+ f'folded header contains newline: {folded!r}') -+ self.write(folded) - # A blank line always separates headers from body - self.write(self._NL) - -diff --git a/Lib/email/utils.py b/Lib/email/utils.py -index aa949aa933a..e53abc8b840 100644 ---- a/Lib/email/utils.py -+++ b/Lib/email/utils.py -@@ -48,6 +48,7 @@ - specialsre = re.compile(r'[][\\()<>@,:;".]') - escapesre = re.compile(r'[\\"]') - -+ - def _has_surrogates(s): - """Return True if s may contain surrogate-escaped binary data.""" - # This check is based on the fact that unless there are surrogates, utf8 -@@ -106,12 +107,127 @@ - return address - - -+def _iter_escaped_chars(addr): -+ pos = 0 -+ escape = False -+ for pos, ch in enumerate(addr): -+ if escape: -+ yield (pos, '\\' + ch) -+ escape = False -+ elif ch == '\\': -+ escape = True -+ else: -+ yield (pos, ch) -+ if escape: -+ yield (pos, '\\') -+ -+ -+def _strip_quoted_realnames(addr): -+ """Strip real names between quotes.""" -+ if '"' not in addr: -+ # Fast path -+ return addr -+ -+ start = 0 -+ open_pos = None -+ result = [] -+ for pos, ch in _iter_escaped_chars(addr): -+ if ch == '"': -+ if open_pos is None: -+ open_pos = pos -+ else: -+ if start != open_pos: -+ result.append(addr[start:open_pos]) -+ start = pos + 1 -+ open_pos = None -+ -+ if start < len(addr): -+ result.append(addr[start:]) -+ -+ return ''.join(result) - --def getaddresses(fieldvalues): -- """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" -- all = COMMASPACE.join(str(v) for v in fieldvalues) -- a = _AddressList(all) -- return a.addresslist -+ -+supports_strict_parsing = True -+ -+def getaddresses(fieldvalues, *, strict=True): -+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. 
-+ -+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in -+ its place. -+ -+ If strict is true, use a strict parser which rejects malformed inputs. -+ """ -+ -+ # If strict is true, if the resulting list of parsed addresses is greater -+ # than the number of fieldvalues in the input list, a parsing error has -+ # occurred and consequently a list containing a single empty 2-tuple [('', -+ # '')] is returned in its place. This is done to avoid invalid output. -+ # -+ # Malformed input: getaddresses(['alice@example.com ']) -+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')] -+ # Safe output: [('', '')] -+ -+ if not strict: -+ all = COMMASPACE.join(str(v) for v in fieldvalues) -+ a = _AddressList(all) -+ return a.addresslist -+ -+ fieldvalues = [str(v) for v in fieldvalues] -+ fieldvalues = _pre_parse_validation(fieldvalues) -+ addr = COMMASPACE.join(fieldvalues) -+ a = _AddressList(addr) -+ result = _post_parse_validation(a.addresslist) -+ -+ # Treat output as invalid if the number of addresses is not equal to the -+ # expected number of addresses. -+ n = 0 -+ for v in fieldvalues: -+ # When a comma is used in the Real Name part it is not a deliminator. -+ # So strip those out before counting the commas. -+ v = _strip_quoted_realnames(v) -+ # Expected number of addresses: 1 + number of commas -+ n += 1 + v.count(',') -+ if len(result) != n: -+ return [('', '')] -+ -+ return result -+ -+ -+def _check_parenthesis(addr): -+ # Ignore parenthesis in quoted real names. -+ addr = _strip_quoted_realnames(addr) -+ -+ opens = 0 -+ for pos, ch in _iter_escaped_chars(addr): -+ if ch == '(': -+ opens += 1 -+ elif ch == ')': -+ opens -= 1 -+ if opens < 0: -+ return False -+ return (opens == 0) -+ -+ -+def _pre_parse_validation(email_header_fields): -+ accepted_values = [] -+ for v in email_header_fields: -+ if not _check_parenthesis(v): -+ v = "('', '')" -+ accepted_values.append(v) -+ -+ return accepted_values -+ -+ -+def _post_parse_validation(parsed_email_header_tuples): -+ accepted_values = [] -+ # The parser would have parsed a correctly formatted domain-literal -+ # The existence of an [ after parsing indicates a parsing failure -+ for v in parsed_email_header_tuples: -+ if '[' in v[1]: -+ v = ('', '') -+ accepted_values.append(v) -+ -+ return accepted_values - - - def _format_timetuple_and_zone(timetuple, zone): -@@ -128,7 +244,7 @@ - - Fri, 09 Nov 2001 01:08:47 -0000 - -- Optional timeval if given is a floating point time value as accepted by -+ Optional timeval if given is a floating-point time value as accepted by - gmtime() and localtime(), otherwise the current time is used. - - Optional localtime is a flag that when True, interprets timeval, and -@@ -205,16 +321,33 @@ - tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) - - --def parseaddr(addr): -+def parseaddr(addr, *, strict=True): - """ - Parse addr into its constituent realname and email address parts. - - Return a tuple of realname and email address, unless the parse fails, in - which case return a 2-tuple of ('', ''). -+ -+ If strict is True, use a strict parser which rejects malformed inputs. 
- """ -- addrs = _AddressList(addr).addresslist -- if not addrs: -- return '', '' -+ if not strict: -+ addrs = _AddressList(addr).addresslist -+ if not addrs: -+ return ('', '') -+ return addrs[0] -+ -+ if isinstance(addr, list): -+ addr = addr[0] -+ -+ if not isinstance(addr, str): -+ return ('', '') -+ -+ addr = _pre_parse_validation([addr])[0] -+ addrs = _post_parse_validation(_AddressList(addr).addresslist) -+ -+ if not addrs or len(addrs) > 1: -+ return ('', '') -+ - return addrs[0] - - -diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py -index 2ac872c25c8..a7c84572382 100644 ---- a/Lib/ensurepip/__init__.py -+++ b/Lib/ensurepip/__init__.py -@@ -10,7 +10,7 @@ - - __all__ = ["version", "bootstrap"] - _PACKAGE_NAMES = ('pip',) --_PIP_VERSION = "24.0" -+_PIP_VERSION = "24.2" - _PROJECTS = [ - ("pip", _PIP_VERSION, "py3"), - ] -diff --git a/Lib/filecmp.py b/Lib/filecmp.py -index 30bd900fa80..6fdb48f7a39 100644 ---- a/Lib/filecmp.py -+++ b/Lib/filecmp.py -@@ -160,12 +160,14 @@ - ok = True - try: - a_stat = os.stat(a_path) -- except OSError: -+ except (OSError, ValueError): -+ # See https://github.com/python/cpython/issues/122400 -+ # for the rationale for protecting against ValueError. - # print('Can\'t stat', a_path, ':', why.args[1]) - ok = False - try: - b_stat = os.stat(b_path) -- except OSError: -+ except (OSError, ValueError): - # print('Can\'t stat', b_path, ':', why.args[1]) - ok = False - -@@ -280,12 +282,12 @@ - # Return: - # 0 for equal - # 1 for different --# 2 for funny cases (can't stat, etc.) -+# 2 for funny cases (can't stat, NUL bytes, etc.) - # - def _cmp(a, b, sh, abs=abs, cmp=cmp): - try: - return not abs(cmp(a, b, sh)) -- except OSError: -+ except (OSError, ValueError): - return 2 - - -diff --git a/Lib/fractions.py b/Lib/fractions.py -index 88b418fe383..e3a8bbcfb3e 100644 ---- a/Lib/fractions.py -+++ b/Lib/fractions.py -@@ -825,8 +825,10 @@ - # A fractional power will generally produce an - # irrational number. 
- return float(a) ** float(b) -- else: -+ elif isinstance(b, (float, complex)): - return float(a) ** b -+ else: -+ return NotImplemented - - def __rpow__(b, a): - """a ** b""" -diff --git a/Lib/functools.py b/Lib/functools.py -index 1f1ba638866..318efd04fd8 100644 ---- a/Lib/functools.py -+++ b/Lib/functools.py -@@ -372,15 +372,13 @@ - self.keywords = keywords - - def __repr__(self): -- args = ", ".join(map(repr, self.args)) -- keywords = ", ".join("{}={!r}".format(k, v) -- for k, v in self.keywords.items()) -- format_string = "{module}.{cls}({func}, {args}, {keywords})" -- return format_string.format(module=self.__class__.__module__, -- cls=self.__class__.__qualname__, -- func=self.func, -- args=args, -- keywords=keywords) -+ cls = type(self) -+ module = cls.__module__ -+ qualname = cls.__qualname__ -+ args = [repr(self.func)] -+ args.extend(map(repr, self.args)) -+ args.extend(f"{k}={v!r}" for k, v in self.keywords.items()) -+ return f"{module}.{qualname}({', '.join(args)})" - - def _make_unbound_method(self): - def _method(cls_or_self, /, *args, **keywords): -diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py -index 35ac2dc6ae2..6b9ed24ad8e 100644 ---- a/Lib/http/cookies.py -+++ b/Lib/http/cookies.py -@@ -184,8 +184,13 @@ - return '"' + str.translate(_Translator) + '"' - - --_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") --_QuotePatt = re.compile(r"[\\].") -+_unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub -+ -+def _unquote_replace(m): -+ if m[1]: -+ return chr(int(m[1], 8)) -+ else: -+ return m[2] - - def _unquote(str): - # If there aren't any doublequotes, -@@ -205,36 +210,13 @@ - # \012 --> \n - # \" --> " - # -- i = 0 -- n = len(str) -- res = [] -- while 0 <= i < n: -- o_match = _OctalPatt.search(str, i) -- q_match = _QuotePatt.search(str, i) -- if not o_match and not q_match: # Neither matched -- res.append(str[i:]) -- break -- # else: -- j = k = -1 -- if o_match: -- j = o_match.start(0) -- if q_match: -- k = q_match.start(0) -- if q_match and (not o_match or k < j): # QuotePatt matched -- res.append(str[i:k]) -- res.append(str[k+1]) -- i = k + 2 -- else: # OctalPatt matched -- res.append(str[i:j]) -- res.append(chr(int(str[j+1:j+4], 8))) -- i = j + 4 -- return _nulljoin(res) -+ return _unquote_sub(_unquote_replace, str) - - # The _getdate() routine is used to set the expiration time in the cookie's HTTP - # header. By default, _getdate() returns the current time in the appropriate - # "expires" format for a Set-Cookie header. The one optional argument is an - # offset from now, in seconds. For example, an offset of -3600 means "one hour --# ago". The offset may be a floating point number. -+# ago". The offset may be a floating-point number. - # - - _weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] -diff --git a/Lib/idlelib/HISTORY.txt b/Lib/idlelib/HISTORY.txt -index 731fabd185f..a601b25b5f8 100644 ---- a/Lib/idlelib/HISTORY.txt -+++ b/Lib/idlelib/HISTORY.txt -@@ -277,7 +277,7 @@ - Debug menu: - - JIT (Just-In-Time) stack viewer toggle -- if set, the stack viewer --automaticall pops up when you get a traceback. -+automatically pops up when you get a traceback. - - Windows menu: - -diff --git a/Lib/idlelib/Icons/README.txt b/Lib/idlelib/Icons/README.txt -index d91c4d5d8d8..f285637d534 100644 ---- a/Lib/idlelib/Icons/README.txt -+++ b/Lib/idlelib/Icons/README.txt -@@ -1,13 +1,51 @@ --The IDLE icons are from https://bugs.python.org/issue1490384 -+IDLE-PYTHON LOGOS - --Created by Andrew Clover. 
-+These are sent to tk on Windows, *NIX, and non-Aqua macOS -+in pyshell following "# set application icon". - --The original sources are available from Andrew's website: -+ -+2006?: Andrew Clover made variously sized python icons for win23. - https://www.doxdesk.com/software/py/pyicons.html - --Various different formats and sizes are available at this GitHub Pull Request: --https://github.com/python/cpython/pull/17473 -+2006: 16, 32, and 48 bit .png versions were copied to CPython -+as Python application icons, maybe in PC/icons/py.ico. -+https://github.com/python/cpython/issues/43372 -+ -+2014: They were copied (perhaps a bit revised) to idlelib/Icons. -+https://github.com/python/cpython/issues/64605 -+.gif versions were also added. -+ -+2020: Add Clover's 256-bit image. -+https://github.com/python/cpython/issues/82620 -+Other fixups were done. -+ -+The idle.ico file used for Windows was created with ImageMagick: -+ $ convert idle_16.png idle_32.png idle_48.png idle_256.png idle.ico -+** This needs redoing whenever files are changed. -+?? Do Start, Desktop, and Taskbar use idlelib/Icons files? -+ -+Issue added Windows Store PC/icons/idlex44.png and .../idlex150.png. -+https://github.com/python/cpython/pull/22817 -+?? Should these be updated with major changes? -+ -+2022: Optimize .png images in CPython repository with external program. -+https://github.com/python/cpython/pull/21348 -+idle.ico (and idlex##) were not updated. -+ -+The idlexx.gif files are only needed for *nix running tcl/tk 8.5. -+As of 2022, this was known true for 1 'major' Linux distribution. -+(Same would be true for any non-Aqua macOS with 8.5, but now none?) -+Can be deleted when we require 8.6 or it is known always used. -+ -+Future: Derivitives of Python logo should be submitted for approval. -+PSF Trademark Working Group / Committee psf-trademarks@python.org -+https://www.python.org/community/logos/ # Original files -+https://www.python.org/psf/trademarks-faq/ -+https://www.python.org/psf/trademarks/ # Usage. -+ -+ -+OTHER GIFS: These are used by browsers using idlelib.tree. -+At least some will not be used when tree is replaced by ttk.Treeview. - --The idle.ico file was created with ImageMagick: - -- $ convert idle_16.png idle_32.png idle_48.png idle_256.png idle.ico -+Edited 2024 August 26 by TJR. -diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt -index d60f924242a..2735ad7e7e6 100644 ---- a/Lib/idlelib/News3.txt -+++ b/Lib/idlelib/News3.txt -@@ -4,6 +4,16 @@ - ========================= - - -+gh-120083: Add explicit black IDLE Hovertip foreground color needed for -+recent macOS. Fixes Sonoma showing unreadable white on pale yellow. -+Patch by John Riggles. -+ -+gh-122482: Change About IDLE to direct users to discuss.python.org -+instead of the now unused idle-dev email and mailing list. -+ -+gh-78889: Stop Shell freezes by blocking user access to non-method -+sys.stdout.shell attributes, which are all private. -+ - gh-78955: Use user-selected color theme for Help => IDLE Doc. - - gh-96905: In idlelib code, stop redefining built-ins 'dict' and 'object'. -@@ -567,14 +577,14 @@ - color setting, default or custom, on the extensions tab, that applied - to all themes.) For built-in themes, the foreground is the same as - normal text and the background is a contrasting gray. Context colors for --custom themes are set on the Hightlights tab along with other colors. -+custom themes are set on the Highlights tab along with other colors. 
- When one starts IDLE from a console and loads a custom theme without - definitions for 'context', one will see a warning message on the - console. - - bpo-33642: Display up to maxlines non-blank lines for Code Context. - If there is no current context, show a single blank line. (Previously, --the Code Contex had numlines lines, usually with some blank.) The use -+the Code Context had numlines lines, usually with some blank.) The use - of a new option, 'maxlines' (default 15), avoids possible interference - with user settings of the old option, 'numlines' (default 3). - -@@ -728,7 +738,7 @@ - versa. - Initial patch by Charles Wohlganger, revised by Terry Jan Reedy. - --bpo-31051: Rearrange condigdialog General tab. -+bpo-31051: Rearrange configdialog General tab. - Sort non-Help options into Window (Shell+Editor) and Editor (only). - Leave room for the addition of new options. - Patch by Terry Jan Reedy. -diff --git a/Lib/idlelib/TODO.txt b/Lib/idlelib/TODO.txt -index e2f1ac0f274..41b86b0c6d5 100644 ---- a/Lib/idlelib/TODO.txt -+++ b/Lib/idlelib/TODO.txt -@@ -179,7 +179,7 @@ - escape from the command area. (Albert Brandl) - - - Set X11 class to "idle/Idle", set icon and title to something --beginning with "idle" -- for window manangers. (Randall Hopper) -+beginning with "idle" -- for window managers. (Randall Hopper) - - - Config files editable through a preferences dialog. (me) DONE - -diff --git a/Lib/idlelib/configdialog.py b/Lib/idlelib/configdialog.py -index eedf97bf74f..4d2adb48570 100644 ---- a/Lib/idlelib/configdialog.py -+++ b/Lib/idlelib/configdialog.py -@@ -111,7 +111,7 @@ - load_configs: Load pages except for extensions. - activate_config_changes: Tell editors to reload. - """ -- self.frame = frame = Frame(self, padding="5px") -+ self.frame = frame = Frame(self, padding=5) - self.frame.grid(sticky="nwes") - self.note = note = Notebook(frame) - self.extpage = ExtPage(note) -diff --git a/Lib/idlelib/grep.py b/Lib/idlelib/grep.py -index ef14349960b..42048ff2395 100644 ---- a/Lib/idlelib/grep.py -+++ b/Lib/idlelib/grep.py -@@ -190,7 +190,7 @@ - - - def _grep_dialog(parent): # htest # -- from tkinter import Toplevel, Text, SEL, END -+ from tkinter import Toplevel, Text, SEL - from tkinter.ttk import Frame, Button - from idlelib.pyshell import PyShellFileList - -diff --git a/Lib/idlelib/help_about.py b/Lib/idlelib/help_about.py -index aa1c352897f..81c65f6264e 100644 ---- a/Lib/idlelib/help_about.py -+++ b/Lib/idlelib/help_about.py -@@ -85,15 +85,18 @@ - byline = Label(frame_background, text=byline_text, justify=LEFT, - fg=self.fg, bg=self.bg) - byline.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5) -- email = Label(frame_background, text='email: idle-dev@python.org', -- justify=LEFT, fg=self.fg, bg=self.bg) -- email.grid(row=6, column=0, columnspan=2, sticky=W, padx=10, pady=0) -+ -+ forums_url = "https://discuss.python.org" -+ forums = Label(frame_background, text="Python forums: "+forums_url, -+ justify=LEFT, fg=self.fg, bg=self.bg) -+ forums.grid(row=6, column=0, sticky=W, padx=10, pady=0) -+ forums.bind("", lambda event: webbrowser.open(forums_url)) - docs_url = ("https://docs.python.org/%d.%d/library/idle.html" % - sys.version_info[:2]) - docs = Label(frame_background, text=docs_url, - justify=LEFT, fg=self.fg, bg=self.bg) - docs.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0) -- docs.bind("", lambda event: webbrowser.open(docs['text'])) -+ docs.bind("", lambda event: webbrowser.open(docs_url)) - - Frame(frame_background, borderwidth=1, 
relief=SUNKEN, - height=2, bg=self.bg).grid(row=8, column=0, sticky=EW, -@@ -123,9 +126,7 @@ - height=2, bg=self.bg).grid(row=11, column=0, sticky=EW, - columnspan=3, padx=5, pady=5) - -- idle = Label(frame_background, -- text='IDLE', -- fg=self.fg, bg=self.bg) -+ idle = Label(frame_background, text='IDLE', fg=self.fg, bg=self.bg) - idle.grid(row=12, column=0, sticky=W, padx=10, pady=0) - idle_buttons = Frame(frame_background, bg=self.bg) - idle_buttons.grid(row=13, column=0, columnspan=3, sticky=NSEW) -diff --git a/Lib/idlelib/idle_test/example_stub.pyi b/Lib/idlelib/idle_test/example_stub.pyi -index 17b58010a9d..abcdbc17529 100644 ---- a/Lib/idlelib/idle_test/example_stub.pyi -+++ b/Lib/idlelib/idle_test/example_stub.pyi -@@ -1,4 +1,4 @@ --" Example to test recognition of .pyi file as Python source code. -+# An example file to test recognition of a .pyi file as Python source code. - - class Example: - def method(self, argument1: str, argument2: list[int]) -> None: ... -diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py -index d8b2652d5d7..e882c6cb3b8 100755 ---- a/Lib/idlelib/pyshell.py -+++ b/Lib/idlelib/pyshell.py -@@ -706,7 +706,7 @@ - del _filename, _sys, _dirname, _dir - \n""".format(filename)) - -- def showsyntaxerror(self, filename=None): -+ def showsyntaxerror(self, filename=None, **kwargs): - """Override Interactive Interpreter method: Use Colorizing - - Color the offending position instead of printing it and pointing at it -diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py -index 53e80a9b428..476a7b26c00 100644 ---- a/Lib/idlelib/run.py -+++ b/Lib/idlelib/run.py -@@ -436,6 +436,9 @@ - - def __init__(self, shell, tags, encoding='utf-8', errors='strict'): - self.shell = shell -+ # GH-78889: accessing unpickleable attributes freezes Shell. -+ # IDLE only needs methods; allow 'width' for possible use. -+ self.shell._RPCProxy__attributes = {'width': 1} - self.tags = tags - self._encoding = encoding - self._errors = errors -diff --git a/Lib/idlelib/searchbase.py b/Lib/idlelib/searchbase.py -index 64ed50c7364..c68a6ca339a 100644 ---- a/Lib/idlelib/searchbase.py -+++ b/Lib/idlelib/searchbase.py -@@ -86,7 +86,7 @@ - top.wm_iconname(self.icon) - _setup_dialog(top) - self.top = top -- self.frame = Frame(top, padding="5px") -+ self.frame = Frame(top, padding=5) - self.frame.grid(sticky="nwes") - top.grid_columnconfigure(0, weight=100) - top.grid_rowconfigure(0, weight=100) -diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py -index 3983690dd41..df5b1fe1dcf 100644 ---- a/Lib/idlelib/tooltip.py -+++ b/Lib/idlelib/tooltip.py -@@ -144,7 +144,8 @@ - - class Hovertip(OnHoverTooltipBase): - "A tooltip that pops up when a mouse hovers over an anchor widget." -- def __init__(self, anchor_widget, text, hover_delay=1000): -+ def __init__(self, anchor_widget, text, hover_delay=1000, -+ foreground="#000000", background="#ffffe0"): - """Create a text tooltip with a mouse hover delay. 
- - anchor_widget: the widget next to which the tooltip will be shown -@@ -156,10 +157,13 @@ - """ - super().__init__(anchor_widget, hover_delay=hover_delay) - self.text = text -+ self.foreground = foreground -+ self.background = background - - def showcontents(self): - label = Label(self.tipwindow, text=self.text, justify=LEFT, -- background="#ffffe0", relief=SOLID, borderwidth=1) -+ relief=SOLID, borderwidth=1, -+ foreground=self.foreground, background=self.background) - label.pack() - - diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 61dafc0f4cb..306c3282103 100644 --- a/Lib/importlib/_bootstrap_external.py @@ -15712,86 +1548,11 @@ index d9a19a13f7b..fbd30b159fb 100644 from ._bootstrap_external import NamespaceLoader -diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py -index 54156e93afc..e6ca17821d1 100644 ---- a/Lib/importlib/metadata/__init__.py -+++ b/Lib/importlib/metadata/__init__.py -@@ -534,7 +534,7 @@ - paths = ( - (subdir / name) - .resolve() -- .relative_to(self.locate_file('').resolve()) -+ .relative_to(self.locate_file('').resolve(), walk_up=True) - .as_posix() - for name in text.splitlines() - ) -diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py -index 3743e6aa912..4b836f47120 100644 ---- a/Lib/importlib/util.py -+++ b/Lib/importlib/util.py -@@ -13,7 +13,6 @@ - - import _imp - import sys --import threading - import types - - -@@ -253,6 +252,9 @@ - - def exec_module(self, module): - """Make the module load lazily.""" -+ # Threading is only needed for lazy loading, and importlib.util can -+ # be pulled in at interpreter startup, so defer until needed. -+ import threading - module.__spec__.loader = self.loader - module.__loader__ = self.loader - # Don't need to worry about deep-copying as trying to set an attribute diff --git a/Lib/inspect.py b/Lib/inspect.py -index 497169dacb5..a91f5e2fb95 100644 +index c43faa73159..a91f5e2fb95 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py -@@ -280,7 +280,13 @@ - if globals is None: - globals = obj_globals - if locals is None: -- locals = obj_locals -+ locals = obj_locals or {} -+ -+ # "Inject" type parameters into the local namespace -+ # (unless they are shadowed by assignments *in* the local namespace), -+ # as a way of emulating annotation scopes when calling `eval()` -+ if type_params := getattr(obj, "__type_params__", ()): -+ locals = {param.__name__: param for param in type_params} | locals - - return_value = {key: - value if not isinstance(value, str) else eval(value, globals, locals) -@@ -401,13 +407,13 @@ - return _has_code_flag(obj, CO_GENERATOR) - - # A marker for markcoroutinefunction and iscoroutinefunction. 
--_is_coroutine_marker = object() -+_is_coroutine_mark = object() - - def _has_coroutine_mark(f): - while ismethod(f): - f = f.__func__ - f = functools._unwrap_partial(f) -- return getattr(f, "_is_coroutine_marker", None) is _is_coroutine_marker -+ return getattr(f, "_is_coroutine_marker", None) is _is_coroutine_mark - - def markcoroutinefunction(func): - """ -@@ -415,7 +421,7 @@ - """ - if hasattr(func, '__func__'): - func = func.__func__ -- func._is_coroutine_marker = _is_coroutine_marker -+ func._is_coroutine_marker = _is_coroutine_mark - return func - - def iscoroutinefunction(obj): -@@ -955,6 +961,10 @@ +@@ -961,6 +961,10 @@ elif any(filename.endswith(s) for s in importlib.machinery.EXTENSION_SUFFIXES): return None @@ -15802,7 +1563,7 @@ index 497169dacb5..a91f5e2fb95 100644 # return a filename found in the linecache even if it doesn't exist on disk if filename in linecache.cache: return filename -@@ -985,6 +995,7 @@ +@@ -991,6 +995,7 @@ return object if hasattr(object, '__module__'): return sys.modules.get(object.__module__) @@ -15810,7 +1571,7 @@ index 497169dacb5..a91f5e2fb95 100644 # Try the filename to modulename cache if _filename is not None and _filename in modulesbyfile: return sys.modules.get(modulesbyfile[_filename]) -@@ -1078,7 +1089,7 @@ +@@ -1084,7 +1089,7 @@ # Allow filenames in form of "" to pass through. # `doctest` monkeypatches `linecache` module to enable # inspection, so let `linecache.getlines` to be called. @@ -15819,163 +1580,6 @@ index 497169dacb5..a91f5e2fb95 100644 raise OSError('source code not available') module = getmodule(object, file) -diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py -index d8f3b5e2e9e..816fbcd2bc4 100644 ---- a/Lib/ipaddress.py -+++ b/Lib/ipaddress.py -@@ -310,7 +310,7 @@ - [IPv4Network('192.0.2.0/24')] - - Args: -- addresses: An iterator of IPv4Network or IPv6Network objects. -+ addresses: An iterable of IPv4Network or IPv6Network objects. - - Returns: - An iterator of the collapsed IPv(4|6)Network objects. -diff --git a/Lib/linecache.py b/Lib/linecache.py -index ed4c9700dcb..248cba93874 100644 ---- a/Lib/linecache.py -+++ b/Lib/linecache.py -@@ -70,7 +70,7 @@ - continue # no-op for files loaded via a __loader__ - try: - stat = os.stat(fullname) -- except OSError: -+ except (OSError, ValueError): - cache.pop(filename, None) - continue - if size != stat.st_size or mtime != stat.st_mtime: -@@ -128,10 +128,12 @@ - try: - stat = os.stat(fullname) - break -- except OSError: -+ except (OSError, ValueError): - pass - else: - return [] -+ except ValueError: # may be raised by os.stat() -+ return [] - try: - with tokenize.open(fullname) as fp: - lines = fp.readlines() -diff --git a/Lib/logging/config.py b/Lib/logging/config.py -index 1824d0aa747..ac90b537d8a 100644 ---- a/Lib/logging/config.py -+++ b/Lib/logging/config.py -@@ -500,6 +500,33 @@ - value = tuple(value) - return value - -+def _is_queue_like_object(obj): -+ """Check that *obj* implements the Queue API.""" -+ if isinstance(obj, queue.Queue): -+ return True -+ # defer importing multiprocessing as much as possible -+ from multiprocessing.queues import Queue as MPQueue -+ if isinstance(obj, MPQueue): -+ return True -+ # Depending on the multiprocessing start context, we cannot create -+ # a multiprocessing.managers.BaseManager instance 'mm' to get the -+ # runtime type of mm.Queue() or mm.JoinableQueue() (see gh-119819). 
-+ # -+ # Since we only need an object implementing the Queue API, we only -+ # do a protocol check, but we do not use typing.runtime_checkable() -+ # and typing.Protocol to reduce import time (see gh-121723). -+ # -+ # Ideally, we would have wanted to simply use strict type checking -+ # instead of a protocol-based type checking since the latter does -+ # not check the method signatures. -+ queue_interface = [ -+ 'empty', 'full', 'get', 'get_nowait', -+ 'put', 'put_nowait', 'join', 'qsize', -+ 'task_done', -+ ] -+ return all(callable(getattr(obj, method, None)) -+ for method in queue_interface) -+ - class DictConfigurator(BaseConfigurator): - """ - Configure logging using a dictionary-like object to describe the -@@ -787,25 +814,20 @@ - # if 'handlers' not in config: - # raise ValueError('No handlers specified for a QueueHandler') - if 'queue' in config: -- from multiprocessing.queues import Queue as MPQueue -- from multiprocessing import Manager as MM -- proxy_queue = MM().Queue() -- proxy_joinable_queue = MM().JoinableQueue() - qspec = config['queue'] -- if not isinstance(qspec, (queue.Queue, MPQueue, -- type(proxy_queue), type(proxy_joinable_queue))): -- if isinstance(qspec, str): -- q = self.resolve(qspec) -- if not callable(q): -- raise TypeError('Invalid queue specifier %r' % qspec) -- q = q() -- elif isinstance(qspec, dict): -- if '()' not in qspec: -- raise TypeError('Invalid queue specifier %r' % qspec) -- q = self.configure_custom(dict(qspec)) -- else: -+ -+ if isinstance(qspec, str): -+ q = self.resolve(qspec) -+ if not callable(q): - raise TypeError('Invalid queue specifier %r' % qspec) -- config['queue'] = q -+ config['queue'] = q() -+ elif isinstance(qspec, dict): -+ if '()' not in qspec: -+ raise TypeError('Invalid queue specifier %r' % qspec) -+ config['queue'] = self.configure_custom(dict(qspec)) -+ elif not _is_queue_like_object(qspec): -+ raise TypeError('Invalid queue specifier %r' % qspec) -+ - if 'listener' in config: - lspec = config['listener'] - if isinstance(lspec, type): -diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py -index 1ae6bb84434..73757758af2 100644 ---- a/Lib/logging/handlers.py -+++ b/Lib/logging/handlers.py -@@ -187,15 +187,18 @@ - Basically, see if the supplied record would cause the file to exceed - the size limit we have. - """ -- # See bpo-45401: Never rollover anything other than regular files -- if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename): -- return False - if self.stream is None: # delay was set... - self.stream = self._open() - if self.maxBytes > 0: # are we rolling over? 
-+ pos = self.stream.tell() -+ if not pos: -+ # gh-116263: Never rollover an empty file -+ return False - msg = "%s\n" % self.format(record) -- self.stream.seek(0, 2) #due to non-posix-compliant Windows feature -- if self.stream.tell() + len(msg) >= self.maxBytes: -+ if pos + len(msg) >= self.maxBytes: -+ # See bpo-45401: Never rollover anything other than regular files -+ if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename): -+ return False - return True - return False - -diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py -index 3cc027aa368..10f3ddc5a15 100644 ---- a/Lib/mimetypes.py -+++ b/Lib/mimetypes.py -@@ -551,6 +551,8 @@ - '.csv' : 'text/csv', - '.html' : 'text/html', - '.htm' : 'text/html', -+ '.md' : 'text/markdown', -+ '.markdown': 'text/markdown', - '.n3' : 'text/n3', - '.txt' : 'text/plain', - '.bat' : 'text/plain', diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py index a0a020f9eeb..ac478ee7f51 100644 --- a/Lib/modulefinder.py @@ -15994,279 +1598,6 @@ index a0a020f9eeb..ac478ee7f51 100644 kind = _C_EXTENSION elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader): -diff --git a/Lib/pdb.py b/Lib/pdb.py -index 225c9f253ef..89cf975164a 100755 ---- a/Lib/pdb.py -+++ b/Lib/pdb.py -@@ -395,7 +395,7 @@ - - # Called before loop, handles display expressions - # Set up convenience variable containers -- def preloop(self): -+ def _show_display(self): - displaying = self.displaying.get(self.curframe) - if displaying: - for expr, oldvalue in displaying.items(): -@@ -419,10 +419,16 @@ - else: - Pdb._previous_sigint_handler = None - self.setup(frame, traceback) -- # if we have more commands to process, do not show the stack entry -- if not self.cmdqueue: -- self.print_stack_entry(self.stack[self.curindex]) -+ # We should print the stack entry if and only if the user input -+ # is expected, and we should print it right before the user input. -+ # We achieve this by appending _pdbcmd_print_frame_status to the -+ # command queue. If cmdqueue is not exausted, the user input is -+ # not expected and we will not print the stack entry. -+ self.cmdqueue.append('_pdbcmd_print_frame_status') - self._cmdloop() -+ # If _pdbcmd_print_frame_status is not used, pop it out -+ if self.cmdqueue and self.cmdqueue[-1] == '_pdbcmd_print_frame_status': -+ self.cmdqueue.pop() - self.forget() - - def displayhook(self, obj): -@@ -524,6 +530,10 @@ - a breakpoint command list definition. - """ - if not self.commands_defining: -+ if line.startswith('_pdbcmd'): -+ command, arg, line = self.parseline(line) -+ if hasattr(self, command): -+ return getattr(self, command)(arg) - return cmd.Cmd.onecmd(self, line) - else: - return self.handle_command_def(line) -@@ -623,6 +633,12 @@ - # Complete a simple name. - return [n for n in ns.keys() if n.startswith(text)] - -+ # Pdb meta commands, only intended to be used internally by pdb -+ -+ def _pdbcmd_print_frame_status(self, arg): -+ self.print_stack_entry(self.stack[self.curindex]) -+ self._show_display() -+ - # Command definitions, called by cmdloop() - # The argument is the remaining string on the command line - # Return true to exit from the command loop -diff --git a/Lib/pickle.py b/Lib/pickle.py -index 6e3c61fd0b2..01c1a102794 100644 ---- a/Lib/pickle.py -+++ b/Lib/pickle.py -@@ -314,16 +314,17 @@ - # Tools used for pickling. 
- - def _getattribute(obj, name): -+ top = obj - for subpath in name.split('.'): - if subpath == '': - raise AttributeError("Can't get local attribute {!r} on {!r}" -- .format(name, obj)) -+ .format(name, top)) - try: - parent = obj - obj = getattr(obj, subpath) - except AttributeError: - raise AttributeError("Can't get attribute {!r} on {!r}" -- .format(name, obj)) from None -+ .format(name, top)) from None - return obj, parent - - def whichmodule(obj, name): -@@ -396,6 +397,8 @@ - return int.from_bytes(data, byteorder='little', signed=True) - - -+_NoValue = object() -+ - # Pickling machinery - - class _Pickler: -@@ -780,14 +783,10 @@ - self.write(FLOAT + repr(obj).encode("ascii") + b'\n') - dispatch[float] = save_float - -- def save_bytes(self, obj): -- if self.proto < 3: -- if not obj: # bytes object is empty -- self.save_reduce(bytes, (), obj=obj) -- else: -- self.save_reduce(codecs.encode, -- (str(obj, 'latin1'), 'latin1'), obj=obj) -- return -+ def _save_bytes_no_memo(self, obj): -+ # helper for writing bytes objects for protocol >= 3 -+ # without memoizing them -+ assert self.proto >= 3 - n = len(obj) - if n <= 0xff: - self.write(SHORT_BINBYTES + pack("= 5 -+ # without memoizing them -+ assert self.proto >= 5 -+ n = len(obj) -+ if n >= self.framer._FRAME_SIZE_TARGET: -+ self._write_large_bytes(BYTEARRAY8 + pack("= self.framer._FRAME_SIZE_TARGET: -- self._write_large_bytes(BYTEARRAY8 + pack("= 5") - with obj.raw() as m: - if not m.contiguous: -@@ -830,10 +845,18 @@ - if in_band: - # Write data in-band - # XXX The C implementation avoids a copy here -+ buf = m.tobytes() -+ in_memo = id(buf) in self.memo - if m.readonly: -- self.save_bytes(m.tobytes()) -+ if in_memo: -+ self._save_bytes_no_memo(buf) -+ else: -+ self.save_bytes(buf) - else: -- self.save_bytearray(m.tobytes()) -+ if in_memo: -+ self._save_bytearray_no_memo(buf) -+ else: -+ self.save_bytearray(buf) - else: - # Write data out-of-band - self.write(NEXT_BUFFER) -@@ -1070,11 +1093,16 @@ - (obj, module_name, name)) - - if self.proto >= 2: -- code = _extension_registry.get((module_name, name)) -- if code: -- assert code > 0 -+ code = _extension_registry.get((module_name, name), _NoValue) -+ if code is not _NoValue: - if code <= 0xff: -- write(EXT1 + pack("= 3: -- write(GLOBAL + bytes(module_name, "utf-8") + b'\n' + -- bytes(name, "utf-8") + b'\n') -+ elif '.' in name: -+ # In protocol < 4, objects with multi-part __qualname__ -+ # are represented as -+ # getattr(getattr(..., attrname1), attrname2). -+ dotted_path = name.split('.') -+ name = dotted_path.pop(0) -+ save = self.save -+ for attrname in dotted_path: -+ save(getattr) -+ if self.proto < 2: -+ write(MARK) -+ self._save_toplevel_by_name(module_name, name) -+ for attrname in dotted_path: -+ save(attrname) -+ if self.proto < 2: -+ write(TUPLE) -+ else: -+ write(TUPLE2) -+ write(REDUCE) -+ else: -+ self._save_toplevel_by_name(module_name, name) -+ -+ self.memoize(obj) -+ -+ def _save_toplevel_by_name(self, module_name, name): -+ if self.proto >= 3: -+ # Non-ASCII identifiers are supported only with protocols >= 3. 
-+ self.write(GLOBAL + bytes(module_name, "utf-8") + b'\n' + -+ bytes(name, "utf-8") + b'\n') - else: - if self.fix_imports: - r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING -@@ -1102,14 +1154,12 @@ - elif module_name in r_import_mapping: - module_name = r_import_mapping[module_name] - try: -- write(GLOBAL + bytes(module_name, "ascii") + b'\n' + -- bytes(name, "ascii") + b'\n') -+ self.write(GLOBAL + bytes(module_name, "ascii") + b'\n' + -+ bytes(name, "ascii") + b'\n') - except UnicodeEncodeError: - raise PicklingError( - "can't pickle global identifier '%s.%s' using " -- "pickle protocol %i" % (module, name, self.proto)) from None -- -- self.memoize(obj) -+ "pickle protocol %i" % (module_name, name, self.proto)) from None - - def save_type(self, obj): - if obj is type(None): -@@ -1546,9 +1596,8 @@ - dispatch[EXT4[0]] = load_ext4 - - def get_extension(self, code): -- nil = [] -- obj = _extension_cache.get(code, nil) -- if obj is not nil: -+ obj = _extension_cache.get(code, _NoValue) -+ if obj is not _NoValue: - self.append(obj) - return - key = _inverted_registry.get(code) diff --git a/Lib/platform.py b/Lib/platform.py index c5b60480369..c99f08899e4 100755 --- a/Lib/platform.py @@ -16433,823 +1764,6 @@ index c5b60480369..c99f08899e4 100755 if system == 'Windows': # MS platforms -diff --git a/Lib/pstats.py b/Lib/pstats.py -index 51bcca84188..f3611777dec 100644 ---- a/Lib/pstats.py -+++ b/Lib/pstats.py -@@ -83,7 +83,7 @@ - method now take arbitrarily many file names as arguments. - - All the print methods now take an argument that indicates how many lines -- to print. If the arg is a floating point number between 0 and 1.0, then -+ to print. If the arg is a floating-point number between 0 and 1.0, then - it is taken as a decimal percentage of the available lines to be printed - (e.g., .1 means print 10% of all available lines). If it is an integer, - it is taken to mean the number of lines of data that you wish to have -diff --git a/Lib/pydoc.py b/Lib/pydoc.py -index 9a8812392af..e3745e5453b 100755 ---- a/Lib/pydoc.py -+++ b/Lib/pydoc.py -@@ -2148,7 +2148,7 @@ - elif request in self.symbols: self.showsymbol(request) - elif request in ['True', 'False', 'None']: - # special case these keywords since they are objects too -- doc(eval(request), 'Help on %s:', is_cli=is_cli) -+ doc(eval(request), 'Help on %s:', output=self._output, is_cli=is_cli) - elif request in self.keywords: self.showtopic(request) - elif request in self.topics: self.showtopic(request) - elif request: doc(request, 'Help on %s:', output=self._output, is_cli=is_cli) -@@ -2241,7 +2241,11 @@ - text = 'Related help topics: ' + ', '.join(xrefs.split()) + '\n' - wrapped_text = textwrap.wrap(text, 72) - doc += '\n%s\n' % '\n'.join(wrapped_text) -- pager(doc) -+ -+ if self._output is None: -+ pager(doc) -+ else: -+ self.output.write(doc) - - def _gettopic(self, topic, more_xrefs=''): - """Return unbuffered tuple of (topic, xrefs). -diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py -index e9e6337cbed..8113889a1f6 100644 ---- a/Lib/pydoc_data/topics.py -+++ b/Lib/pydoc_data/topics.py -@@ -1,5 +1,5 @@ - # -*- coding: utf-8 -*- --# Autogenerated by Sphinx on Thu Jun 6 20:20:21 2024 -+# Autogenerated by Sphinx on Fri Sep 6 21:00:45 2024 - # as part of the release process. 
- topics = {'assert': 'The "assert" statement\n' - '**********************\n' -@@ -308,10 +308,10 @@ - 'target.\n' - 'The target is only evaluated once.\n' - '\n' -- 'An augmented assignment expression like "x += 1" can be ' -- 'rewritten as\n' -- '"x = x + 1" to achieve a similar, but not exactly equal ' -- 'effect. In the\n' -+ 'An augmented assignment statement like "x += 1" can be ' -+ 'rewritten as "x\n' -+ '= x + 1" to achieve a similar, but not exactly equal effect. ' -+ 'In the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' -@@ -362,21 +362,26 @@ - 'a single\n' - 'target is allowed.\n' - '\n' -- 'For simple names as assignment targets, if in class or module ' -- 'scope,\n' -- 'the annotations are evaluated and stored in a special class or ' -- 'module\n' -- 'attribute "__annotations__" that is a dictionary mapping from ' -- 'variable\n' -- 'names (mangled if private) to evaluated annotations. This ' -- 'attribute is\n' -- 'writable and is automatically created at the start of class or ' -- 'module\n' -- 'body execution, if annotations are found statically.\n' -- '\n' -- 'For expressions as assignment targets, the annotations are ' -+ 'The assignment target is considered “simple” if it consists of ' -+ 'a\n' -+ 'single name that is not enclosed in parentheses. For simple ' -+ 'assignment\n' -+ 'targets, if in class or module scope, the annotations are ' - 'evaluated\n' -- 'if in class or module scope, but not stored.\n' -+ 'and stored in a special class or module attribute ' -+ '"__annotations__"\n' -+ 'that is a dictionary mapping from variable names (mangled if ' -+ 'private)\n' -+ 'to evaluated annotations. This attribute is writable and is\n' -+ 'automatically created at the start of class or module body ' -+ 'execution,\n' -+ 'if annotations are found statically.\n' -+ '\n' -+ 'If the assignment target is not simple (an attribute, ' -+ 'subscript node,\n' -+ 'or parenthesized name), the annotation is evaluated if in ' -+ 'class or\n' -+ 'module scope, but not stored.\n' - '\n' - 'If a name is annotated in a function scope, then this name is ' - 'local\n' -@@ -555,31 +560,67 @@ - 'evaluate it\n' - 'raises a "NameError" exception.\n' - '\n' -- '**Private name mangling:** When an identifier that ' -- 'textually occurs in\n' -- 'a class definition begins with two or more underscore ' -- 'characters and\n' -- 'does not end in two or more underscores, it is ' -- 'considered a *private\n' -- 'name* of that class. Private names are transformed to a ' -- 'longer form\n' -- 'before code is generated for them. The transformation ' -- 'inserts the\n' -- 'class name, with leading underscores removed and a ' -- 'single underscore\n' -- 'inserted, in front of the name. For example, the ' -- 'identifier "__spam"\n' -- 'occurring in a class named "Ham" will be transformed to ' -- '"_Ham__spam".\n' -- 'This transformation is independent of the syntactical ' -+ '\n' -+ 'Private name mangling\n' -+ '=====================\n' -+ '\n' -+ 'When an identifier that textually occurs in a class ' -+ 'definition begins\n' -+ 'with two or more underscore characters and does not end ' -+ 'in two or more\n' -+ 'underscores, it is considered a *private name* of that ' -+ 'class.\n' -+ '\n' -+ 'See also: The class specifications.\n' -+ '\n' -+ 'More precisely, private names are transformed to a ' -+ 'longer form before\n' -+ 'code is generated for them. 
If the transformed name is ' -+ 'longer than\n' -+ '255 characters, implementation-defined truncation may ' -+ 'happen.\n' -+ '\n' -+ 'The transformation is independent of the syntactical ' - 'context in which\n' -- 'the identifier is used. If the transformed name is ' -- 'extremely long\n' -- '(longer than 255 characters), implementation defined ' -- 'truncation may\n' -- 'happen. If the class name consists only of underscores, ' -- 'no\n' -- 'transformation is done.\n', -+ 'the identifier is used but only the following private ' -+ 'identifiers are\n' -+ 'mangled:\n' -+ '\n' -+ '* Any name used as the name of a variable that is ' -+ 'assigned or read or\n' -+ ' any name of an attribute being accessed.\n' -+ '\n' -+ ' The "__name__" attribute of nested functions, classes, ' -+ 'and type\n' -+ ' aliases is however not mangled.\n' -+ '\n' -+ '* The name of imported modules, e.g., "__spam" in ' -+ '"import __spam". If\n' -+ ' the module is part of a package (i.e., its name ' -+ 'contains a dot), the\n' -+ ' name is *not* mangled, e.g., the "__foo" in "import ' -+ '__foo.bar" is\n' -+ ' not mangled.\n' -+ '\n' -+ '* The name of an imported member, e.g., "__f" in "from ' -+ 'spam import\n' -+ ' __f".\n' -+ '\n' -+ 'The transformation rule is defined as follows:\n' -+ '\n' -+ '* The class name, with leading underscores removed and a ' -+ 'single\n' -+ ' leading underscore inserted, is inserted in front of ' -+ 'the identifier,\n' -+ ' e.g., the identifier "__spam" occurring in a class ' -+ 'named "Foo",\n' -+ ' "_Foo" or "__Foo" is transformed to "_Foo__spam".\n' -+ '\n' -+ '* If the class name consists only of underscores, the ' -+ 'transformation\n' -+ ' is the identity, e.g., the identifier "__spam" ' -+ 'occurring in a class\n' -+ ' named "_" or "__" is left as is.\n', - 'atom-literals': 'Literals\n' - '********\n' - '\n' -@@ -592,10 +633,10 @@ - '\n' - 'Evaluation of a literal yields an object of the given type ' - '(string,\n' -- 'bytes, integer, floating point number, complex number) with ' -+ 'bytes, integer, floating-point number, complex number) with ' - 'the given\n' - 'value. The value may be approximated in the case of ' -- 'floating point\n' -+ 'floating-point\n' - 'and imaginary (complex) literals. See section Literals for ' - 'details.\n' - '\n' -@@ -1163,10 +1204,10 @@ - 'target.\n' - 'The target is only evaluated once.\n' - '\n' -- 'An augmented assignment expression like "x += 1" can be ' -- 'rewritten as\n' -- '"x = x + 1" to achieve a similar, but not exactly equal effect. ' -- 'In the\n' -+ 'An augmented assignment statement like "x += 1" can be ' -+ 'rewritten as "x\n' -+ '= x + 1" to achieve a similar, but not exactly equal effect. In ' -+ 'the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' -@@ -1239,6 +1280,10 @@ - 'The "@" (at) operator is intended to be used for matrix\n' - 'multiplication. No builtin Python types implement this operator.\n' - '\n' -+ 'This operation can be customized using the special "__matmul__()" ' -+ 'and\n' -+ '"__rmatmul__()" methods.\n' -+ '\n' - 'Added in version 3.5.\n' - '\n' - 'The "/" (division) and "//" (floor division) operators yield the\n' -@@ -1251,17 +1296,19 @@ - 'result. Division by zero raises the "ZeroDivisionError" ' - 'exception.\n' - '\n' -- 'This operation can be customized using the special "__truediv__()" ' -+ 'The division operation can be customized using the special\n' -+ '"__truediv__()" and "__rtruediv__()" methods. 
The floor division\n' -+ 'operation can be customized using the special "__floordiv__()" ' - 'and\n' -- '"__floordiv__()" methods.\n' -+ '"__rfloordiv__()" methods.\n' - '\n' - 'The "%" (modulo) operator yields the remainder from the division ' - 'of\n' - 'the first argument by the second. The numeric arguments are ' - 'first\n' - 'converted to a common type. A zero right argument raises the\n' -- '"ZeroDivisionError" exception. The arguments may be floating ' -- 'point\n' -+ '"ZeroDivisionError" exception. The arguments may be ' -+ 'floating-point\n' - 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' - '"4*0.7 +\n' - '0.34".) The modulo operator always yields a result with the same ' -@@ -1288,13 +1335,13 @@ - '\n' - 'The *modulo* operation can be customized using the special ' - '"__mod__()"\n' -- 'method.\n' -+ 'and "__rmod__()" methods.\n' - '\n' - 'The floor division operator, the modulo operator, and the ' - '"divmod()"\n' - 'function are not defined for complex numbers. Instead, convert to ' - 'a\n' -- 'floating point number using the "abs()" function if appropriate.\n' -+ 'floating-point number using the "abs()" function if appropriate.\n' - '\n' - 'The "+" (addition) operator yields the sum of its arguments. The\n' - 'arguments must either both be numbers or both be sequences of the ' -@@ -1313,7 +1360,8 @@ - 'The numeric arguments are first converted to a common type.\n' - '\n' - 'This operation can be customized using the special "__sub__()" ' -- 'method.\n', -+ 'and\n' -+ '"__rsub__()" methods.\n', - 'bitwise': 'Binary bitwise operations\n' - '*************************\n' - '\n' -@@ -2388,18 +2436,16 @@ - 'An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' -- 'any exception. For an "except" clause with an expression, that\n' -- 'expression is evaluated, and the clause matches the exception if ' -- 'the\n' -- 'resulting object is “compatible” with the exception. An object ' -- 'is\n' -- 'compatible with an exception if the object is the class or a ' -- '*non-\n' -- 'virtual base class* of the exception object, or a tuple ' -- 'containing an\n' -- 'item that is the class or a non-virtual base class of the ' -- 'exception\n' -- 'object.\n' -+ 'any exception.\n' -+ '\n' -+ 'For an "except" clause with an expression, the expression must\n' -+ 'evaluate to an exception type or a tuple of exception types. ' -+ 'The\n' -+ 'raised exception matches an "except" clause whose expression ' -+ 'evaluates\n' -+ 'to the class or a *non-virtual base class* of the exception ' -+ 'object, or\n' -+ 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' -@@ -2548,13 +2594,16 @@ - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' -- 'An "except*" clause must have a matching type, and this type ' -- 'cannot be\n' -- 'a subclass of "BaseExceptionGroup". It is not possible to mix ' -- '"except"\n' -- 'and "except*" in the same "try". "break", "continue" and ' -- '"return"\n' -- 'cannot appear in an "except*" clause.\n' -+ 'An "except*" clause must have a matching expression; it cannot ' -+ 'be\n' -+ '"except*:". 
Furthermore, this expression cannot contain ' -+ 'exception\n' -+ 'group types, because that would have ambiguous semantics.\n' -+ '\n' -+ 'It is not possible to mix "except" and "except*" in the same ' -+ '"try".\n' -+ '"break", "continue" and "return" cannot appear in an "except*" ' -+ 'clause.\n' - '\n' - '\n' - '"else" clause\n' -@@ -3052,7 +3101,7 @@ - ' | "None"\n' - ' | "True"\n' - ' | "False"\n' -- ' | signed_number: NUMBER | "-" NUMBER\n' -+ ' signed_number ::= ["-"] NUMBER\n' - '\n' - 'The rule "strings" and the token "NUMBER" are defined in the ' - 'standard\n' -@@ -4400,7 +4449,7 @@ - 'converted to\n' - ' complex;\n' - '\n' -- '* otherwise, if either argument is a floating point number, ' -+ '* otherwise, if either argument is a floating-point number, ' - 'the other\n' - ' is converted to floating point;\n' - '\n' -@@ -4511,6 +4560,10 @@ - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' -+ ' "weakref.finalize" provides a straightforward way to ' -+ 'register a\n' -+ ' cleanup function to be called when an object is garbage ' -+ 'collected.\n' - '\n' - ' Note:\n' - '\n' -@@ -6413,10 +6466,10 @@ - 'that expression. (To create an empty tuple, use an empty pair ' - 'of\n' - 'parentheses: "()".)\n', -- 'floating': 'Floating point literals\n' -+ 'floating': 'Floating-point literals\n' - '***********************\n' - '\n' -- 'Floating point literals are described by the following lexical\n' -+ 'Floating-point literals are described by the following lexical\n' - 'definitions:\n' - '\n' - ' floatnumber ::= pointfloat | exponentfloat\n' -@@ -6430,12 +6483,12 @@ - 'using\n' - 'radix 10. For example, "077e010" is legal, and denotes the same ' - 'number\n' -- 'as "77e10". The allowed range of floating point literals is\n' -+ 'as "77e10". The allowed range of floating-point literals is\n' - 'implementation-dependent. As in integer literals, underscores ' - 'are\n' - 'supported for digit grouping.\n' - '\n' -- 'Some examples of floating point literals:\n' -+ 'Some examples of floating-point literals:\n' - '\n' - ' 3.14 10. .001 1e100 3.14e-10 0e0 ' - '3.14_15_93\n' -@@ -6818,7 +6871,7 @@ - '\n' - 'The "\'_\'" option signals the use of an underscore for a ' - 'thousands\n' -- 'separator for floating point presentation types and for ' -+ 'separator for floating-point presentation types and for ' - 'integer\n' - 'presentation type "\'d\'". For integer presentation types ' - '"\'b\'", "\'o\'",\n' -@@ -6945,11 +6998,11 @@ - '\n' - 'In addition to the above presentation types, integers can ' - 'be formatted\n' -- 'with the floating point presentation types listed below ' -+ 'with the floating-point presentation types listed below ' - '(except "\'n\'"\n' - 'and "None"). When doing so, "float()" is used to convert ' - 'the integer\n' -- 'to a floating point number before formatting.\n' -+ 'to a floating-point number before formatting.\n' - '\n' - 'The available presentation types for "float" and "Decimal" ' - 'values are:\n' -@@ -7807,11 +7860,11 @@ - '\n' - 'An imaginary literal yields a complex number with a real part ' - 'of 0.0.\n' -- 'Complex numbers are represented as a pair of floating point ' -+ 'Complex numbers are represented as a pair of floating-point ' - 'numbers\n' - 'and have the same restrictions on their range. 
To create a ' - 'complex\n' -- 'number with a nonzero real part, add a floating point number to ' -+ 'number with a nonzero real part, add a floating-point number to ' - 'it,\n' - 'e.g., "(3+4j)". Some examples of imaginary literals:\n' - '\n' -@@ -8605,8 +8658,8 @@ - 'numbers': 'Numeric literals\n' - '****************\n' - '\n' -- 'There are three types of numeric literals: integers, floating ' -- 'point\n' -+ 'There are three types of numeric literals: integers, ' -+ 'floating-point\n' - 'numbers, and imaginary numbers. There are no complex literals\n' - '(complex numbers can be formed by adding a real number and an\n' - 'imaginary number).\n' -@@ -8938,16 +8991,22 @@ - 'types, operations that compute new values may actually return a\n' - 'reference to any existing object with the same type and value, ' - 'while\n' -- 'for mutable objects this is not allowed. E.g., after "a = 1; b = ' -- '1",\n' -- '"a" and "b" may or may not refer to the same object with the ' -- 'value\n' -- 'one, depending on the implementation, but after "c = []; d = []", ' -- '"c"\n' -- 'and "d" are guaranteed to refer to two different, unique, newly\n' -- 'created empty lists. (Note that "c = d = []" assigns the same ' -- 'object\n' -- 'to both "c" and "d".)\n', -+ 'for mutable objects this is not allowed. For example, after "a = ' -+ '1; b\n' -+ '= 1", *a* and *b* may or may not refer to the same object with ' -+ 'the\n' -+ 'value one, depending on the implementation. This is because "int" ' -+ 'is\n' -+ 'an immutable type, so the reference to "1" can be reused. This\n' -+ 'behaviour depends on the implementation used, so should not be ' -+ 'relied\n' -+ 'upon, but is something to be aware of when making use of object\n' -+ 'identity tests. However, after "c = []; d = []", *c* and *d* are\n' -+ 'guaranteed to refer to two different, unique, newly created ' -+ 'empty\n' -+ 'lists. (Note that "e = f = []" assigns the *same* object to both ' -+ '*e*\n' -+ 'and *f*.)\n', - 'operator-summary': 'Operator precedence\n' - '*******************\n' - '\n' -@@ -9176,8 +9235,8 @@ - '"complex"\n' - 'number. (In earlier versions it raised a "ValueError".)\n' - '\n' -- 'This operation can be customized using the special "__pow__()" ' -- 'method.\n', -+ 'This operation can be customized using the special "__pow__()" and\n' -+ '"__rpow__()" methods.\n', - 'raise': 'The "raise" statement\n' - '*********************\n' - '\n' -@@ -9591,9 +9650,12 @@ - 'the\n' - 'second argument.\n' - '\n' -- 'This operation can be customized using the special ' -- '"__lshift__()" and\n' -- '"__rshift__()" methods.\n' -+ 'The left shift operation can be customized using the special\n' -+ '"__lshift__()" and "__rlshift__()" methods. The right shift ' -+ 'operation\n' -+ 'can be customized using the special "__rshift__()" and ' -+ '"__rrshift__()"\n' -+ 'methods.\n' - '\n' - 'A right shift by *n* bits is defined as floor division by ' - '"pow(2,n)".\n' -@@ -9863,6 +9925,10 @@ - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' -+ ' "weakref.finalize" provides a straightforward way to ' -+ 'register a\n' -+ ' cleanup function to be called when an object is garbage ' -+ 'collected.\n' - '\n' - ' Note:\n' - '\n' -@@ -12657,11 +12723,11 @@ - ' and are deemed to delimit empty strings (for example,\n' - ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' - '\'2\']"). 
The *sep* argument\n' -- ' may consist of multiple characters (for example,\n' -- ' "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', ' -- '\'3\']"). Splitting an\n' -- ' empty string with a specified separator returns ' -- '"[\'\']".\n' -+ ' may consist of multiple characters as a single ' -+ 'delimiter (to split\n' -+ ' with multiple delimiters, use "re.split()"). Splitting ' -+ 'an empty\n' -+ ' string with a specified separator returns "[\'\']".\n' - '\n' - ' For example:\n' - '\n' -@@ -12671,6 +12737,8 @@ - " ['1', '2,3']\n" - " >>> '1,2,,3,'.split(',')\n" - " ['1', '2', '', '3', '']\n" -+ " >>> '1<>2<>3<4'.split('<>')\n" -+ " ['1', '2', '3<4']\n" - '\n' - ' If *sep* is not specified or is "None", a different ' - 'splitting\n' -@@ -13013,15 +13081,13 @@ - 'greater must be expressed with escapes.\n' - '\n' - 'Both string and bytes literals may optionally be prefixed with a\n' -- 'letter "\'r\'" or "\'R\'"; such strings are called *raw strings* ' -- 'and treat\n' -- 'backslashes as literal characters. As a result, in string ' -- 'literals,\n' -- '"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated ' -- 'specially.\n' -- 'Given that Python 2.x’s raw unicode literals behave differently ' -- 'than\n' -- 'Python 3.x’s the "\'ur\'" syntax is not supported.\n' -+ 'letter "\'r\'" or "\'R\'"; such constructs are called *raw ' -+ 'string\n' -+ 'literals* and *raw bytes literals* respectively and treat ' -+ 'backslashes\n' -+ 'as literal characters. As a result, in raw string literals, ' -+ '"\'\\U\'"\n' -+ 'and "\'\\u\'" escapes are not treated specially.\n' - '\n' - 'Added in version 3.3: The "\'rb\'" prefix of raw bytes literals ' - 'has been\n' -@@ -13351,14 +13417,15 @@ - 'clauses in turn until one is found that matches the exception. An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' -- 'any exception. For an "except" clause with an expression, that\n' -- 'expression is evaluated, and the clause matches the exception if the\n' -- 'resulting object is “compatible” with the exception. An object is\n' -- 'compatible with an exception if the object is the class or a *non-\n' -- 'virtual base class* of the exception object, or a tuple containing ' -- 'an\n' -- 'item that is the class or a non-virtual base class of the exception\n' -- 'object.\n' -+ 'any exception.\n' -+ '\n' -+ 'For an "except" clause with an expression, the expression must\n' -+ 'evaluate to an exception type or a tuple of exception types. The\n' -+ 'raised exception matches an "except" clause whose expression ' -+ 'evaluates\n' -+ 'to the class or a *non-virtual base class* of the exception object, ' -+ 'or\n' -+ 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' -@@ -13487,12 +13554,13 @@ - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' -- 'An "except*" clause must have a matching type, and this type cannot ' -- 'be\n' -- 'a subclass of "BaseExceptionGroup". It is not possible to mix ' -- '"except"\n' -- 'and "except*" in the same "try". "break", "continue" and "return"\n' -- 'cannot appear in an "except*" clause.\n' -+ 'An "except*" clause must have a matching expression; it cannot be\n' -+ '"except*:". 
Furthermore, this expression cannot contain exception\n' -+ 'group types, because that would have ambiguous semantics.\n' -+ '\n' -+ 'It is not possible to mix "except" and "except*" in the same "try".\n' -+ '"break", "continue" and "return" cannot appear in an "except*" ' -+ 'clause.\n' - '\n' - '\n' - '"else" clause\n' -@@ -13653,7 +13721,7 @@ - '\n' - '* A sign is shown only when the number is negative.\n' - '\n' -- 'Python distinguishes between integers, floating point numbers, and\n' -+ 'Python distinguishes between integers, floating-point numbers, and\n' - 'complex numbers:\n' - '\n' - '\n' -@@ -13698,28 +13766,28 @@ - '"numbers.Real" ("float")\n' - '------------------------\n' - '\n' -- 'These represent machine-level double precision floating point ' -+ 'These represent machine-level double precision floating-point ' - 'numbers.\n' - 'You are at the mercy of the underlying machine architecture (and C ' - 'or\n' - 'Java implementation) for the accepted range and handling of ' - 'overflow.\n' -- 'Python does not support single-precision floating point numbers; ' -+ 'Python does not support single-precision floating-point numbers; ' - 'the\n' - 'savings in processor and memory usage that are usually the reason ' - 'for\n' - 'using these are dwarfed by the overhead of using objects in Python, ' - 'so\n' - 'there is no reason to complicate the language with two kinds of\n' -- 'floating point numbers.\n' -+ 'floating-point numbers.\n' - '\n' - '\n' - '"numbers.Complex" ("complex")\n' - '-----------------------------\n' - '\n' - 'These represent complex numbers as a pair of machine-level double\n' -- 'precision floating point numbers. The same caveats apply as for\n' -- 'floating point numbers. The real and imaginary parts of a complex\n' -+ 'precision floating-point numbers. The same caveats apply as for\n' -+ 'floating-point numbers. The real and imaginary parts of a complex\n' - 'number "z" can be retrieved through the read-only attributes ' - '"z.real"\n' - 'and "z.imag".\n' -@@ -13913,8 +13981,7 @@ - 'however removing a key and re-inserting it will add it to the end\n' - 'instead of keeping its old place.\n' - '\n' -- 'Dictionaries are mutable; they can be created by the "{...}" ' -- 'notation\n' -+ 'Dictionaries are mutable; they can be created by the "{}" notation\n' - '(see section Dictionary displays).\n' - '\n' - 'The extension modules "dbm.ndbm" and "dbm.gnu" provide additional\n' -@@ -14134,21 +14201,10 @@ - 'to\n' - 'calling "f(C,1)" where "f" is the underlying function.\n' - '\n' -- 'Note that the transformation from function object to instance ' -- 'method\n' -- 'object happens each time the attribute is retrieved from the ' -- 'instance.\n' -- 'In some cases, a fruitful optimization is to assign the attribute ' -- 'to a\n' -- 'local variable and call that local variable. Also notice that this\n' -- 'transformation only happens for user-defined functions; other ' -- 'callable\n' -- 'objects (and all non-callable objects) are retrieved without\n' -- 'transformation. 
It is also important to note that user-defined\n' -- 'functions which are attributes of a class instance are not ' -- 'converted\n' -- 'to bound methods; this *only* happens when the function is an\n' -- 'attribute of the class.\n' -+ 'It is important to note that user-defined functions which are\n' -+ 'attributes of a class instance are not converted to bound methods;\n' -+ 'this *only* happens when the function is an attribute of the ' -+ 'class.\n' - '\n' - '\n' - 'Generator functions\n' -@@ -15155,7 +15211,7 @@ - '\n' - ' Return a shallow copy of the dictionary.\n' - '\n' -- ' classmethod fromkeys(iterable, value=None)\n' -+ ' classmethod fromkeys(iterable, value=None, /)\n' - '\n' - ' Create a new dictionary with keys from *iterable* and ' - 'values set\n' -@@ -15910,8 +15966,8 @@ - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' -- '| "s.remove(x)" | remove the first item from ' -- '*s* | (3) |\n' -+ '| "s.remove(x)" | removes the first item from ' -+ '*s* | (3) |\n' - '| | where "s[i]" is equal to ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' -@@ -15923,7 +15979,9 @@ - '\n' - 'Notes:\n' - '\n' -- '1. *t* must have the same length as the slice it is replacing.\n' -+ '1. If *k* is not equal to "1", *t* must have the same length as ' -+ 'the\n' -+ ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that by ' - 'default the\n' -@@ -16280,7 +16338,7 @@ - '\n' - ' * The linspace recipe shows how to implement a lazy version of ' - 'range\n' -- ' suitable for floating point applications.\n', -+ ' suitable for floating-point applications.\n', - 'typesseq-mutable': 'Mutable Sequence Types\n' - '**********************\n' - '\n' -@@ -16373,8 +16431,8 @@ - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' -- '| "s.remove(x)" | remove the first item ' -- 'from *s* | (3) |\n' -+ '| "s.remove(x)" | removes the first ' -+ 'item from *s* | (3) |\n' - '| | where "s[i]" is equal ' - 'to *x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' -@@ -16387,8 +16445,9 @@ - '\n' - 'Notes:\n' - '\n' -- '1. *t* must have the same length as the slice it is ' -- 'replacing.\n' -+ '1. If *k* is not equal to "1", *t* must have the same ' -+ 'length as the\n' -+ ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that ' - 'by default the\n' -diff --git a/Lib/re/_casefix.py b/Lib/re/_casefix.py -index 06507d08bee..fed2d84fc01 100644 ---- a/Lib/re/_casefix.py -+++ b/Lib/re/_casefix.py -@@ -1,4 +1,4 @@ --# Auto-generated by Tools/scripts/generate_re_casefix.py. -+# Auto-generated by Tools/build/generate_re_casefix.py. - - # Maps the code of lowercased character to codes of different lowercased - # characters which have the same uppercase. 
-diff --git a/Lib/runpy.py b/Lib/runpy.py -index 42f896c9cd5..ef54d3282ee 100644 ---- a/Lib/runpy.py -+++ b/Lib/runpy.py -@@ -247,17 +247,17 @@ - sys.modules[main_name] = saved_main - - --def _get_code_from_file(run_name, fname): -+def _get_code_from_file(fname): - # Check for a compiled file first - from pkgutil import read_code -- decoded_path = os.path.abspath(os.fsdecode(fname)) -- with io.open_code(decoded_path) as f: -+ code_path = os.path.abspath(fname) -+ with io.open_code(code_path) as f: - code = read_code(f) - if code is None: - # That didn't work, so try it as normal source code -- with io.open_code(decoded_path) as f: -+ with io.open_code(code_path) as f: - code = compile(f.read(), fname, 'exec') -- return code, fname -+ return code - - def run_path(path_name, init_globals=None, run_name=None): - """Execute code located at the specified filesystem location. -@@ -279,12 +279,13 @@ - pkg_name = run_name.rpartition(".")[0] - from pkgutil import get_importer - importer = get_importer(path_name) -+ path_name = os.fsdecode(path_name) - if isinstance(importer, type(None)): - # Not a valid sys.path entry, so run the code directly - # execfile() doesn't help as we want to allow compiled files -- code, fname = _get_code_from_file(run_name, path_name) -+ code = _get_code_from_file(path_name) - return _run_module_code(code, init_globals, run_name, -- pkg_name=pkg_name, script_name=fname) -+ pkg_name=pkg_name, script_name=path_name) - else: - # Finder is defined for path, so add it to - # the start of sys.path -diff --git a/Lib/sched.py b/Lib/sched.py -index 14613cf2987..fb20639d459 100644 ---- a/Lib/sched.py -+++ b/Lib/sched.py -@@ -11,7 +11,7 @@ - implement simulated time by writing your own functions. This can - also be used to integrate scheduling with STDWIN events; the delay - function is allowed to modify the queue. Time can be expressed as --integers or floating point numbers, as long as it is consistent. -+integers or floating-point numbers, as long as it is consistent. - - Events are specified by tuples (time, priority, action, argument, kwargs). - As in UNIX, lower priority numbers mean higher priority; in this diff --git a/Lib/site.py b/Lib/site.py index 924cfbecec2..763bab27430 100644 --- a/Lib/site.py @@ -17265,170 +1779,6 @@ index 924cfbecec2..763bab27430 100644 return None def joinuser(*args): -diff --git a/Lib/socket.py b/Lib/socket.py -index d796082e054..c1880c4ea51 100644 ---- a/Lib/socket.py -+++ b/Lib/socket.py -@@ -592,16 +592,65 @@ - return socket(0, 0, 0, info) - __all__.append("fromshare") - --if hasattr(_socket, "socketpair"): -+# Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. -+# This is used if _socket doesn't natively provide socketpair. It's -+# always defined so that it can be patched in for testing purposes. -+def _fallback_socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): -+ if family == AF_INET: -+ host = _LOCALHOST -+ elif family == AF_INET6: -+ host = _LOCALHOST_V6 -+ else: -+ raise ValueError("Only AF_INET and AF_INET6 socket address families " -+ "are supported") -+ if type != SOCK_STREAM: -+ raise ValueError("Only SOCK_STREAM socket type is supported") -+ if proto != 0: -+ raise ValueError("Only protocol zero is supported") -+ -+ # We create a connected TCP socket. Note the trick with -+ # setblocking(False) that prevents us from having to create a thread. 
-+ lsock = socket(family, type, proto) -+ try: -+ lsock.bind((host, 0)) -+ lsock.listen() -+ # On IPv6, ignore flow_info and scope_id -+ addr, port = lsock.getsockname()[:2] -+ csock = socket(family, type, proto) -+ try: -+ csock.setblocking(False) -+ try: -+ csock.connect((addr, port)) -+ except (BlockingIOError, InterruptedError): -+ pass -+ csock.setblocking(True) -+ ssock, _ = lsock.accept() -+ except: -+ csock.close() -+ raise -+ finally: -+ lsock.close() - -- def socketpair(family=None, type=SOCK_STREAM, proto=0): -- """socketpair([family[, type[, proto]]]) -> (socket object, socket object) -+ # Authenticating avoids using a connection from something else -+ # able to connect to {host}:{port} instead of us. -+ # We expect only AF_INET and AF_INET6 families. -+ try: -+ if ( -+ ssock.getsockname() != csock.getpeername() -+ or csock.getsockname() != ssock.getpeername() -+ ): -+ raise ConnectionError("Unexpected peer connection") -+ except: -+ # getsockname() and getpeername() can fail -+ # if either socket isn't connected. -+ ssock.close() -+ csock.close() -+ raise - -- Create a pair of socket objects from the sockets returned by the platform -- socketpair() function. -- The arguments are the same as for socket() except the default family is -- AF_UNIX if defined on the platform; otherwise, the default is AF_INET. -- """ -+ return (ssock, csock) -+ -+if hasattr(_socket, "socketpair"): -+ def socketpair(family=None, type=SOCK_STREAM, proto=0): - if family is None: - try: - family = AF_UNIX -@@ -613,44 +662,7 @@ - return a, b - - else: -- -- # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. -- def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): -- if family == AF_INET: -- host = _LOCALHOST -- elif family == AF_INET6: -- host = _LOCALHOST_V6 -- else: -- raise ValueError("Only AF_INET and AF_INET6 socket address families " -- "are supported") -- if type != SOCK_STREAM: -- raise ValueError("Only SOCK_STREAM socket type is supported") -- if proto != 0: -- raise ValueError("Only protocol zero is supported") -- -- # We create a connected TCP socket. Note the trick with -- # setblocking(False) that prevents us from having to create a thread. 
-- lsock = socket(family, type, proto) -- try: -- lsock.bind((host, 0)) -- lsock.listen() -- # On IPv6, ignore flow_info and scope_id -- addr, port = lsock.getsockname()[:2] -- csock = socket(family, type, proto) -- try: -- csock.setblocking(False) -- try: -- csock.connect((addr, port)) -- except (BlockingIOError, InterruptedError): -- pass -- csock.setblocking(True) -- ssock, _ = lsock.accept() -- except: -- csock.close() -- raise -- finally: -- lsock.close() -- return (ssock, csock) -+ socketpair = _fallback_socketpair - __all__.append("socketpair") - - socketpair.__doc__ = """socketpair([family[, type[, proto]]]) -> (socket object, socket object) -diff --git a/Lib/ssl.py b/Lib/ssl.py -index 983c2db6361..42ebb8ed384 100644 ---- a/Lib/ssl.py -+++ b/Lib/ssl.py -@@ -513,18 +513,17 @@ - self._set_alpn_protocols(protos) - - def _load_windows_store_certs(self, storename, purpose): -- certs = bytearray() - try: - for cert, encoding, trust in enum_certificates(storename): - # CA certs are never PKCS#7 encoded - if encoding == "x509_asn": - if trust is True or purpose.oid in trust: -- certs.extend(cert) -+ try: -+ self.load_verify_locations(cadata=cert) -+ except SSLError as exc: -+ warnings.warn(f"Bad certificate in Windows certificate store: {exc!s}") - except PermissionError: - warnings.warn("unable to enumerate Windows certificate store") -- if certs: -- self.load_verify_locations(cadata=certs) -- return certs - - def load_default_certs(self, purpose=Purpose.SERVER_AUTH): - if not isinstance(purpose, _ASN1Object): -diff --git a/Lib/statistics.py b/Lib/statistics.py -index 6bd214bbfe2..db108b3e2c8 100644 ---- a/Lib/statistics.py -+++ b/Lib/statistics.py -@@ -11,7 +11,7 @@ - Function Description - ================== ================================================== - mean Arithmetic mean (average) of data. --fmean Fast, floating point arithmetic mean. -+fmean Fast, floating-point arithmetic mean. - geometric_mean Geometric mean of data. - harmonic_mean Harmonic mean of data. - median Median (middle value) of data. diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 1d17ae3608a..34dfa0019a5 100644 --- a/Lib/subprocess.py @@ -17520,49 +1870,6 @@ index 1d17ae3608a..34dfa0019a5 100644 # This happens if SIGCLD is set to be ignored or # waiting for child processes has otherwise been # disabled for our process. This child is dead, we -diff --git a/Lib/symtable.py b/Lib/symtable.py -index 4b0bc6f497a..f95639bee3a 100644 ---- a/Lib/symtable.py -+++ b/Lib/symtable.py -@@ -217,8 +217,37 @@ - """ - if self.__methods is None: - d = {} -+ -+ def is_local_symbol(ident): -+ flags = self._table.symbols.get(ident, 0) -+ return ((flags >> SCOPE_OFF) & SCOPE_MASK) == LOCAL -+ - for st in self._table.children: -- d[st.name] = 1 -+ # pick the function-like symbols that are local identifiers -+ if is_local_symbol(st.name): -+ match st.type: -+ case _symtable.TYPE_FUNCTION: -+ # generators are of type TYPE_FUNCTION with a ".0" -+ # parameter as a first parameter (which makes them -+ # distinguishable from a function named 'genexpr') -+ if st.name == 'genexpr' and '.0' in st.varnames: -+ continue -+ d[st.name] = 1 -+ case _symtable.TYPE_TYPE_PARAM: -+ # Get the function-def block in the annotation -+ # scope 'st' with the same identifier, if any. 
-+ scope_name = st.name -+ for c in st.children: -+ if c.name == scope_name and c.type == _symtable.TYPE_FUNCTION: -+ # A generic generator of type TYPE_FUNCTION -+ # cannot be a direct child of 'st' (but it -+ # can be a descendant), e.g.: -+ # -+ # class A: -+ # type genexpr[genexpr] = (x for x in []) -+ assert scope_name != 'genexpr' or '.0' not in c.varnames -+ d[scope_name] = 1 -+ break - self.__methods = tuple(d) - return self.__methods - diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index 122d441bd19..6a7d3b60610 100644 --- a/Lib/sysconfig.py @@ -17630,1624 +1937,6 @@ index 122d441bd19..6a7d3b60610 100644 return f"{osname}-{release}-{machine}" -diff --git a/Lib/tabnanny.py b/Lib/tabnanny.py -index e2ac6837f15..d06c4c221e9 100755 ---- a/Lib/tabnanny.py -+++ b/Lib/tabnanny.py -@@ -107,14 +107,14 @@ - errprint("%r: Token Error: %s" % (file, msg)) - return - -- except SyntaxError as msg: -- errprint("%r: Token Error: %s" % (file, msg)) -- return -- - except IndentationError as msg: - errprint("%r: Indentation Error: %s" % (file, msg)) - return - -+ except SyntaxError as msg: -+ errprint("%r: Syntax Error: %s" % (file, msg)) -+ return -+ - except NannyNag as nag: - badline = nag.get_lineno() - line = nag.get_line() -diff --git a/Lib/tarfile.py b/Lib/tarfile.py -index e1487e3864d..0a0f31eca06 100755 ---- a/Lib/tarfile.py -+++ b/Lib/tarfile.py -@@ -843,6 +843,9 @@ - # Sentinel for replace() defaults, meaning "don't change the attribute" - _KEEP = object() - -+# Header length is digits followed by a space. -+_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ") -+ - class TarInfo(object): - """Informational class which holds the details about an - archive member given by a tar header block. -@@ -1412,37 +1415,59 @@ - else: - pax_headers = tarfile.pax_headers.copy() - -- # Check if the pax header contains a hdrcharset field. This tells us -- # the encoding of the path, linkpath, uname and gname fields. Normally, -- # these fields are UTF-8 encoded but since POSIX.1-2008 tar -- # implementations are allowed to store them as raw binary strings if -- # the translation to UTF-8 fails. -- match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) -- if match is not None: -- pax_headers["hdrcharset"] = match.group(1).decode("utf-8") -- -- # For the time being, we don't care about anything other than "BINARY". -- # The only other value that is currently allowed by the standard is -- # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. -- hdrcharset = pax_headers.get("hdrcharset") -- if hdrcharset == "BINARY": -- encoding = tarfile.encoding -- else: -- encoding = "utf-8" -- - # Parse pax header information. A record looks like that: - # "%d %s=%s\n" % (length, keyword, value). length is the size - # of the complete record including the length field itself and -- # the newline. keyword and value are both UTF-8 encoded strings. -- regex = re.compile(br"(\d+) ([^=]+)=") -+ # the newline. - pos = 0 -- while match := regex.match(buf, pos): -- length, keyword = match.groups() -- length = int(length) -- if length == 0: -+ encoding = None -+ raw_headers = [] -+ while len(buf) > pos and buf[pos] != 0x00: -+ if not (match := _header_length_prefix_re.match(buf, pos)): -+ raise InvalidHeaderError("invalid header") -+ try: -+ length = int(match.group(1)) -+ except ValueError: -+ raise InvalidHeaderError("invalid header") -+ # Headers must be at least 5 bytes, shortest being '5 x=\n'. -+ # Value is allowed to be empty. 
-+ if length < 5: -+ raise InvalidHeaderError("invalid header") -+ if pos + length > len(buf): -+ raise InvalidHeaderError("invalid header") -+ -+ header_value_end_offset = match.start(1) + length - 1 # Last byte of the header -+ keyword_and_value = buf[match.end(1) + 1:header_value_end_offset] -+ raw_keyword, equals, raw_value = keyword_and_value.partition(b"=") -+ -+ # Check the framing of the header. The last character must be '\n' (0x0A) -+ if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A: - raise InvalidHeaderError("invalid header") -- value = buf[match.end(2) + 1:match.start(1) + length - 1] -+ raw_headers.append((length, raw_keyword, raw_value)) -+ -+ # Check if the pax header contains a hdrcharset field. This tells us -+ # the encoding of the path, linkpath, uname and gname fields. Normally, -+ # these fields are UTF-8 encoded but since POSIX.1-2008 tar -+ # implementations are allowed to store them as raw binary strings if -+ # the translation to UTF-8 fails. For the time being, we don't care about -+ # anything other than "BINARY". The only other value that is currently -+ # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8. -+ # Note that we only follow the initial 'hdrcharset' setting to preserve -+ # the initial behavior of the 'tarfile' module. -+ if raw_keyword == b"hdrcharset" and encoding is None: -+ if raw_value == b"BINARY": -+ encoding = tarfile.encoding -+ else: # This branch ensures only the first 'hdrcharset' header is used. -+ encoding = "utf-8" - -+ pos += length -+ -+ # If no explicit hdrcharset is set, we use UTF-8 as a default. -+ if encoding is None: -+ encoding = "utf-8" -+ -+ # After parsing the raw headers we can decode them to text. -+ for length, raw_keyword, raw_value in raw_headers: - # Normally, we could just use "utf-8" as the encoding and "strict" - # as the error handler, but we better not take the risk. For - # example, GNU tar <= 1.23 is known to store filenames it cannot -@@ -1450,17 +1475,16 @@ - # hdrcharset=BINARY header). - # We first try the strict standard encoding, and if that fails we - # fall back on the user's encoding and error handler. -- keyword = self._decode_pax_field(keyword, "utf-8", "utf-8", -+ keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8", - tarfile.errors) - if keyword in PAX_NAME_FIELDS: -- value = self._decode_pax_field(value, encoding, tarfile.encoding, -+ value = self._decode_pax_field(raw_value, encoding, tarfile.encoding, - tarfile.errors) - else: -- value = self._decode_pax_field(value, "utf-8", "utf-8", -+ value = self._decode_pax_field(raw_value, "utf-8", "utf-8", - tarfile.errors) - - pax_headers[keyword] = value -- pos += length - - # Fetch the next header. - try: -@@ -1475,7 +1499,7 @@ - - elif "GNU.sparse.size" in pax_headers: - # GNU extended sparse format version 0.0. -- self._proc_gnusparse_00(next, pax_headers, buf) -+ self._proc_gnusparse_00(next, raw_headers) - - elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": - # GNU extended sparse format version 1.0. -@@ -1497,15 +1521,24 @@ - - return next - -- def _proc_gnusparse_00(self, next, pax_headers, buf): -+ def _proc_gnusparse_00(self, next, raw_headers): - """Process a GNU tar extended sparse header, version 0.0. 
- """ - offsets = [] -- for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): -- offsets.append(int(match.group(1))) - numbytes = [] -- for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): -- numbytes.append(int(match.group(1))) -+ for _, keyword, value in raw_headers: -+ if keyword == b"GNU.sparse.offset": -+ try: -+ offsets.append(int(value.decode())) -+ except ValueError: -+ raise InvalidHeaderError("invalid header") -+ -+ elif keyword == b"GNU.sparse.numbytes": -+ try: -+ numbytes.append(int(value.decode())) -+ except ValueError: -+ raise InvalidHeaderError("invalid header") -+ - next.sparse = list(zip(offsets, numbytes)) - - def _proc_gnusparse_01(self, next, pax_headers): -diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py -index 15586f15dfa..493932d6c6d 100644 ---- a/Lib/test/_test_eintr.py -+++ b/Lib/test/_test_eintr.py -@@ -18,6 +18,7 @@ - import socket - import subprocess - import sys -+import textwrap - import time - import unittest - -@@ -492,29 +493,31 @@ - self.check_elapsed_time(dt) - - --class FNTLEINTRTest(EINTRBaseTest): -+class FCNTLEINTRTest(EINTRBaseTest): - def _lock(self, lock_func, lock_name): - self.addCleanup(os_helper.unlink, os_helper.TESTFN) -- code = '\n'.join(( -- "import fcntl, time", -- "with open('%s', 'wb') as f:" % os_helper.TESTFN, -- " fcntl.%s(f, fcntl.LOCK_EX)" % lock_name, -- " time.sleep(%s)" % self.sleep_time)) -- start_time = time.monotonic() -- proc = self.subprocess(code) -+ rd1, wr1 = os.pipe() -+ rd2, wr2 = os.pipe() -+ for fd in (rd1, wr1, rd2, wr2): -+ self.addCleanup(os.close, fd) -+ code = textwrap.dedent(f""" -+ import fcntl, os, time -+ with open('{os_helper.TESTFN}', 'wb') as f: -+ fcntl.{lock_name}(f, fcntl.LOCK_EX) -+ os.write({wr1}, b"ok") -+ _ = os.read({rd2}, 2) # wait for parent process -+ time.sleep({self.sleep_time}) -+ """) -+ proc = self.subprocess(code, pass_fds=[wr1, rd2]) - with kill_on_error(proc): - with open(os_helper.TESTFN, 'wb') as f: - # synchronize the subprocess -+ ok = os.read(rd1, 2) -+ self.assertEqual(ok, b"ok") -+ -+ # notify the child that the parent is ready - start_time = time.monotonic() -- for _ in support.sleeping_retry(support.LONG_TIMEOUT, error=False): -- try: -- lock_func(f, fcntl.LOCK_EX | fcntl.LOCK_NB) -- lock_func(f, fcntl.LOCK_UN) -- except BlockingIOError: -- break -- else: -- dt = time.monotonic() - start_time -- raise Exception("failed to sync child in %.1f sec" % dt) -+ os.write(wr2, b"go") - - # the child locked the file just a moment ago for 'sleep_time' seconds - # that means that the lock below will block for 'sleep_time' minus some -diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py -index 39551234552..aac74ea311f 100644 ---- a/Lib/test/_test_multiprocessing.py -+++ b/Lib/test/_test_multiprocessing.py -@@ -1332,6 +1332,23 @@ - self.assertTrue(not_serializable_obj.reduce_was_called) - self.assertTrue(not_serializable_obj.on_queue_feeder_error_was_called) - -+ def test_closed_queue_empty_exceptions(self): -+ # Assert that checking the emptiness of an unused closed queue -+ # does not raise an OSError. The rationale is that q.close() is -+ # a no-op upon construction and becomes effective once the queue -+ # has been used (e.g., by calling q.put()). 
-+ for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): -+ q.close() # this is a no-op since the feeder thread is None -+ q.join_thread() # this is also a no-op -+ self.assertTrue(q.empty()) -+ -+ for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): -+ q.put('foo') # make sure that the queue is 'used' -+ q.close() # close the feeder thread -+ q.join_thread() # make sure to join the feeder thread -+ with self.assertRaisesRegex(OSError, 'is closed'): -+ q.empty() -+ - def test_closed_queue_put_get_exceptions(self): - for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): - q.close() -@@ -5691,6 +5708,15 @@ - finally: - parent_can_continue.set() - -+ def test_empty_exceptions(self): -+ # Assert that checking emptiness of a closed queue raises -+ # an OSError, independently of whether the queue was used -+ # or not. This differs from Queue and JoinableQueue. -+ q = multiprocessing.SimpleQueue() -+ q.close() # close the pipe -+ with self.assertRaisesRegex(OSError, 'is closed'): -+ q.empty() -+ - def test_empty(self): - queue = multiprocessing.SimpleQueue() - child_can_start = multiprocessing.Event() -diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py -index 404894ac36b..0528e0701fa 100644 ---- a/Lib/test/datetimetester.py -+++ b/Lib/test/datetimetester.py -@@ -1331,6 +1331,11 @@ - self.assertRaises(OverflowError, self.theclass.fromtimestamp, - insane) - -+ def test_fromtimestamp_with_none_arg(self): -+ # See gh-120268 for more details -+ with self.assertRaises(TypeError): -+ self.theclass.fromtimestamp(None) -+ - def test_today(self): - import time - -diff --git a/Lib/test/libregrtest/logger.py b/Lib/test/libregrtest/logger.py -index a1257069273..fa1d4d575c8 100644 ---- a/Lib/test/libregrtest/logger.py -+++ b/Lib/test/libregrtest/logger.py -@@ -43,7 +43,10 @@ - - def get_load_avg(self) -> float | None: - if hasattr(os, 'getloadavg'): -- return os.getloadavg()[0] -+ try: -+ return os.getloadavg()[0] -+ except OSError: -+ pass - if self.win_load_tracker is not None: - return self.win_load_tracker.getloadavg() - return None -diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py -index a257d102353..568690bdec4 100644 ---- a/Lib/test/libregrtest/refleak.py -+++ b/Lib/test/libregrtest/refleak.py -@@ -239,9 +239,13 @@ - abs_classes = filter(isabstract, abs_classes) - for abc in abs_classes: - for obj in abc.__subclasses__() + [abc]: -- for ref in abcs.get(obj, set()): -- if ref() is not None: -- obj.register(ref()) -+ refs = abcs.get(obj, None) -+ if refs is not None: -+ obj._abc_registry_clear() -+ for ref in refs: -+ subclass = ref() -+ if subclass is not None: -+ obj.register(subclass) - obj._abc_caches_clear() - - # Clear caches -diff --git a/Lib/test/libregrtest/testresult.py b/Lib/test/libregrtest/testresult.py -index de23fdd59de..1820f354572 100644 ---- a/Lib/test/libregrtest/testresult.py -+++ b/Lib/test/libregrtest/testresult.py -@@ -9,6 +9,7 @@ - import traceback - import unittest - from test import support -+from test.libregrtest.utils import sanitize_xml - - class RegressionTestResult(unittest.TextTestResult): - USE_XML = False -@@ -65,23 +66,24 @@ - if capture: - if self._stdout_buffer is not None: - stdout = self._stdout_buffer.getvalue().rstrip() -- ET.SubElement(e, 'system-out').text = stdout -+ ET.SubElement(e, 'system-out').text = sanitize_xml(stdout) - if self._stderr_buffer is not None: - stderr = self._stderr_buffer.getvalue().rstrip() -- ET.SubElement(e, 'system-err').text = stderr -+ 
ET.SubElement(e, 'system-err').text = sanitize_xml(stderr)
- 
-         for k, v in args.items():
-             if not k or not v:
-                 continue
-+
-             e2 = ET.SubElement(e, k)
-             if hasattr(v, 'items'):
-                 for k2, v2 in v.items():
-                     if k2:
--                        e2.set(k2, str(v2))
-+                        e2.set(k2, sanitize_xml(str(v2)))
-                     else:
--                        e2.text = str(v2)
-+                        e2.text = sanitize_xml(str(v2))
-             else:
--                e2.text = str(v)
-+                e2.text = sanitize_xml(str(v))
- 
-     @classmethod
-     def __makeErrorDict(cls, err_type, err_value, err_tb):
-diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py
-index 265dbf9ab75..a4fd1ee7a85 100644
---- a/Lib/test/libregrtest/utils.py
-+++ b/Lib/test/libregrtest/utils.py
-@@ -5,6 +5,7 @@
- import os.path
- import platform
- import random
-+import re
- import shlex
- import signal
- import subprocess
-@@ -263,6 +264,12 @@
-     for f in typing._cleanups:
-         f()
- 
-+    import inspect
-+    abs_classes = filter(inspect.isabstract, typing.__dict__.values())
-+    for abc in abs_classes:
-+        for obj in abc.__subclasses__() + [abc]:
-+            obj._abc_caches_clear()
-+
-     try:
-         fractions = sys.modules['fractions']
-     except KeyError:
-@@ -336,6 +343,11 @@
-     if support.check_cflags_pgo():
-         # PGO (--enable-optimizations)
-         optimizations.append('PGO')
-+
-+    if support.check_bolt_optimized():
-+        # BOLT (--enable-bolt)
-+        optimizations.append('BOLT')
-+
-     if optimizations:
-         build.append('+'.join(optimizations))
- 
-@@ -710,3 +722,24 @@
-         pass
- 
-     return None
-+
-+
-+ILLEGAL_XML_CHARS_RE = re.compile(
-+    '['
-+    # Control characters; newline (\x0A and \x0D) and TAB (\x09) are legal
-+    '\x00-\x08\x0B\x0C\x0E-\x1F'
-+    # Surrogate characters
-+    '\uD800-\uDFFF'
-+    # Special Unicode characters
-+    '\uFFFE'
-+    '\uFFFF'
-+    # Match multiple sequential invalid characters for better efficiency
-+    ']+')
-+
-+def _sanitize_xml_replace(regs):
-+    text = regs[0]
-+    return ''.join(f'\\x{ord(ch):02x}' if ch <= '\xff' else ascii(ch)[1:-1]
-+                   for ch in text)
-+
-+def sanitize_xml(text):
-+    return ILLEGAL_XML_CHARS_RE.sub(_sanitize_xml_replace, text)
-diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py
-index b1ef332522d..ac13b110b20 100644
---- a/Lib/test/list_tests.py
-+++ b/Lib/test/list_tests.py
-@@ -191,6 +191,14 @@
- 
-         self.assertRaises(TypeError, a.__setitem__)
- 
-+    def test_slice_assign_iterator(self):
-+        x = self.type2test(range(5))
-+        x[0:3] = reversed(range(3))
-+        self.assertEqual(x, self.type2test([2, 1, 0, 3, 4]))
-+
-+        x[:] = reversed(range(3))
-+        self.assertEqual(x, self.type2test([2, 1, 0]))
-+
-     def test_delslice(self):
-         a = self.type2test([0, 1])
-         del a[1:2]
-diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py
-index 024c6debcd4..8c8f8901f00 100644
---- a/Lib/test/lock_tests.py
-+++ b/Lib/test/lock_tests.py
-@@ -1013,6 +1013,10 @@
-         self.assertEqual(self.barrier.n_waiting, 0)
-         self.assertFalse(self.barrier.broken)
- 
-+    def test_constructor(self):
-+        self.assertRaises(ValueError, self.barriertype, parties=0)
-+        self.assertRaises(ValueError, self.barriertype, parties=-1)
-+
-     def test_barrier(self, passes=1):
-         """
-         Test that a barrier is passed in lockstep
-diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
-index 5b9bceceba1..4a5976afa75 100644
---- a/Lib/test/pickletester.py
-+++ b/Lib/test/pickletester.py
-@@ -144,6 +144,14 @@
-     def __getinitargs__(self):
-         return ()
- 
-+import __main__
-+__main__.C = C
-+C.__module__ = "__main__"
-+__main__.D = D
-+D.__module__ = "__main__"
-+__main__.E = E
-+E.__module__ = "__main__"
-+
- # Simple mutable object.
- class Object: - pass -@@ -157,14 +165,6 @@ - # Shouldn't support the recursion itself - return K, (self.value,) - --import __main__ --__main__.C = C --C.__module__ = "__main__" --__main__.D = D --D.__module__ = "__main__" --__main__.E = E --E.__module__ = "__main__" -- - class myint(int): - def __init__(self, x): - self.str = str(x) -@@ -1179,6 +1179,153 @@ - self.assertIs(type(unpickled), collections.UserDict) - self.assertEqual(unpickled, collections.UserDict({1: 2})) - -+ def test_load_global(self): -+ self.assertIs(self.loads(b'cbuiltins\nstr\n.'), str) -+ self.assertIs(self.loads(b'cmath\nlog\n.'), math.log) -+ self.assertIs(self.loads(b'cos.path\njoin\n.'), os.path.join) -+ self.assertIs(self.loads(b'\x80\x04cbuiltins\nstr.upper\n.'), str.upper) -+ with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): -+ self.assertEqual(self.loads(b'\x80\x04cm\xc3\xb6dule\ngl\xc3\xb6bal\n.'), 42) -+ -+ self.assertRaises(UnicodeDecodeError, self.loads, b'c\xff\nlog\n.') -+ self.assertRaises(UnicodeDecodeError, self.loads, b'cmath\n\xff\n.') -+ self.assertRaises(self.truncated_errors, self.loads, b'c\nlog\n.') -+ self.assertRaises(self.truncated_errors, self.loads, b'cmath\n\n.') -+ self.assertRaises(self.truncated_errors, self.loads, b'\x80\x04cmath\n\n.') -+ -+ def test_load_stack_global(self): -+ self.assertIs(self.loads(b'\x8c\x08builtins\x8c\x03str\x93.'), str) -+ self.assertIs(self.loads(b'\x8c\x04math\x8c\x03log\x93.'), math.log) -+ self.assertIs(self.loads(b'\x8c\x07os.path\x8c\x04join\x93.'), -+ os.path.join) -+ self.assertIs(self.loads(b'\x80\x04\x8c\x08builtins\x8c\x09str.upper\x93.'), -+ str.upper) -+ with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): -+ self.assertEqual(self.loads(b'\x80\x04\x8c\x07m\xc3\xb6dule\x8c\x07gl\xc3\xb6bal\x93.'), 42) -+ -+ self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x01\xff\x8c\x03log\x93.') -+ self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x04math\x8c\x01\xff\x93.') -+ self.assertRaises(ValueError, self.loads, b'\x8c\x00\x8c\x03log\x93.') -+ self.assertRaises(AttributeError, self.loads, b'\x8c\x04math\x8c\x00\x93.') -+ self.assertRaises(AttributeError, self.loads, b'\x80\x04\x8c\x04math\x8c\x00\x93.') -+ -+ self.assertRaises(pickle.UnpicklingError, self.loads, b'N\x8c\x03log\x93.') -+ self.assertRaises(pickle.UnpicklingError, self.loads, b'\x8c\x04mathN\x93.') -+ self.assertRaises(pickle.UnpicklingError, self.loads, b'\x80\x04\x8c\x04mathN\x93.') -+ -+ def test_find_class(self): -+ unpickler = self.unpickler(io.BytesIO()) -+ unpickler_nofix = self.unpickler(io.BytesIO(), fix_imports=False) -+ unpickler4 = self.unpickler(io.BytesIO(b'\x80\x04N.')) -+ unpickler4.load() -+ -+ self.assertIs(unpickler.find_class('__builtin__', 'str'), str) -+ self.assertRaises(ModuleNotFoundError, -+ unpickler_nofix.find_class, '__builtin__', 'str') -+ self.assertIs(unpickler.find_class('builtins', 'str'), str) -+ self.assertIs(unpickler_nofix.find_class('builtins', 'str'), str) -+ self.assertIs(unpickler.find_class('math', 'log'), math.log) -+ self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) -+ self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) -+ -+ self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper) -+ with self.assertRaises(AttributeError): -+ unpickler.find_class('builtins', 'str.upper') -+ -+ with self.assertRaises(AttributeError): -+ unpickler.find_class('math', 'spam') -+ with self.assertRaises(AttributeError): -+ 
unpickler4.find_class('math', 'spam') -+ with self.assertRaises(AttributeError): -+ unpickler.find_class('math', 'log.spam') -+ with self.assertRaises(AttributeError): -+ unpickler4.find_class('math', 'log.spam') -+ with self.assertRaises(AttributeError): -+ unpickler.find_class('math', 'log..spam') -+ with self.assertRaises(AttributeError): -+ unpickler4.find_class('math', 'log..spam') -+ with self.assertRaises(AttributeError): -+ unpickler.find_class('math', '') -+ with self.assertRaises(AttributeError): -+ unpickler4.find_class('math', '') -+ self.assertRaises(ModuleNotFoundError, unpickler.find_class, 'spam', 'log') -+ self.assertRaises(ValueError, unpickler.find_class, '', 'log') -+ -+ self.assertRaises(TypeError, unpickler.find_class, None, 'log') -+ self.assertRaises(TypeError, unpickler.find_class, 'math', None) -+ self.assertRaises((TypeError, AttributeError), unpickler4.find_class, 'math', None) -+ -+ def test_custom_find_class(self): -+ def loads(data): -+ class Unpickler(self.unpickler): -+ def find_class(self, module_name, global_name): -+ return (module_name, global_name) -+ return Unpickler(io.BytesIO(data)).load() -+ -+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) -+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) -+ -+ def loads(data): -+ class Unpickler(self.unpickler): -+ @staticmethod -+ def find_class(module_name, global_name): -+ return (module_name, global_name) -+ return Unpickler(io.BytesIO(data)).load() -+ -+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) -+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) -+ -+ def loads(data): -+ class Unpickler(self.unpickler): -+ @classmethod -+ def find_class(cls, module_name, global_name): -+ return (module_name, global_name) -+ return Unpickler(io.BytesIO(data)).load() -+ -+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) -+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) -+ -+ def loads(data): -+ class Unpickler(self.unpickler): -+ pass -+ def find_class(module_name, global_name): -+ return (module_name, global_name) -+ unpickler = Unpickler(io.BytesIO(data)) -+ unpickler.find_class = find_class -+ return unpickler.load() -+ -+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) -+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) -+ -+ def test_bad_ext_code(self): -+ # unregistered extension code -+ self.check_unpickling_error(ValueError, b'\x82\x01.') -+ self.check_unpickling_error(ValueError, b'\x82\xff.') -+ self.check_unpickling_error(ValueError, b'\x83\x01\x00.') -+ self.check_unpickling_error(ValueError, b'\x83\xff\xff.') -+ self.check_unpickling_error(ValueError, b'\x84\x01\x00\x00\x00.') -+ self.check_unpickling_error(ValueError, b'\x84\xff\xff\xff\x7f.') -+ # EXT specifies code <= 0 -+ self.check_unpickling_error(pickle.UnpicklingError, b'\x82\x00.') -+ self.check_unpickling_error(pickle.UnpicklingError, b'\x83\x00\x00.') -+ self.check_unpickling_error(pickle.UnpicklingError, b'\x84\x00\x00\x00\x00.') -+ self.check_unpickling_error(pickle.UnpicklingError, b'\x84\x00\x00\x00\x80.') -+ self.check_unpickling_error(pickle.UnpicklingError, b'\x84\xff\xff\xff\xff.') -+ -+ @support.cpython_only -+ def test_bad_ext_inverted_registry(self): -+ code = 1 -+ def check(key, exc): -+ with support.swap_item(copyreg._inverted_registry, code, key): -+ with self.assertRaises(exc): -+ self.loads(b'\x82\x01.') -+ check(None, ValueError) -+ check((), ValueError) -+ check((__name__,), 
(TypeError, ValueError)) -+ check((__name__, "MyList", "x"), (TypeError, ValueError)) -+ check((__name__, None), (TypeError, ValueError)) -+ check((None, "MyList"), (TypeError, ValueError)) -+ - def test_bad_reduce(self): - self.assertEqual(self.loads(b'cbuiltins\nint\n)R.'), 0) - self.check_unpickling_error(TypeError, b'N)R.') -@@ -1443,6 +1590,500 @@ - [ToBeUnpickled] * 2) - - -+class AbstractPicklingErrorTests: -+ # Subclass must define self.dumps, self.pickler. -+ -+ def test_bad_reduce_result(self): -+ obj = REX([print, ()]) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj = REX((print,)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj = REX((print, (), None, None, None, None, None)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_bad_reconstructor(self): -+ obj = REX((42, ())) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_reconstructor(self): -+ obj = REX((UnpickleableCallable(), ())) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_reconstructor_args(self): -+ obj = REX((print, [])) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_reconstructor_args(self): -+ obj = REX((print, (1, 2, UNPICKLEABLE))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_newobj_args(self): -+ obj = REX((copyreg.__newobj__, ())) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises((IndexError, pickle.PicklingError)) as cm: -+ self.dumps(obj, proto) -+ -+ obj = REX((copyreg.__newobj__, [REX])) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises((IndexError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_bad_newobj_class(self): -+ obj = REX((copyreg.__newobj__, (NoNew(),))) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_wrong_newobj_class(self): -+ obj = REX((copyreg.__newobj__, (str,))) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_newobj_class(self): -+ class LocalREX(REX): pass -+ obj = LocalREX((copyreg.__newobj__, (LocalREX,))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((pickle.PicklingError, AttributeError)): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_newobj_args(self): -+ obj = REX((copyreg.__newobj__, (REX, 1, 2, UNPICKLEABLE))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_newobj_ex_args(self): -+ obj = REX((copyreg.__newobj_ex__, ())) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises((ValueError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ obj = 
REX((copyreg.__newobj_ex__, 42)) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj = REX((copyreg.__newobj_ex__, (REX, 42, {}))) -+ is_py = self.pickler is pickle._Pickler -+ for proto in protocols[2:4] if is_py else protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ obj = REX((copyreg.__newobj_ex__, (REX, (), []))) -+ for proto in protocols[2:4] if is_py else protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_bad_newobj_ex__class(self): -+ obj = REX((copyreg.__newobj_ex__, (NoNew(), (), {}))) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_wrong_newobj_ex_class(self): -+ if self.pickler is not pickle._Pickler: -+ self.skipTest('only verified in the Python implementation') -+ obj = REX((copyreg.__newobj_ex__, (str, (), {}))) -+ for proto in protocols[2:]: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_newobj_ex_class(self): -+ class LocalREX(REX): pass -+ obj = LocalREX((copyreg.__newobj_ex__, (LocalREX, (), {}))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((pickle.PicklingError, AttributeError)): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_newobj_ex_args(self): -+ obj = REX((copyreg.__newobj_ex__, (REX, (1, 2, UNPICKLEABLE), {}))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_newobj_ex_kwargs(self): -+ obj = REX((copyreg.__newobj_ex__, (REX, (), {'a': UNPICKLEABLE}))) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_state(self): -+ obj = REX_state(UNPICKLEABLE) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_state_setter(self): -+ if self.pickler is pickle._Pickler: -+ self.skipTest('only verified in the C implementation') -+ obj = REX((print, (), 'state', None, None, 42)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_state_setter(self): -+ obj = REX((print, (), 'state', None, None, UnpickleableCallable())) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_state_with_state_setter(self): -+ obj = REX((print, (), UNPICKLEABLE, None, None, print)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_object_list_items(self): -+ # Issue4176: crash when 4th and 5th items of __reduce__() -+ # are not iterators -+ obj = REX((list, (), None, 42)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ if self.pickler is not pickle._Pickler: -+ # Python implementation is less strict and also accepts iterables. 
-+ obj = REX((list, (), None, [])) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_object_list_items(self): -+ obj = REX_six([1, 2, UNPICKLEABLE]) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_bad_object_dict_items(self): -+ # Issue4176: crash when 4th and 5th items of __reduce__() -+ # are not iterators -+ obj = REX((dict, (), None, None, 42)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ for proto in protocols: -+ obj = REX((dict, (), None, None, iter([('a',)]))) -+ with self.subTest(proto=proto): -+ with self.assertRaises((ValueError, TypeError)): -+ self.dumps(obj, proto) -+ -+ if self.pickler is not pickle._Pickler: -+ # Python implementation is less strict and also accepts iterables. -+ obj = REX((dict, (), None, None, [])) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((TypeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_object_dict_items(self): -+ obj = REX_seven({'a': UNPICKLEABLE}) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_list_items(self): -+ obj = [1, [2, 3, UNPICKLEABLE]] -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ for n in [0, 1, 1000, 1005]: -+ obj = [*range(n), UNPICKLEABLE] -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_tuple_items(self): -+ obj = (1, (2, 3, UNPICKLEABLE)) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ obj = (*range(10), UNPICKLEABLE) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_dict_items(self): -+ obj = {'a': {'b': UNPICKLEABLE}} -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ for n in [0, 1, 1000, 1005]: -+ obj = dict.fromkeys(range(n)) -+ obj['a'] = UNPICKLEABLE -+ for proto in protocols: -+ with self.subTest(proto=proto, n=n): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_set_items(self): -+ obj = {UNPICKLEABLE} -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_unpickleable_frozenset_items(self): -+ obj = frozenset({frozenset({UNPICKLEABLE})}) -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(CustomError): -+ self.dumps(obj, proto) -+ -+ def test_global_lookup_error(self): -+ # Global name does not exist -+ obj = REX('spam') -+ obj.__module__ = __name__ -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj.__module__ = 'nonexisting' -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj.__module__ = '' -+ for 
proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((ValueError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ obj.__module__ = None -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_nonencodable_global_name_error(self): -+ for proto in protocols[:4]: -+ with self.subTest(proto=proto): -+ name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' -+ obj = REX(name) -+ obj.__module__ = __name__ -+ with support.swap_item(globals(), name, obj): -+ with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_nonencodable_module_name_error(self): -+ for proto in protocols[:4]: -+ with self.subTest(proto=proto): -+ name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' -+ obj = REX('test') -+ obj.__module__ = name -+ mod = types.SimpleNamespace(test=obj) -+ with support.swap_item(sys.modules, name, mod): -+ with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): -+ self.dumps(obj, proto) -+ -+ def test_nested_lookup_error(self): -+ # Nested name does not exist -+ obj = REX('AbstractPickleTests.spam') -+ obj.__module__ = __name__ -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj.__module__ = None -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_wrong_object_lookup_error(self): -+ # Name is bound to different object -+ obj = REX('AbstractPickleTests') -+ obj.__module__ = __name__ -+ AbstractPickleTests.ham = [] -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ obj.__module__ = None -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises(pickle.PicklingError): -+ self.dumps(obj, proto) -+ -+ def test_local_lookup_error(self): -+ # Test that whichmodule() errors out cleanly when looking up -+ # an assumed globally-reachable object fails. -+ def f(): -+ pass -+ # Since the function is local, lookup will fail -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((AttributeError, pickle.PicklingError)): -+ self.dumps(f, proto) -+ # Same without a __module__ attribute (exercises a different path -+ # in _pickle.c). -+ del f.__module__ -+ for proto in protocols: -+ with self.subTest(proto=proto): -+ with self.assertRaises((AttributeError, pickle.PicklingError)): -+ self.dumps(f, proto) -+ # Yet a different path. 
-+        f.__name__ = f.__qualname__
-+        for proto in protocols:
-+            with self.subTest(proto=proto):
-+                with self.assertRaises((AttributeError, pickle.PicklingError)):
-+                    self.dumps(f, proto)
-+
-+    def test_reduce_ex_None(self):
-+        if self.pickler is pickle._Pickler:
-+            self.skipTest('only verified in the C implementation')
-+        c = REX_None()
-+        with self.assertRaises(TypeError):
-+            self.dumps(c)
-+
-+    def test_reduce_None(self):
-+        c = R_None()
-+        with self.assertRaises(TypeError):
-+            self.dumps(c)
-+
-+    @no_tracing
-+    def test_bad_getattr(self):
-+        # Issue #3514: crash when there is an infinite loop in __getattr__
-+        x = BadGetattr()
-+        for proto in range(2):
-+            with support.infinite_recursion(25):
-+                self.assertRaises(RuntimeError, self.dumps, x, proto)
-+        for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
-+            s = self.dumps(x, proto)
-+
-+    def test_picklebuffer_error(self):
-+        # PickleBuffer forbidden with protocol < 5
-+        pb = pickle.PickleBuffer(b"foobar")
-+        for proto in range(0, 5):
-+            with self.subTest(proto=proto):
-+                with self.assertRaises(pickle.PickleError) as cm:
-+                    self.dumps(pb, proto)
-+                self.assertEqual(str(cm.exception),
-+                    'PickleBuffer can only be pickled with protocol >= 5')
-+
-+    def test_non_continuous_buffer(self):
-+        if self.pickler is pickle._Pickler:
-+            self.skipTest('CRASHES (see gh-122306)')
-+        for proto in protocols[5:]:
-+            with self.subTest(proto=proto):
-+                pb = pickle.PickleBuffer(memoryview(b"foobar")[::2])
-+                with self.assertRaises(pickle.PicklingError):
-+                    self.dumps(pb, proto)
-+
-+    def test_buffer_callback_error(self):
-+        def buffer_callback(buffers):
-+            raise CustomError
-+        pb = pickle.PickleBuffer(b"foobar")
-+        with self.assertRaises(CustomError):
-+            self.dumps(pb, 5, buffer_callback=buffer_callback)
-+
-+    def test_evil_pickler_mutating_collection(self):
-+        # https://github.com/python/cpython/issues/92930
-+        global Clearer
-+        class Clearer:
-+            pass
-+
-+        def check(collection):
-+            class EvilPickler(self.pickler):
-+                def persistent_id(self, obj):
-+                    if isinstance(obj, Clearer):
-+                        collection.clear()
-+                    return None
-+            pickler = EvilPickler(io.BytesIO(), proto)
-+            try:
-+                pickler.dump(collection)
-+            except RuntimeError as e:
-+                expected = "changed size during iteration"
-+                self.assertIn(expected, str(e))
-+
-+        for proto in protocols:
-+            check([Clearer()])
-+            check([Clearer(), Clearer()])
-+            check({Clearer()})
-+            check({Clearer(), Clearer()})
-+            check({Clearer(): 1})
-+            check({Clearer(): 1, Clearer(): 2})
-+            check({1: Clearer(), 2: Clearer()})
-+
-+    @support.cpython_only
-+    def test_bad_ext_code(self):
-+        # This should never happen in normal circumstances, because the type
-+        # and the value of the extension code is checked in copyreg.add_extension().
-+        key = (__name__, 'MyList')
-+        def check(code, exc):
-+            assert key not in copyreg._extension_registry
-+            assert code not in copyreg._inverted_registry
-+            with (support.swap_item(copyreg._extension_registry, key, code),
-+                  support.swap_item(copyreg._inverted_registry, code, key)):
-+                for proto in protocols[2:]:
-+                    with self.assertRaises(exc):
-+                        self.dumps(MyList, proto)
-+
-+        check(object(), TypeError)
-+        check(None, TypeError)
-+        check(-1, (RuntimeError, struct.error))
-+        check(0, RuntimeError)
-+        check(2**31, (RuntimeError, OverflowError, struct.error))
-+        check(2**1000, (OverflowError, struct.error))
-+        check(-2**1000, (OverflowError, struct.error))
-+
- 
- class AbstractPickleTests:
-     # Subclass must define self.dumps, self.loads.
-@@ -1845,6 +2486,25 @@ - p = self.dumps(s, proto) - self.assert_is_copy(s, self.loads(p)) - -+ def test_bytes_memoization(self): -+ for proto in protocols: -+ for array_type in [bytes, ZeroCopyBytes]: -+ for s in b'', b'xyz', b'xyz'*100: -+ with self.subTest(proto=proto, array_type=array_type, s=s, independent=False): -+ b = array_type(s) -+ p = self.dumps((b, b), proto) -+ x, y = self.loads(p) -+ self.assertIs(x, y) -+ self.assert_is_copy((b, b), (x, y)) -+ -+ with self.subTest(proto=proto, array_type=array_type, s=s, independent=True): -+ b1, b2 = array_type(s), array_type(s) -+ p = self.dumps((b1, b2), proto) -+ # Note that (b1, b2) = self.loads(p) might have identical -+ # components, i.e., b1 is b2, but this is not always the -+ # case if the content is large (equality still holds). -+ self.assert_is_copy((b1, b2), self.loads(p)) -+ - def test_bytearray(self): - for proto in protocols: - for s in b'', b'xyz', b'xyz'*100: -@@ -1864,13 +2524,31 @@ - self.assertNotIn(b'bytearray', p) - self.assertTrue(opcode_in_pickle(pickle.BYTEARRAY8, p)) - -- def test_bytearray_memoization_bug(self): -+ def test_bytearray_memoization(self): - for proto in protocols: -- for s in b'', b'xyz', b'xyz'*100: -- b = bytearray(s) -- p = self.dumps((b, b), proto) -- b1, b2 = self.loads(p) -- self.assertIs(b1, b2) -+ for array_type in [bytearray, ZeroCopyBytearray]: -+ for s in b'', b'xyz', b'xyz'*100: -+ with self.subTest(proto=proto, array_type=array_type, s=s, independent=False): -+ b = array_type(s) -+ p = self.dumps((b, b), proto) -+ b1, b2 = self.loads(p) -+ self.assertIs(b1, b2) -+ -+ with self.subTest(proto=proto, array_type=array_type, s=s, independent=True): -+ b1a, b2a = array_type(s), array_type(s) -+ # Unlike bytes, equal but independent bytearray objects are -+ # never identical. -+ self.assertIsNot(b1a, b2a) -+ -+ p = self.dumps((b1a, b2a), proto) -+ b1b, b2b = self.loads(p) -+ self.assertIsNot(b1b, b2b) -+ -+ self.assertIsNot(b1a, b1b) -+ self.assert_is_copy(b1a, b1b) -+ -+ self.assertIsNot(b2a, b2b) -+ self.assert_is_copy(b2a, b2b) - - def test_ints(self): - for proto in protocols: -@@ -2416,38 +3094,11 @@ - y = self.loads(s) - self.assertEqual(y._reduce_called, 1) - -- @no_tracing -- def test_bad_getattr(self): -- # Issue #3514: crash when there is an infinite loop in __getattr__ -- x = BadGetattr() -- for proto in range(2): -- with support.infinite_recursion(): -- self.assertRaises(RuntimeError, self.dumps, x, proto) -- for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): -- s = self.dumps(x, proto) -- -- def test_reduce_bad_iterator(self): -- # Issue4176: crash when 4th and 5th items of __reduce__() -- # are not iterators -- class C(object): -- def __reduce__(self): -- # 4th item is not an iterator -- return list, (), None, [], None -- class D(object): -- def __reduce__(self): -- # 5th item is not an iterator -- return dict, (), None, None, [] -- -- # Python implementation is less strict and also accepts iterables. 
-- for proto in protocols: -- try: -- self.dumps(C(), proto) -- except pickle.PicklingError: -- pass -- try: -- self.dumps(D(), proto) -- except pickle.PicklingError: -- pass -+ def test_pickle_setstate_None(self): -+ c = C_None_setstate() -+ p = self.dumps(c) -+ with self.assertRaises((AttributeError, TypeError)): -+ self.loads(p) - - def test_many_puts_and_gets(self): - # Test that internal data structures correctly deal with lots of -@@ -2765,6 +3416,18 @@ - self.assertIs(unpickled, Recursive) - del Recursive.mod # break reference loop - -+ def test_recursive_nested_names2(self): -+ global Recursive -+ class Recursive: -+ pass -+ Recursive.ref = Recursive -+ Recursive.__qualname__ = 'Recursive.ref' -+ for proto in range(pickle.HIGHEST_PROTOCOL + 1): -+ with self.subTest(proto=proto): -+ unpickled = self.loads(self.dumps(Recursive, proto)) -+ self.assertIs(unpickled, Recursive) -+ del Recursive.ref # break reference loop -+ - def test_py_methods(self): - global PyMethodsTest - class PyMethodsTest: -@@ -2885,27 +3548,6 @@ - self.assertIn(('c%s\n%s' % (mod, name)).encode(), pickled) - self.assertIs(type(self.loads(pickled)), type(val)) - -- def test_local_lookup_error(self): -- # Test that whichmodule() errors out cleanly when looking up -- # an assumed globally-reachable object fails. -- def f(): -- pass -- # Since the function is local, lookup will fail -- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): -- with self.assertRaises((AttributeError, pickle.PicklingError)): -- pickletools.dis(self.dumps(f, proto)) -- # Same without a __module__ attribute (exercises a different path -- # in _pickle.c). -- del f.__module__ -- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): -- with self.assertRaises((AttributeError, pickle.PicklingError)): -- pickletools.dis(self.dumps(f, proto)) -- # Yet a different path. 
-- f.__name__ = f.__qualname__ -- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): -- with self.assertRaises((AttributeError, pickle.PicklingError)): -- pickletools.dis(self.dumps(f, proto)) -- - # - # PEP 574 tests below - # -@@ -3016,20 +3658,6 @@ - self.assertIs(type(new), type(obj)) - self.assertEqual(new, obj) - -- def test_picklebuffer_error(self): -- # PickleBuffer forbidden with protocol < 5 -- pb = pickle.PickleBuffer(b"foobar") -- for proto in range(0, 5): -- with self.assertRaises(pickle.PickleError): -- self.dumps(pb, proto) -- -- def test_buffer_callback_error(self): -- def buffer_callback(buffers): -- 1/0 -- pb = pickle.PickleBuffer(b"foobar") -- with self.assertRaises(ZeroDivisionError): -- self.dumps(pb, 5, buffer_callback=buffer_callback) -- - def test_buffers_error(self): - pb = pickle.PickleBuffer(b"foobar") - for proto in range(5, pickle.HIGHEST_PROTOCOL + 1): -@@ -3121,37 +3749,6 @@ - expected = "changed size during iteration" - self.assertIn(expected, str(e)) - -- def test_evil_pickler_mutating_collection(self): -- # https://github.com/python/cpython/issues/92930 -- if not hasattr(self, "pickler"): -- raise self.skipTest(f"{type(self)} has no associated pickler type") -- -- global Clearer -- class Clearer: -- pass -- -- def check(collection): -- class EvilPickler(self.pickler): -- def persistent_id(self, obj): -- if isinstance(obj, Clearer): -- collection.clear() -- return None -- pickler = EvilPickler(io.BytesIO(), proto) -- try: -- pickler.dump(collection) -- except RuntimeError as e: -- expected = "changed size during iteration" -- self.assertIn(expected, str(e)) -- -- for proto in protocols: -- check([Clearer()]) -- check([Clearer(), Clearer()]) -- check({Clearer()}) -- check({Clearer(), Clearer()}) -- check({Clearer(): 1}) -- check({Clearer(): 1, Clearer(): 2}) -- check({1: Clearer(), 2: Clearer()}) -- - - class BigmemPickleTests: - -@@ -3282,6 +3879,18 @@ - - # Test classes for reduce_ex - -+class R: -+ def __init__(self, reduce=None): -+ self.reduce = reduce -+ def __reduce__(self, proto): -+ return self.reduce -+ -+class REX: -+ def __init__(self, reduce_ex=None): -+ self.reduce_ex = reduce_ex -+ def __reduce_ex__(self, proto): -+ return self.reduce_ex -+ - class REX_one(object): - """No __reduce_ex__ here, but inheriting it from object""" - _reduce_called = 0 -@@ -3357,6 +3966,34 @@ - def __reduce__(self): - return type(self), (), self.state - -+class REX_None: -+ """ Setting __reduce_ex__ to None should fail """ -+ __reduce_ex__ = None -+ -+class R_None: -+ """ Setting __reduce__ to None should fail """ -+ __reduce__ = None -+ -+class C_None_setstate: -+ """ Setting __setstate__ to None should fail """ -+ def __getstate__(self): -+ return 1 -+ -+ __setstate__ = None -+ -+class CustomError(Exception): -+ pass -+ -+class Unpickleable: -+ def __reduce__(self): -+ raise CustomError -+ -+UNPICKLEABLE = Unpickleable() -+ -+class UnpickleableCallable(Unpickleable): -+ def __call__(self, *args, **kwargs): -+ pass -+ - - # Test classes for newobj - -@@ -3425,6 +4062,12 @@ - def __getattr__(self, key): - self.foo - -+class NoNew: -+ def __getattribute__(self, name): -+ if name == '__new__': -+ raise AttributeError -+ return super().__getattribute__(name) -+ - - class AbstractPickleModuleTests: - -@@ -3497,7 +4140,7 @@ - raise OSError - @property - def bad_property(self): -- 1/0 -+ raise CustomError - - # File without read and readline - class F: -@@ -3518,23 +4161,23 @@ - class F: - read = bad_property - readline = raises_oserror -- 
self.assertRaises(ZeroDivisionError, self.Unpickler, F()) -+ self.assertRaises(CustomError, self.Unpickler, F()) - - # File with bad readline - class F: - readline = bad_property - read = raises_oserror -- self.assertRaises(ZeroDivisionError, self.Unpickler, F()) -+ self.assertRaises(CustomError, self.Unpickler, F()) - - # File with bad readline, no read - class F: - readline = bad_property -- self.assertRaises(ZeroDivisionError, self.Unpickler, F()) -+ self.assertRaises(CustomError, self.Unpickler, F()) - - # File with bad read, no readline - class F: - read = bad_property -- self.assertRaises((AttributeError, ZeroDivisionError), self.Unpickler, F()) -+ self.assertRaises((AttributeError, CustomError), self.Unpickler, F()) - - # File with bad peek - class F: -@@ -3543,7 +4186,7 @@ - readline = raises_oserror - try: - self.Unpickler(F()) -- except ZeroDivisionError: -+ except CustomError: - pass - - # File with bad readinto -@@ -3553,7 +4196,7 @@ - readline = raises_oserror - try: - self.Unpickler(F()) -- except ZeroDivisionError: -+ except CustomError: - pass - - def test_pickler_bad_file(self): -@@ -3566,8 +4209,8 @@ - class F: - @property - def write(self): -- 1/0 -- self.assertRaises(ZeroDivisionError, self.Pickler, F()) -+ raise CustomError -+ self.assertRaises(CustomError, self.Pickler, F()) - - def check_dumps_loads_oob_buffers(self, dumps, loads): - # No need to do the full gamut of tests here, just enough to -@@ -3675,9 +4318,15 @@ - - def test_protocol0_is_ascii_only(self): - non_ascii_str = "\N{EMPTY SET}" -- self.assertRaises(pickle.PicklingError, self.dumps, non_ascii_str, 0) -+ with self.assertRaises(pickle.PicklingError) as cm: -+ self.dumps(non_ascii_str, 0) -+ self.assertEqual(str(cm.exception), -+ 'persistent IDs in protocol 0 must be ASCII strings') - pickled = pickle.PERSID + non_ascii_str.encode('utf-8') + b'\n.' -- self.assertRaises(pickle.UnpicklingError, self.loads, pickled) -+ with self.assertRaises(pickle.UnpicklingError) as cm: -+ self.loads(pickled) -+ self.assertEqual(str(cm.exception), -+ 'persistent IDs in protocol 0 must be ASCII strings') - - - class AbstractPicklerUnpicklerObjectTests: -diff --git a/Lib/test/pyclbr_input.py b/Lib/test/pyclbr_input.py -index 19ccd62dead..5535edbfa77 100644 ---- a/Lib/test/pyclbr_input.py -+++ b/Lib/test/pyclbr_input.py -@@ -12,17 +12,19 @@ - def bm(self): pass - - class C (B): -- foo = Other().foo -- om = Other.om -- - d = 10 - -- # XXX: This causes test_pyclbr.py to fail, but only because the -- # introspection-based is_method() code in the test can't -- # distinguish between this and a genuine method function like m(). -- # The pyclbr.py module gets this right as it parses the text. -+ # This one is correctly considered by both test_pyclbr.py and pyclbr.py -+ # as a non-method of C. -+ foo = Other().foo -+ -+ # This causes test_pyclbr.py to fail, but only because the -+ # introspection-based is_method() code in the test can't -+ # distinguish between this and a genuine method function like m(). - # -- #f = f -+ # The pyclbr.py module gets this right as it parses the text. 
-+ om = Other.om -+ f = f - - def m(self): pass - -@@ -31,3 +33,53 @@ - - @classmethod - def cm(self): pass -+ -+# Check that mangling is correctly handled -+ -+class a: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass -+ -+class _: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass -+ -+class __: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass -+ -+class ___: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass -+ -+class _a: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass -+ -+class __a: -+ def a(self): pass -+ def _(self): pass -+ def _a(self): pass -+ def __(self): pass -+ def ___(self): pass -+ def __a(self): pass diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index 6efeaad8126..e9b0df085d0 100644 --- a/Lib/test/pythoninfo.py @@ -19261,7 +1950,7 @@ index 6efeaad8126..e9b0df085d0 100644 "LDFLAGS", "LDSHARED", diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py -index 4e793f15494..6e7ff01e68b 100644 +index 8519fedf8db..6e7ff01e68b 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -43,7 +43,7 @@ @@ -19273,24 +1962,7 @@ index 4e793f15494..6e7ff01e68b 100644 # os "get_pagesize", # network -@@ -58,6 +58,7 @@ - "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", - "Py_DEBUG", "EXCEEDS_RECURSION_LIMIT", "C_RECURSION_LIMIT", - "skip_on_s390x", -+ "BrokenIter", - ] - - -@@ -386,7 +387,7 @@ - reason = 'not suitable for buildbots' - try: - isbuildbot = getpass.getuser().lower() == 'buildbot' -- except (KeyError, EnvironmentError) as err: -+ except (KeyError, OSError) as err: - warnings.warn(f'getpass.getuser() failed {err}.', RuntimeWarning) - isbuildbot = False - return unittest.skipIf(isbuildbot, reason) -@@ -530,7 +531,7 @@ +@@ -531,7 +531,7 @@ is_android = hasattr(sys, 'getandroidapilevel') @@ -19299,7 +1971,7 @@ index 4e793f15494..6e7ff01e68b 100644 unix_shell = '/system/bin/sh' if is_android else '/bin/sh' else: unix_shell = None -@@ -540,19 +541,35 @@ +@@ -541,19 +541,35 @@ is_emscripten = sys.platform == "emscripten" is_wasi = sys.platform == "wasi" @@ -19338,103 +2010,6 @@ index 4e793f15494..6e7ff01e68b 100644 def requires_working_socket(*, module=False): """Skip tests or modules that require working sockets -@@ -816,10 +833,20 @@ - _align = '0P' - _vheader = _header + 'n' - -+def check_bolt_optimized(): -+ # Always return false, if the platform is WASI, -+ # because BOLT optimization does not support WASM binary. 
-+ if is_wasi: -+ return False -+ config_args = sysconfig.get_config_var('CONFIG_ARGS') or '' -+ return '--enable-bolt' in config_args -+ -+ - def calcobjsize(fmt): - import struct - return struct.calcsize(_header + fmt + _align) - -+ - def calcvobjsize(fmt): - import struct - return struct.calcsize(_vheader + fmt + _align) -@@ -2413,3 +2440,75 @@ - 'build', - } - return ignored -+ -+ -+def iter_builtin_types(): -+ for obj in __builtins__.values(): -+ if not isinstance(obj, type): -+ continue -+ cls = obj -+ if cls.__module__ != 'builtins': -+ continue -+ yield cls -+ -+ -+def iter_slot_wrappers(cls): -+ assert cls.__module__ == 'builtins', cls -+ -+ def is_slot_wrapper(name, value): -+ if not isinstance(value, types.WrapperDescriptorType): -+ assert not repr(value).startswith(' compare the 8bit string values instead - self.assertNotEqual(a.tobytes(), b.tobytes()) - b.byteswap() ---- /dev/null -+++ b/Lib/test/test_ast/__init__.py -@@ -0,0 +1,7 @@ -+import os -+ -+from test import support -+ -+ -+def load_tests(*args): -+ return support.load_package_tests(os.path.dirname(__file__), *args) ---- /dev/null -+++ b/Lib/test/test_ast/snippets.py -@@ -0,0 +1,400 @@ -+import ast -+import sys -+ -+from test.test_ast.utils import to_tuple -+ -+ -+# These tests are compiled through "exec" -+# There should be at least one test per statement -+exec_tests = [ -+ # None -+ "None", -+ # Module docstring -+ "'module docstring'", -+ # FunctionDef -+ "def f(): pass", -+ # FunctionDef with docstring -+ "def f(): 'function docstring'", -+ # FunctionDef with arg -+ "def f(a): pass", -+ # FunctionDef with arg and default value -+ "def f(a=0): pass", -+ # FunctionDef with varargs -+ "def f(*args): pass", -+ # FunctionDef with varargs as TypeVarTuple -+ "def f(*args: *Ts): pass", -+ # FunctionDef with varargs as unpacked Tuple -+ "def f(*args: *tuple[int, ...]): pass", -+ # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple -+ "def f(*args: *tuple[int, *Ts]): pass", -+ # FunctionDef with kwargs -+ "def f(**kwargs): pass", -+ # FunctionDef with all kind of args and docstring -+ "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'", -+ # FunctionDef with type annotation on return involving unpacking -+ "def f() -> tuple[*Ts]: pass", -+ "def f() -> tuple[int, *Ts]: pass", -+ "def f() -> tuple[int, *tuple[int, ...]]: pass", -+ # ClassDef -+ "class C:pass", -+ # ClassDef with docstring -+ "class C: 'docstring for class C'", -+ # ClassDef, new style class -+ "class C(object): pass", -+ # Return -+ "def f():return 1", -+ # Delete -+ "del v", -+ # Assign -+ "v = 1", -+ "a,b = c", -+ "(a,b) = c", -+ "[a,b] = c", -+ # AnnAssign with unpacked types -+ "x: tuple[*Ts]", -+ "x: tuple[int, *Ts]", -+ "x: tuple[int, *tuple[str, ...]]", -+ # AugAssign -+ "v += 1", -+ # For -+ "for v in v:pass", -+ # While -+ "while v:pass", -+ # If -+ "if v:pass", -+ # If-Elif -+ "if a:\n pass\nelif b:\n pass", -+ # If-Elif-Else -+ "if a:\n pass\nelif b:\n pass\nelse:\n pass", -+ # With -+ "with x as y: pass", -+ "with x as y, z as q: pass", -+ # Raise -+ "raise Exception('string')", -+ # TryExcept -+ "try:\n pass\nexcept Exception:\n pass", -+ # TryFinally -+ "try:\n pass\nfinally:\n pass", -+ # TryStarExcept -+ "try:\n pass\nexcept* Exception:\n pass", -+ # Assert -+ "assert v", -+ # Import -+ "import sys", -+ # ImportFrom -+ "from sys import v", -+ # Global -+ "global v", -+ # Expr -+ "1", -+ # Pass, -+ "pass", -+ # Break -+ "for v in v:break", -+ # Continue -+ "for v in v:continue", -+ # for statements with naked 
tuples (see http://bugs.python.org/issue6704) -+ "for a,b in c: pass", -+ "for (a,b) in c: pass", -+ "for [a,b] in c: pass", -+ # Multiline generator expression (test for .lineno & .col_offset) -+ """( -+ ( -+ Aa -+ , -+ Bb -+ ) -+ for -+ Aa -+ , -+ Bb in Cc -+ )""", -+ # dictcomp -+ "{a : b for w in x for m in p if g}", -+ # dictcomp with naked tuple -+ "{a : b for v,w in x}", -+ # setcomp -+ "{r for l in x if g}", -+ # setcomp with naked tuple -+ "{r for l,m in x}", -+ # AsyncFunctionDef -+ "async def f():\n 'async function'\n await something()", -+ # AsyncFor -+ "async def f():\n async for e in i: 1\n else: 2", -+ # AsyncWith -+ "async def f():\n async with a as b: 1", -+ # PEP 448: Additional Unpacking Generalizations -+ "{**{1:2}, 2:3}", -+ "{*{1, 2}, 3}", -+ # Asynchronous comprehensions -+ "async def f():\n [i async for b in c]", -+ # Decorated FunctionDef -+ "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass", -+ # Decorated AsyncFunctionDef -+ "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass", -+ # Decorated ClassDef -+ "@deco1\n@deco2()\n@deco3(1)\nclass C: pass", -+ # Decorator with generator argument -+ "@deco(a for a in b)\ndef f(): pass", -+ # Decorator with attribute -+ "@a.b.c\ndef f(): pass", -+ # Simple assignment expression -+ "(a := 1)", -+ # Positional-only arguments -+ "def f(a, /,): pass", -+ "def f(a, /, c, d, e): pass", -+ "def f(a, /, c, *, d, e): pass", -+ "def f(a, /, c, *, d, e, **kwargs): pass", -+ # Positional-only arguments with defaults -+ "def f(a=1, /,): pass", -+ "def f(a=1, /, b=2, c=4): pass", -+ "def f(a=1, /, b=2, *, c=4): pass", -+ "def f(a=1, /, b=2, *, c): pass", -+ "def f(a=1, /, b=2, *, c=4, **kwargs): pass", -+ "def f(a=1, /, b=2, *, c, **kwargs): pass", -+ # Type aliases -+ "type X = int", -+ "type X[T] = int", -+ "type X[T, *Ts, **P] = (T, Ts, P)", -+ "type X[T: int, *Ts, **P] = (T, Ts, P)", -+ "type X[T: (int, str), *Ts, **P] = (T, Ts, P)", -+ # Generic classes -+ "class X[T]: pass", -+ "class X[T, *Ts, **P]: pass", -+ "class X[T: int, *Ts, **P]: pass", -+ "class X[T: (int, str), *Ts, **P]: pass", -+ # Generic functions -+ "def f[T](): pass", -+ "def f[T, *Ts, **P](): pass", -+ "def f[T: int, *Ts, **P](): pass", -+ "def f[T: (int, str), *Ts, **P](): pass", -+] -+ -+# These are compiled through "single" -+# because of overlap with "eval", it just tests what -+# can't be tested with "eval" -+single_tests = ["1+2"] -+ -+# These are compiled through "eval" -+# It should test all expressions -+eval_tests = [ -+ # None -+ "None", -+ # BoolOp -+ "a and b", -+ # BinOp -+ "a + b", -+ # UnaryOp -+ "not v", -+ # Lambda -+ "lambda:None", -+ # Dict -+ "{ 1:2 }", -+ # Empty dict -+ "{}", -+ # Set -+ "{None,}", -+ # Multiline dict (test for .lineno & .col_offset) -+ """{ -+ 1 -+ : -+ 2 -+ }""", -+ # ListComp -+ "[a for b in c if d]", -+ # GeneratorExp -+ "(a for b in c if d)", -+ # Comprehensions with multiple for targets -+ "[(a,b) for a,b in c]", -+ "[(a,b) for (a,b) in c]", -+ "[(a,b) for [a,b] in c]", -+ "{(a,b) for a,b in c}", -+ "{(a,b) for (a,b) in c}", -+ "{(a,b) for [a,b] in c}", -+ "((a,b) for a,b in c)", -+ "((a,b) for (a,b) in c)", -+ "((a,b) for [a,b] in c)", -+ # Yield - yield expressions can't work outside a function -+ # -+ # Compare -+ "1 < 2 < 3", -+ # Call -+ "f(1,2,c=3,*d,**e)", -+ # Call with multi-character starred -+ "f(*[0, 1])", -+ # Call with a generator argument -+ "f(a for a in b)", -+ # Num -+ "10", -+ # Str -+ "'string'", -+ # Attribute -+ "a.b", -+ # Subscript -+ "a[b:c]", -+ # Name -+ "v", -+ # List -+ "[1,2,3]", -+ # Empty list 
-+ "[]", -+ # Tuple -+ "1,2,3", -+ # Tuple -+ "(1,2,3)", -+ # Empty tuple -+ "()", -+ # Combination -+ "a.b.c.d(a.b[1:2])", -+] -+ -+ -+def main(): -+ if __name__ != '__main__': -+ return -+ if sys.argv[1:] == ['-g']: -+ for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), -+ (eval_tests, "eval")): -+ print(kind+"_results = [") -+ for statement in statements: -+ tree = ast.parse(statement, "?", kind) -+ print("%r," % (to_tuple(tree),)) -+ print("]") -+ print("main()") -+ raise SystemExit -+ unittest.main() -+ -+#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast/snippets.py -g ##### -+exec_results = [ -+('Module', [('Expr', (1, 0, 1, 4), ('Constant', (1, 0, 1, 4), None, None))], []), -+('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []), -+('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 
43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -+('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []), -+('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []), -+('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []), -+('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []), -+('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []), -+('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []), -+('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -+('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -+('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -+('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 
3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -+('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []), -+('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []), -+('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []), -+('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []), -+('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []), -+('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []), -+('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []), -+('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []), -+('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []), -+('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -+('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []), -+('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -+('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []), -+('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []), -+('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []), -+('Module', [('Global', (1, 0, 1, 8), ['v'])], []), -+('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []), -+('Module', [('Pass', (1, 0, 1, 4))], []), -+('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []), -+('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 
10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []), -+('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []), -+('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -+('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -+('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []), -+('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []), -+('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []), -+('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []), -+('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []), -+('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []), -+('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []), -+('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, 
None))], None)], [], None, None, [])], []), -+('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []), -+('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []), -+('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []), -+('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -+('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -+('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []), -+('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []), -+('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []), -+('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []), -+('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', 
None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []), -+('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []), -+('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []), -+('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []), -+('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts'), ('ParamSpec', (1, 15, 1, 18), 'P')], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []), -+('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',))), ('TypeVarTuple', (1, 15, 1, 18), 'Ts'), ('ParamSpec', (1, 20, 1, 23), 'P')], ('Tuple', (1, 27, 1, 
37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []), -+('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',))), ('TypeVarTuple', (1, 22, 1, 25), 'Ts'), ('ParamSpec', (1, 27, 1, 30), 'P')], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []), -+('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None)])], []), -+('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts'), ('ParamSpec', (1, 16, 1, 19), 'P')])], []), -+('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',))), ('TypeVarTuple', (1, 16, 1, 19), 'Ts'), ('ParamSpec', (1, 21, 1, 24), 'P')])], []), -+('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',))), ('TypeVarTuple', (1, 23, 1, 26), 'Ts'), ('ParamSpec', (1, 28, 1, 31), 'P')])], []), -+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None)])], []), -+('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts'), ('ParamSpec', (1, 14, 1, 17), 'P')])], []), -+('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',))), ('TypeVarTuple', (1, 14, 1, 17), 'Ts'), ('ParamSpec', (1, 19, 1, 22), 'P')])], []), -+('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',))), ('TypeVarTuple', (1, 21, 1, 24), 'Ts'), ('ParamSpec', (1, 26, 1, 29), 'P')])], []), -+] -+single_results = [ -+('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]), -+] -+eval_results = [ -+('Expression', ('Constant', (1, 0, 1, 4), None, None)), -+('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])), -+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -+('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))), -+('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))), -+('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], 
[('Constant', (1, 4, 1, 5), 2, None)])), -+('Expression', ('Dict', (1, 0, 1, 2), [], [])), -+('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])), -+('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])), -+('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -+('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -+('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -+('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -+('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -+('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', 
('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -+('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])), -+('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])), -+('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])), -+('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])), -+('Expression', ('Constant', (1, 0, 1, 2), 10, None)), -+('Expression', ('Constant', (1, 0, 1, 8), 'string', None)), -+('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))), -+('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))), -+('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))), -+('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -+('Expression', ('List', (1, 0, 1, 2), [], ('Load',))), -+('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))), -+('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -+('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))), -+('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])), -+] -+main() ---- /dev/null -+++ b/Lib/test/test_ast/test_ast.py -@@ -0,0 +1,2764 @@ -+import ast -+import builtins -+import dis -+import enum -+import os -+import re -+import sys -+import textwrap -+import types -+import unittest -+import warnings -+import weakref -+from functools import partial -+from textwrap import dedent -+ -+from test import support -+from test.support.import_helper import import_fresh_module 
-+from test.support import os_helper, script_helper -+from test.support.ast_helper import ASTTestMixin -+from test.test_ast.utils import to_tuple -+from test.test_ast.snippets import ( -+ eval_tests, eval_results, exec_tests, exec_results, single_tests, single_results -+) -+ -+ -+class AST_Tests(unittest.TestCase): -+ maxDiff = None -+ -+ def _is_ast_node(self, name, node): -+ if not isinstance(node, type): -+ return False -+ if "ast" not in node.__module__: -+ return False -+ return name != "AST" and name[0].isupper() -+ -+ def _assertTrueorder(self, ast_node, parent_pos): -+ if not isinstance(ast_node, ast.AST) or ast_node._fields is None: -+ return -+ if isinstance(ast_node, (ast.expr, ast.stmt, ast.excepthandler)): -+ node_pos = (ast_node.lineno, ast_node.col_offset) -+ self.assertGreaterEqual(node_pos, parent_pos) -+ parent_pos = (ast_node.lineno, ast_node.col_offset) -+ for name in ast_node._fields: -+ value = getattr(ast_node, name) -+ if isinstance(value, list): -+ first_pos = parent_pos -+ if value and name == "decorator_list": -+ first_pos = (value[0].lineno, value[0].col_offset) -+ for child in value: -+ self._assertTrueorder(child, first_pos) -+ elif value is not None: -+ self._assertTrueorder(value, parent_pos) -+ self.assertEqual(ast_node._fields, ast_node.__match_args__) -+ -+ def test_AST_objects(self): -+ x = ast.AST() -+ self.assertEqual(x._fields, ()) -+ x.foobar = 42 -+ self.assertEqual(x.foobar, 42) -+ self.assertEqual(x.__dict__["foobar"], 42) -+ -+ with self.assertRaises(AttributeError): -+ x.vararg -+ -+ with self.assertRaises(TypeError): -+ # "ast.AST constructor takes 0 positional arguments" -+ ast.AST(2) -+ -+ def test_AST_garbage_collection(self): -+ class X: -+ pass -+ -+ a = ast.AST() -+ a.x = X() -+ a.x.a = a -+ ref = weakref.ref(a.x) -+ del a -+ support.gc_collect() -+ self.assertIsNone(ref()) -+ -+ def test_snippets(self): -+ for input, output, kind in ( -+ (exec_tests, exec_results, "exec"), -+ (single_tests, single_results, "single"), -+ (eval_tests, eval_results, "eval"), -+ ): -+ for i, o in zip(input, output): -+ with self.subTest(action="parsing", input=i): -+ ast_tree = compile(i, "?", kind, ast.PyCF_ONLY_AST) -+ self.assertEqual(to_tuple(ast_tree), o) -+ self._assertTrueorder(ast_tree, (0, 0)) -+ with self.subTest(action="compiling", input=i, kind=kind): -+ compile(ast_tree, "?", kind) -+ -+ def test_ast_validation(self): -+ # compile() is the only function that calls PyAST_Validate -+ snippets_to_validate = exec_tests + single_tests + eval_tests -+ for snippet in snippets_to_validate: -+ tree = ast.parse(snippet) -+ compile(tree, "", "exec") -+ -+ def test_invalid_position_information(self): -+ invalid_linenos = [(10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1)] -+ -+ for lineno, end_lineno in invalid_linenos: -+ with self.subTest(f"Check invalid linenos {lineno}:{end_lineno}"): -+ snippet = "a = 1" -+ tree = ast.parse(snippet) -+ tree.body[0].lineno = lineno -+ tree.body[0].end_lineno = end_lineno -+ with self.assertRaises(ValueError): -+ compile(tree, "", "exec") -+ -+ invalid_col_offsets = [(10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1)] -+ for col_offset, end_col_offset in invalid_col_offsets: -+ with self.subTest( -+ f"Check invalid col_offset {col_offset}:{end_col_offset}" -+ ): -+ snippet = "a = 1" -+ tree = ast.parse(snippet) -+ tree.body[0].col_offset = col_offset -+ tree.body[0].end_col_offset = end_col_offset -+ with self.assertRaises(ValueError): -+ compile(tree, "", "exec") -+ -+ def 
test_compilation_of_ast_nodes_with_default_end_position_values(self): -+ tree = ast.Module( -+ body=[ -+ ast.Import( -+ names=[ast.alias(name="builtins", lineno=1, col_offset=0)], -+ lineno=1, -+ col_offset=0, -+ ), -+ ast.Import( -+ names=[ast.alias(name="traceback", lineno=0, col_offset=0)], -+ lineno=0, -+ col_offset=1, -+ ), -+ ], -+ type_ignores=[], -+ ) -+ -+ # Check that compilation doesn't crash. Note: this may crash explicitly only on debug mode. -+ compile(tree, "", "exec") -+ -+ def test_slice(self): -+ slc = ast.parse("x[::]").body[0].value.slice -+ self.assertIsNone(slc.upper) -+ self.assertIsNone(slc.lower) -+ self.assertIsNone(slc.step) -+ -+ def test_from_import(self): -+ im = ast.parse("from . import y").body[0] -+ self.assertIsNone(im.module) -+ -+ def test_non_interned_future_from_ast(self): -+ mod = ast.parse("from __future__ import division") -+ self.assertIsInstance(mod.body[0], ast.ImportFrom) -+ mod.body[0].module = " __future__ ".strip() -+ compile(mod, "", "exec") -+ -+ def test_alias(self): -+ im = ast.parse("from bar import y").body[0] -+ self.assertEqual(len(im.names), 1) -+ alias = im.names[0] -+ self.assertEqual(alias.name, "y") -+ self.assertIsNone(alias.asname) -+ self.assertEqual(alias.lineno, 1) -+ self.assertEqual(alias.end_lineno, 1) -+ self.assertEqual(alias.col_offset, 16) -+ self.assertEqual(alias.end_col_offset, 17) -+ -+ im = ast.parse("from bar import *").body[0] -+ alias = im.names[0] -+ self.assertEqual(alias.name, "*") -+ self.assertIsNone(alias.asname) -+ self.assertEqual(alias.lineno, 1) -+ self.assertEqual(alias.end_lineno, 1) -+ self.assertEqual(alias.col_offset, 16) -+ self.assertEqual(alias.end_col_offset, 17) -+ -+ im = ast.parse("from bar import y as z").body[0] -+ alias = im.names[0] -+ self.assertEqual(alias.name, "y") -+ self.assertEqual(alias.asname, "z") -+ self.assertEqual(alias.lineno, 1) -+ self.assertEqual(alias.end_lineno, 1) -+ self.assertEqual(alias.col_offset, 16) -+ self.assertEqual(alias.end_col_offset, 22) -+ -+ im = ast.parse("import bar as foo").body[0] -+ alias = im.names[0] -+ self.assertEqual(alias.name, "bar") -+ self.assertEqual(alias.asname, "foo") -+ self.assertEqual(alias.lineno, 1) -+ self.assertEqual(alias.end_lineno, 1) -+ self.assertEqual(alias.col_offset, 7) -+ self.assertEqual(alias.end_col_offset, 17) -+ -+ def test_base_classes(self): -+ self.assertTrue(issubclass(ast.For, ast.stmt)) -+ self.assertTrue(issubclass(ast.Name, ast.expr)) -+ self.assertTrue(issubclass(ast.stmt, ast.AST)) -+ self.assertTrue(issubclass(ast.expr, ast.AST)) -+ self.assertTrue(issubclass(ast.comprehension, ast.AST)) -+ self.assertTrue(issubclass(ast.Gt, ast.AST)) -+ -+ def test_import_deprecated(self): -+ ast = import_fresh_module("ast") -+ depr_regex = ( -+ r"ast\.{} is deprecated and will be removed in Python 3.14; " -+ r"use ast\.Constant instead" -+ ) -+ for name in "Num", "Str", "Bytes", "NameConstant", "Ellipsis": -+ with self.assertWarnsRegex(DeprecationWarning, depr_regex.format(name)): -+ getattr(ast, name) -+ -+ def test_field_attr_existence_deprecated(self): -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num, Str, Bytes, NameConstant, Ellipsis -+ -+ for name in ("Num", "Str", "Bytes", "NameConstant", "Ellipsis"): -+ item = getattr(ast, name) -+ if self._is_ast_node(name, item): -+ with self.subTest(item): -+ with self.assertWarns(DeprecationWarning): -+ x = item() -+ if isinstance(x, ast.AST): -+ self.assertIs(type(x._fields), tuple) -+ -+ def 
test_field_attr_existence(self): -+ for name, item in ast.__dict__.items(): -+ # These emit DeprecationWarnings -+ if name in {"Num", "Str", "Bytes", "NameConstant", "Ellipsis"}: -+ continue -+ # constructor has a different signature -+ if name == "Index": -+ continue -+ if self._is_ast_node(name, item): -+ x = item() -+ if isinstance(x, ast.AST): -+ self.assertIs(type(x._fields), tuple) -+ -+ def test_arguments(self): -+ x = ast.arguments() -+ self.assertEqual( -+ x._fields, -+ ( -+ "posonlyargs", -+ "args", -+ "vararg", -+ "kwonlyargs", -+ "kw_defaults", -+ "kwarg", -+ "defaults", -+ ), -+ ) -+ -+ with self.assertRaises(AttributeError): -+ x.args -+ self.assertIsNone(x.vararg) -+ -+ x = ast.arguments(*range(1, 8)) -+ self.assertEqual(x.args, 2) -+ self.assertEqual(x.vararg, 3) -+ -+ def test_field_attr_writable_deprecated(self): -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ x = ast.Num() -+ # We can assign to _fields -+ x._fields = 666 -+ self.assertEqual(x._fields, 666) -+ -+ def test_field_attr_writable(self): -+ x = ast.Constant() -+ # We can assign to _fields -+ x._fields = 666 -+ self.assertEqual(x._fields, 666) -+ -+ def test_classattrs_deprecated(self): -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num, Str, Bytes, NameConstant, Ellipsis -+ -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ x = ast.Num() -+ self.assertEqual(x._fields, ("value", "kind")) -+ -+ with self.assertRaises(AttributeError): -+ x.value -+ -+ with self.assertRaises(AttributeError): -+ x.n -+ -+ x = ast.Num(42) -+ self.assertEqual(x.value, 42) -+ self.assertEqual(x.n, 42) -+ -+ with self.assertRaises(AttributeError): -+ x.lineno -+ -+ with self.assertRaises(AttributeError): -+ x.foobar -+ -+ x = ast.Num(lineno=2) -+ self.assertEqual(x.lineno, 2) -+ -+ x = ast.Num(42, lineno=0) -+ self.assertEqual(x.lineno, 0) -+ self.assertEqual(x._fields, ("value", "kind")) -+ self.assertEqual(x.value, 42) -+ self.assertEqual(x.n, 42) -+ -+ self.assertRaises(TypeError, ast.Num, 1, None, 2) -+ self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=0) -+ -+ # Arbitrary keyword arguments are supported -+ self.assertEqual(ast.Num(1, foo="bar").foo, "bar") -+ -+ with self.assertRaisesRegex( -+ TypeError, "Num got multiple values for argument 'n'" -+ ): -+ ast.Num(1, n=2) -+ -+ self.assertEqual(ast.Num(42).n, 42) -+ self.assertEqual(ast.Num(4.25).n, 4.25) -+ self.assertEqual(ast.Num(4.25j).n, 4.25j) -+ self.assertEqual(ast.Str("42").s, "42") -+ self.assertEqual(ast.Bytes(b"42").s, b"42") -+ self.assertIs(ast.NameConstant(True).value, True) -+ self.assertIs(ast.NameConstant(False).value, False) -+ self.assertIs(ast.NameConstant(None).value, None) -+ -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and 
will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute s is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute s is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ ], -+ ) -+ -+ def test_classattrs(self): -+ x = ast.Constant() -+ self.assertEqual(x._fields, ("value", "kind")) -+ -+ with self.assertRaises(AttributeError): -+ x.value -+ -+ x = ast.Constant(42) -+ self.assertEqual(x.value, 42) -+ -+ with self.assertRaises(AttributeError): -+ x.lineno -+ -+ with self.assertRaises(AttributeError): -+ x.foobar -+ -+ x = ast.Constant(lineno=2) -+ self.assertEqual(x.lineno, 2) -+ -+ x = ast.Constant(42, lineno=0) -+ self.assertEqual(x.lineno, 0) -+ self.assertEqual(x._fields, ("value", "kind")) -+ self.assertEqual(x.value, 42) -+ -+ self.assertRaises(TypeError, ast.Constant, 1, None, 2) -+ self.assertRaises(TypeError, ast.Constant, 1, None, 2, lineno=0) -+ -+ # Arbitrary keyword arguments are supported -+ self.assertEqual(ast.Constant(1, foo="bar").foo, "bar") -+ -+ with self.assertRaisesRegex( -+ TypeError, "Constant got multiple values for argument 'value'" -+ ): -+ ast.Constant(1, value=2) -+ -+ self.assertEqual(ast.Constant(42).value, 42) -+ self.assertEqual(ast.Constant(4.25).value, 4.25) -+ self.assertEqual(ast.Constant(4.25j).value, 4.25j) -+ self.assertEqual(ast.Constant("42").value, "42") -+ self.assertEqual(ast.Constant(b"42").value, b"42") -+ self.assertIs(ast.Constant(True).value, True) -+ self.assertIs(ast.Constant(False).value, False) -+ self.assertIs(ast.Constant(None).value, None) -+ self.assertIs(ast.Constant(...).value, ...) 
-+ -+ def test_realtype(self): -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num, Str, Bytes, NameConstant, Ellipsis -+ -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ self.assertIs(type(ast.Num(42)), ast.Constant) -+ self.assertIs(type(ast.Num(4.25)), ast.Constant) -+ self.assertIs(type(ast.Num(4.25j)), ast.Constant) -+ self.assertIs(type(ast.Str("42")), ast.Constant) -+ self.assertIs(type(ast.Bytes(b"42")), ast.Constant) -+ self.assertIs(type(ast.NameConstant(True)), ast.Constant) -+ self.assertIs(type(ast.NameConstant(False)), ast.Constant) -+ self.assertIs(type(ast.NameConstant(None)), ast.Constant) -+ self.assertIs(type(ast.Ellipsis()), ast.Constant) -+ -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Ellipsis is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ ], -+ ) -+ -+ def test_isinstance(self): -+ from ast import Constant -+ -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num, Str, Bytes, NameConstant, Ellipsis -+ -+ cls_depr_msg = ( -+ "ast.{} is deprecated and will be removed in Python 3.14; " -+ "use ast.Constant instead" -+ ) -+ -+ assertNumDeprecated = partial( -+ self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Num") -+ ) -+ assertStrDeprecated = partial( -+ self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Str") -+ ) -+ assertBytesDeprecated = partial( -+ self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Bytes") -+ ) -+ assertNameConstantDeprecated = partial( -+ self.assertWarnsRegex, -+ DeprecationWarning, -+ cls_depr_msg.format("NameConstant"), -+ ) -+ assertEllipsisDeprecated = partial( -+ self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Ellipsis") -+ ) -+ -+ for arg in 42, 4.2, 4.2j: -+ with self.subTest(arg=arg): -+ with assertNumDeprecated(): -+ n = Num(arg) -+ with assertNumDeprecated(): -+ self.assertIsInstance(n, Num) -+ -+ with assertStrDeprecated(): -+ s = Str("42") -+ with assertStrDeprecated(): -+ self.assertIsInstance(s, Str) -+ -+ with assertBytesDeprecated(): -+ b = Bytes(b"42") -+ with assertBytesDeprecated(): -+ self.assertIsInstance(b, Bytes) -+ -+ for arg in True, False, None: -+ with self.subTest(arg=arg): -+ with assertNameConstantDeprecated(): -+ n = NameConstant(arg) -+ with assertNameConstantDeprecated(): -+ self.assertIsInstance(n, NameConstant) -+ -+ with assertEllipsisDeprecated(): -+ e = Ellipsis() -+ with assertEllipsisDeprecated(): -+ self.assertIsInstance(e, Ellipsis) -+ -+ for arg in 42, 4.2, 4.2j: -+ with self.subTest(arg=arg): -+ with assertNumDeprecated(): -+ self.assertIsInstance(Constant(arg), Num) -+ 
-+ with assertStrDeprecated(): -+ self.assertIsInstance(Constant("42"), Str) -+ -+ with assertBytesDeprecated(): -+ self.assertIsInstance(Constant(b"42"), Bytes) -+ -+ for arg in True, False, None: -+ with self.subTest(arg=arg): -+ with assertNameConstantDeprecated(): -+ self.assertIsInstance(Constant(arg), NameConstant) -+ -+ with assertEllipsisDeprecated(): -+ self.assertIsInstance(Constant(...), Ellipsis) -+ -+ with assertStrDeprecated(): -+ s = Str("42") -+ assertNumDeprecated(self.assertNotIsInstance, s, Num) -+ assertBytesDeprecated(self.assertNotIsInstance, s, Bytes) -+ -+ with assertNumDeprecated(): -+ n = Num(42) -+ assertStrDeprecated(self.assertNotIsInstance, n, Str) -+ assertNameConstantDeprecated(self.assertNotIsInstance, n, NameConstant) -+ assertEllipsisDeprecated(self.assertNotIsInstance, n, Ellipsis) -+ -+ with assertNameConstantDeprecated(): -+ n = NameConstant(True) -+ with assertNumDeprecated(): -+ self.assertNotIsInstance(n, Num) -+ -+ with assertNameConstantDeprecated(): -+ n = NameConstant(False) -+ with assertNumDeprecated(): -+ self.assertNotIsInstance(n, Num) -+ -+ for arg in "42", True, False: -+ with self.subTest(arg=arg): -+ with assertNumDeprecated(): -+ self.assertNotIsInstance(Constant(arg), Num) -+ -+ assertStrDeprecated(self.assertNotIsInstance, Constant(42), Str) -+ assertBytesDeprecated(self.assertNotIsInstance, Constant("42"), Bytes) -+ assertNameConstantDeprecated( -+ self.assertNotIsInstance, Constant(42), NameConstant -+ ) -+ assertEllipsisDeprecated(self.assertNotIsInstance, Constant(42), Ellipsis) -+ assertNumDeprecated(self.assertNotIsInstance, Constant(), Num) -+ assertStrDeprecated(self.assertNotIsInstance, Constant(), Str) -+ assertBytesDeprecated(self.assertNotIsInstance, Constant(), Bytes) -+ assertNameConstantDeprecated(self.assertNotIsInstance, Constant(), NameConstant) -+ assertEllipsisDeprecated(self.assertNotIsInstance, Constant(), Ellipsis) -+ -+ class S(str): -+ pass -+ -+ with assertStrDeprecated(): -+ self.assertIsInstance(Constant(S("42")), Str) -+ with assertNumDeprecated(): -+ self.assertNotIsInstance(Constant(S("42")), Num) -+ -+ def test_constant_subclasses_deprecated(self): -+ with warnings.catch_warnings(): -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num -+ -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ -+ class N(ast.Num): -+ def __init__(self, *args, **kwargs): -+ super().__init__(*args, **kwargs) -+ self.z = "spam" -+ -+ class N2(ast.Num): -+ pass -+ -+ n = N(42) -+ self.assertEqual(n.n, 42) -+ self.assertEqual(n.z, "spam") -+ self.assertIs(type(n), N) -+ self.assertIsInstance(n, N) -+ self.assertIsInstance(n, ast.Num) -+ self.assertNotIsInstance(n, N2) -+ self.assertNotIsInstance(ast.Num(42), N) -+ n = N(n=42) -+ self.assertEqual(n.n, 42) -+ self.assertIs(type(n), N) -+ -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ ], -+ ) -+ -+ def test_constant_subclasses(self): -+ class 
N(ast.Constant):
-+            def __init__(self, *args, **kwargs):
-+                super().__init__(*args, **kwargs)
-+                self.z = "spam"
-+
-+        class N2(ast.Constant):
-+            pass
-+
-+        n = N(42)
-+        self.assertEqual(n.value, 42)
-+        self.assertEqual(n.z, "spam")
-+        self.assertEqual(type(n), N)
-+        self.assertTrue(isinstance(n, N))
-+        self.assertTrue(isinstance(n, ast.Constant))
-+        self.assertFalse(isinstance(n, N2))
-+        self.assertFalse(isinstance(ast.Constant(42), N))
-+        n = N(value=42)
-+        self.assertEqual(n.value, 42)
-+        self.assertEqual(type(n), N)
-+
-+    def test_module(self):
-+        body = [ast.Constant(42)]
-+        x = ast.Module(body, [])
-+        self.assertEqual(x.body, body)
-+
-+    def test_nodeclasses(self):
-+        # Zero arguments constructor explicitly allowed
-+        x = ast.BinOp()
-+        self.assertEqual(x._fields, ("left", "op", "right"))
-+
-+        # Random attribute allowed too
-+        x.foobarbaz = 5
-+        self.assertEqual(x.foobarbaz, 5)
-+
-+        n1 = ast.Constant(1)
-+        n3 = ast.Constant(3)
-+        addop = ast.Add()
-+        x = ast.BinOp(n1, addop, n3)
-+        self.assertEqual(x.left, n1)
-+        self.assertEqual(x.op, addop)
-+        self.assertEqual(x.right, n3)
-+
-+        x = ast.BinOp(1, 2, 3)
-+        self.assertEqual(x.left, 1)
-+        self.assertEqual(x.op, 2)
-+        self.assertEqual(x.right, 3)
-+
-+        x = ast.BinOp(1, 2, 3, lineno=0)
-+        self.assertEqual(x.left, 1)
-+        self.assertEqual(x.op, 2)
-+        self.assertEqual(x.right, 3)
-+        self.assertEqual(x.lineno, 0)
-+
-+        # node raises exception when given too many arguments
-+        self.assertRaises(TypeError, ast.BinOp, 1, 2, 3, 4)
-+        # node raises exception when given too many arguments
-+        self.assertRaises(TypeError, ast.BinOp, 1, 2, 3, 4, lineno=0)
-+
-+        # can set attributes through kwargs too
-+        x = ast.BinOp(left=1, op=2, right=3, lineno=0)
-+        self.assertEqual(x.left, 1)
-+        self.assertEqual(x.op, 2)
-+        self.assertEqual(x.right, 3)
-+        self.assertEqual(x.lineno, 0)
-+
-+        # Random kwargs also allowed
-+        x = ast.BinOp(1, 2, 3, foobarbaz=42)
-+        self.assertEqual(x.foobarbaz, 42)
-+
-+    def test_no_fields(self):
-+        # this used to fail because Sub._fields was None
-+        x = ast.Sub()
-+        self.assertEqual(x._fields, ())
-+
-+    def test_pickling(self):
-+        import pickle
-+
-+        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
-+            for ast in (compile(i, "?", "exec", 0x400) for i in exec_tests):
-+                ast2 = pickle.loads(pickle.dumps(ast, protocol))
-+                self.assertEqual(to_tuple(ast2), to_tuple(ast))
-+
-+    def test_invalid_sum(self):
-+        pos = dict(lineno=2, col_offset=3)
-+        m = ast.Module([ast.Expr(ast.expr(**pos), **pos)], [])
-+        with self.assertRaises(TypeError) as cm:
-+            compile(m, "<test>", "exec")
-+        self.assertIn("but got <ast.expr", str(cm.exception))
-+
-+    def test_invalid_identifier(self):
-+        m = ast.Module([ast.Expr(ast.Name(42, ast.Load()))], [])
-+        ast.fix_missing_locations(m)
-+        with self.assertRaises(TypeError) as cm:
-+            compile(m, "<test>", "exec")
-+        self.assertIn("identifier must be of type str", str(cm.exception))
-+
-+    def test_invalid_constant(self):
-+        for invalid_constant in int, (1, 2, int), frozenset((1, 2, int)):
-+            e = ast.Expression(body=ast.Constant(invalid_constant))
-+            ast.fix_missing_locations(e)
-+            with self.assertRaisesRegex(TypeError, "invalid type in Constant: type"):
-+                compile(e, "<test>", "eval")
-+
-+    def test_empty_yield_from(self):
-+        # Issue 16546: yield from value is not optional.
-+        empty_yield_from = ast.parse("def f():\n yield from g()")
-+        empty_yield_from.body[0].body[0].value.value = None
-+        with self.assertRaises(ValueError) as cm:
-+            compile(empty_yield_from, "<test>", "exec")
-+        self.assertIn("field 'value' is required", str(cm.exception))
-+
-+    @support.cpython_only
-+    def test_issue31592(self):
-+        # There shouldn't be an assertion failure in case of a bad
-+        # unicodedata.normalize().
-+ import unicodedata -+ -+ def bad_normalize(*args): -+ return None -+ -+ with support.swap_attr(unicodedata, "normalize", bad_normalize): -+ self.assertRaises(TypeError, ast.parse, "\u03d5") -+ -+ def test_issue18374_binop_col_offset(self): -+ tree = ast.parse("4+5+6+7") -+ parent_binop = tree.body[0].value -+ child_binop = parent_binop.left -+ grandchild_binop = child_binop.left -+ self.assertEqual(parent_binop.col_offset, 0) -+ self.assertEqual(parent_binop.end_col_offset, 7) -+ self.assertEqual(child_binop.col_offset, 0) -+ self.assertEqual(child_binop.end_col_offset, 5) -+ self.assertEqual(grandchild_binop.col_offset, 0) -+ self.assertEqual(grandchild_binop.end_col_offset, 3) -+ -+ tree = ast.parse("4+5-\\\n 6-7") -+ parent_binop = tree.body[0].value -+ child_binop = parent_binop.left -+ grandchild_binop = child_binop.left -+ self.assertEqual(parent_binop.col_offset, 0) -+ self.assertEqual(parent_binop.lineno, 1) -+ self.assertEqual(parent_binop.end_col_offset, 4) -+ self.assertEqual(parent_binop.end_lineno, 2) -+ -+ self.assertEqual(child_binop.col_offset, 0) -+ self.assertEqual(child_binop.lineno, 1) -+ self.assertEqual(child_binop.end_col_offset, 2) -+ self.assertEqual(child_binop.end_lineno, 2) -+ -+ self.assertEqual(grandchild_binop.col_offset, 0) -+ self.assertEqual(grandchild_binop.lineno, 1) -+ self.assertEqual(grandchild_binop.end_col_offset, 3) -+ self.assertEqual(grandchild_binop.end_lineno, 1) -+ -+ def test_issue39579_dotted_name_end_col_offset(self): -+ tree = ast.parse("@a.b.c\ndef f(): pass") -+ attr_b = tree.body[0].decorator_list[0].value -+ self.assertEqual(attr_b.end_col_offset, 4) -+ -+ def test_ast_asdl_signature(self): -+ self.assertEqual( -+ ast.withitem.__doc__, "withitem(expr context_expr, expr? optional_vars)" -+ ) -+ self.assertEqual(ast.GtE.__doc__, "GtE") -+ self.assertEqual(ast.Name.__doc__, "Name(identifier id, expr_context ctx)") -+ self.assertEqual( -+ ast.cmpop.__doc__, -+ "cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn", -+ ) -+ expressions = [f" | {node.__doc__}" for node in ast.expr.__subclasses__()] -+ expressions[0] = f"expr = {ast.expr.__subclasses__()[0].__doc__}" -+ self.assertCountEqual(ast.expr.__doc__.split("\n"), expressions) -+ -+ def test_positional_only_feature_version(self): -+ ast.parse("def foo(x, /): ...", feature_version=(3, 8)) -+ ast.parse("def bar(x=1, /): ...", feature_version=(3, 8)) -+ with self.assertRaises(SyntaxError): -+ ast.parse("def foo(x, /): ...", feature_version=(3, 7)) -+ with self.assertRaises(SyntaxError): -+ ast.parse("def bar(x=1, /): ...", feature_version=(3, 7)) -+ -+ ast.parse("lambda x, /: ...", feature_version=(3, 8)) -+ ast.parse("lambda x=1, /: ...", feature_version=(3, 8)) -+ with self.assertRaises(SyntaxError): -+ ast.parse("lambda x, /: ...", feature_version=(3, 7)) -+ with self.assertRaises(SyntaxError): -+ ast.parse("lambda x=1, /: ...", feature_version=(3, 7)) -+ -+ def test_assignment_expression_feature_version(self): -+ ast.parse("(x := 0)", feature_version=(3, 8)) -+ with self.assertRaises(SyntaxError): -+ ast.parse("(x := 0)", feature_version=(3, 7)) -+ -+ def test_conditional_context_managers_parse_with_low_feature_version(self): -+ # regression test for gh-115881 -+ ast.parse("with (x() if y else z()): ...", feature_version=(3, 8)) -+ -+ def test_exception_groups_feature_version(self): -+ code = dedent(""" -+ try: ... -+ except* Exception: ... 
-+ """) -+ ast.parse(code) -+ with self.assertRaises(SyntaxError): -+ ast.parse(code, feature_version=(3, 10)) -+ -+ def test_type_params_feature_version(self): -+ samples = [ -+ "type X = int", -+ "class X[T]: pass", -+ "def f[T](): pass", -+ ] -+ for sample in samples: -+ with self.subTest(sample): -+ ast.parse(sample) -+ with self.assertRaises(SyntaxError): -+ ast.parse(sample, feature_version=(3, 11)) -+ -+ def test_invalid_major_feature_version(self): -+ with self.assertRaises(ValueError): -+ ast.parse("pass", feature_version=(2, 7)) -+ with self.assertRaises(ValueError): -+ ast.parse("pass", feature_version=(4, 0)) -+ -+ def test_constant_as_name(self): -+ for constant in "True", "False", "None": -+ expr = ast.Expression(ast.Name(constant, ast.Load())) -+ ast.fix_missing_locations(expr) -+ with self.assertRaisesRegex( -+ ValueError, f"identifier field can't represent '{constant}' constant" -+ ): -+ compile(expr, "", "eval") -+ -+ def test_precedence_enum(self): -+ class _Precedence(enum.IntEnum): -+ """Precedence table that originated from python grammar.""" -+ -+ NAMED_EXPR = enum.auto() # := -+ TUPLE = enum.auto() # , -+ YIELD = enum.auto() # 'yield', 'yield from' -+ TEST = enum.auto() # 'if'-'else', 'lambda' -+ OR = enum.auto() # 'or' -+ AND = enum.auto() # 'and' -+ NOT = enum.auto() # 'not' -+ CMP = enum.auto() # '<', '>', '==', '>=', '<=', '!=', -+ # 'in', 'not in', 'is', 'is not' -+ EXPR = enum.auto() -+ BOR = EXPR # '|' -+ BXOR = enum.auto() # '^' -+ BAND = enum.auto() # '&' -+ SHIFT = enum.auto() # '<<', '>>' -+ ARITH = enum.auto() # '+', '-' -+ TERM = enum.auto() # '*', '@', '/', '%', '//' -+ FACTOR = enum.auto() # unary '+', '-', '~' -+ POWER = enum.auto() # '**' -+ AWAIT = enum.auto() # 'await' -+ ATOM = enum.auto() -+ -+ def next(self): -+ try: -+ return self.__class__(self + 1) -+ except ValueError: -+ return self -+ -+ enum._test_simple_enum(_Precedence, ast._Precedence) -+ -+ @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") -+ @support.cpython_only -+ def test_ast_recursion_limit(self): -+ fail_depth = support.C_RECURSION_LIMIT + 1 -+ crash_depth = 100_000 -+ success_depth = int(support.C_RECURSION_LIMIT * 0.9) -+ -+ def check_limit(prefix, repeated): -+ expect_ok = prefix + repeated * success_depth -+ ast.parse(expect_ok) -+ for depth in (fail_depth, crash_depth): -+ broken = prefix + repeated * depth -+ details = "Compiling ({!r} + {!r} * {})".format(prefix, repeated, depth) -+ with self.assertRaises(RecursionError, msg=details): -+ with support.infinite_recursion(): -+ ast.parse(broken) -+ -+ check_limit("a", "()") -+ check_limit("a", ".b") -+ check_limit("a", "[0]") -+ check_limit("a", "*a") -+ -+ def test_null_bytes(self): -+ with self.assertRaises( -+ SyntaxError, msg="source code string cannot contain null bytes" -+ ): -+ ast.parse("a\0b") -+ -+ def assert_none_check(self, node: type[ast.AST], attr: str, source: str) -> None: -+ with self.subTest(f"{node.__name__}.{attr}"): -+ tree = ast.parse(source) -+ found = 0 -+ for child in ast.walk(tree): -+ if isinstance(child, node): -+ setattr(child, attr, None) -+ found += 1 -+ self.assertEqual(found, 1) -+ e = re.escape(f"field '{attr}' is required for {node.__name__}") -+ with self.assertRaisesRegex(ValueError, f"^{e}$"): -+ compile(tree, "", "exec") -+ -+ def test_none_checks(self) -> None: -+ tests = [ -+ (ast.alias, "name", "import spam as SPAM"), -+ (ast.arg, "arg", "def spam(SPAM): spam"), -+ (ast.comprehension, "target", "[spam for SPAM in spam]"), -+ (ast.comprehension, "iter", "[spam for 
spam in SPAM]"), -+ (ast.keyword, "value", "spam(**SPAM)"), -+ (ast.match_case, "pattern", "match spam:\n case SPAM: spam"), -+ (ast.withitem, "context_expr", "with SPAM: spam"), -+ ] -+ for node, attr, source in tests: -+ self.assert_none_check(node, attr, source) -+ -+ -+class ASTHelpers_Test(unittest.TestCase): -+ maxDiff = None -+ -+ def test_parse(self): -+ a = ast.parse("foo(1 + 1)") -+ b = compile("foo(1 + 1)", "", "exec", ast.PyCF_ONLY_AST) -+ self.assertEqual(ast.dump(a), ast.dump(b)) -+ -+ def test_parse_in_error(self): -+ try: -+ 1 / 0 -+ except Exception: -+ with self.assertRaises(SyntaxError) as e: -+ ast.literal_eval(r"'\U'") -+ self.assertIsNotNone(e.exception.__context__) -+ -+ def test_dump(self): -+ node = ast.parse('spam(eggs, "and cheese")') -+ self.assertEqual( -+ ast.dump(node), -+ "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), " -+ "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')], " -+ "keywords=[]))], type_ignores=[])", -+ ) -+ self.assertEqual( -+ ast.dump(node, annotate_fields=False), -+ "Module([Expr(Call(Name('spam', Load()), [Name('eggs', Load()), " -+ "Constant('and cheese')], []))], [])", -+ ) -+ self.assertEqual( -+ ast.dump(node, include_attributes=True), -+ "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load(), " -+ "lineno=1, col_offset=0, end_lineno=1, end_col_offset=4), " -+ "args=[Name(id='eggs', ctx=Load(), lineno=1, col_offset=5, " -+ "end_lineno=1, end_col_offset=9), Constant(value='and cheese', " -+ "lineno=1, col_offset=11, end_lineno=1, end_col_offset=23)], keywords=[], " -+ "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24), " -+ "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24)], type_ignores=[])", -+ ) -+ -+ def test_dump_indent(self): -+ node = ast.parse('spam(eggs, "and cheese")') -+ self.assertEqual( -+ ast.dump(node, indent=3), -+ """\ -+Module( -+ body=[ -+ Expr( -+ value=Call( -+ func=Name(id='spam', ctx=Load()), -+ args=[ -+ Name(id='eggs', ctx=Load()), -+ Constant(value='and cheese')], -+ keywords=[]))], -+ type_ignores=[])""", -+ ) -+ self.assertEqual( -+ ast.dump(node, annotate_fields=False, indent="\t"), -+ """\ -+Module( -+\t[ -+\t\tExpr( -+\t\t\tCall( -+\t\t\t\tName('spam', Load()), -+\t\t\t\t[ -+\t\t\t\t\tName('eggs', Load()), -+\t\t\t\t\tConstant('and cheese')], -+\t\t\t\t[]))], -+\t[])""", -+ ) -+ self.assertEqual( -+ ast.dump(node, include_attributes=True, indent=3), -+ """\ -+Module( -+ body=[ -+ Expr( -+ value=Call( -+ func=Name( -+ id='spam', -+ ctx=Load(), -+ lineno=1, -+ col_offset=0, -+ end_lineno=1, -+ end_col_offset=4), -+ args=[ -+ Name( -+ id='eggs', -+ ctx=Load(), -+ lineno=1, -+ col_offset=5, -+ end_lineno=1, -+ end_col_offset=9), -+ Constant( -+ value='and cheese', -+ lineno=1, -+ col_offset=11, -+ end_lineno=1, -+ end_col_offset=23)], -+ keywords=[], -+ lineno=1, -+ col_offset=0, -+ end_lineno=1, -+ end_col_offset=24), -+ lineno=1, -+ col_offset=0, -+ end_lineno=1, -+ end_col_offset=24)], -+ type_ignores=[])""", -+ ) -+ -+ def test_dump_incomplete(self): -+ node = ast.Raise(lineno=3, col_offset=4) -+ self.assertEqual(ast.dump(node), "Raise()") -+ self.assertEqual( -+ ast.dump(node, include_attributes=True), "Raise(lineno=3, col_offset=4)" -+ ) -+ node = ast.Raise(exc=ast.Name(id="e", ctx=ast.Load()), lineno=3, col_offset=4) -+ self.assertEqual(ast.dump(node), "Raise(exc=Name(id='e', ctx=Load()))") -+ self.assertEqual( -+ ast.dump(node, annotate_fields=False), "Raise(Name('e', Load()))" -+ ) -+ self.assertEqual( -+ ast.dump(node, include_attributes=True), -+ 
"Raise(exc=Name(id='e', ctx=Load()), lineno=3, col_offset=4)", -+ ) -+ self.assertEqual( -+ ast.dump(node, annotate_fields=False, include_attributes=True), -+ "Raise(Name('e', Load()), lineno=3, col_offset=4)", -+ ) -+ node = ast.Raise(cause=ast.Name(id="e", ctx=ast.Load())) -+ self.assertEqual(ast.dump(node), "Raise(cause=Name(id='e', ctx=Load()))") -+ self.assertEqual( -+ ast.dump(node, annotate_fields=False), "Raise(cause=Name('e', Load()))" -+ ) -+ -+ def test_copy_location(self): -+ src = ast.parse("1 + 1", mode="eval") -+ src.body.right = ast.copy_location(ast.Constant(2), src.body.right) -+ self.assertEqual( -+ ast.dump(src, include_attributes=True), -+ "Expression(body=BinOp(left=Constant(value=1, lineno=1, col_offset=0, " -+ "end_lineno=1, end_col_offset=1), op=Add(), right=Constant(value=2, " -+ "lineno=1, col_offset=4, end_lineno=1, end_col_offset=5), lineno=1, " -+ "col_offset=0, end_lineno=1, end_col_offset=5))", -+ ) -+ src = ast.Call(col_offset=1, lineno=1, end_lineno=1, end_col_offset=1) -+ new = ast.copy_location(src, ast.Call(col_offset=None, lineno=None)) -+ self.assertIsNone(new.end_lineno) -+ self.assertIsNone(new.end_col_offset) -+ self.assertEqual(new.lineno, 1) -+ self.assertEqual(new.col_offset, 1) -+ -+ def test_fix_missing_locations(self): -+ src = ast.parse('write("spam")') -+ src.body.append( -+ ast.Expr(ast.Call(ast.Name("spam", ast.Load()), [ast.Constant("eggs")], [])) -+ ) -+ self.assertEqual(src, ast.fix_missing_locations(src)) -+ self.maxDiff = None -+ self.assertEqual( -+ ast.dump(src, include_attributes=True), -+ "Module(body=[Expr(value=Call(func=Name(id='write', ctx=Load(), " -+ "lineno=1, col_offset=0, end_lineno=1, end_col_offset=5), " -+ "args=[Constant(value='spam', lineno=1, col_offset=6, end_lineno=1, " -+ "end_col_offset=12)], keywords=[], lineno=1, col_offset=0, end_lineno=1, " -+ "end_col_offset=13), lineno=1, col_offset=0, end_lineno=1, " -+ "end_col_offset=13), Expr(value=Call(func=Name(id='spam', ctx=Load(), " -+ "lineno=1, col_offset=0, end_lineno=1, end_col_offset=0), " -+ "args=[Constant(value='eggs', lineno=1, col_offset=0, end_lineno=1, " -+ "end_col_offset=0)], keywords=[], lineno=1, col_offset=0, end_lineno=1, " -+ "end_col_offset=0), lineno=1, col_offset=0, end_lineno=1, end_col_offset=0)], " -+ "type_ignores=[])", -+ ) -+ -+ def test_increment_lineno(self): -+ src = ast.parse("1 + 1", mode="eval") -+ self.assertEqual(ast.increment_lineno(src, n=3), src) -+ self.assertEqual( -+ ast.dump(src, include_attributes=True), -+ "Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, " -+ "end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, " -+ "lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, " -+ "col_offset=0, end_lineno=4, end_col_offset=5))", -+ ) -+ # issue10869: do not increment lineno of root twice -+ src = ast.parse("1 + 1", mode="eval") -+ self.assertEqual(ast.increment_lineno(src.body, n=3), src.body) -+ self.assertEqual( -+ ast.dump(src, include_attributes=True), -+ "Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, " -+ "end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, " -+ "lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, " -+ "col_offset=0, end_lineno=4, end_col_offset=5))", -+ ) -+ src = ast.Call( -+ func=ast.Name("test", ast.Load()), args=[], keywords=[], lineno=1 -+ ) -+ self.assertEqual(ast.increment_lineno(src).lineno, 2) -+ self.assertIsNone(ast.increment_lineno(src).end_lineno) -+ -+ def 
test_increment_lineno_on_module(self): -+ src = ast.parse( -+ dedent("""\ -+ a = 1 -+ b = 2 # type: ignore -+ c = 3 -+ d = 4 # type: ignore@tag -+ """), -+ type_comments=True, -+ ) -+ ast.increment_lineno(src, n=5) -+ self.assertEqual(src.type_ignores[0].lineno, 7) -+ self.assertEqual(src.type_ignores[1].lineno, 9) -+ self.assertEqual(src.type_ignores[1].tag, "@tag") -+ -+ def test_iter_fields(self): -+ node = ast.parse("foo()", mode="eval") -+ d = dict(ast.iter_fields(node.body)) -+ self.assertEqual(d.pop("func").id, "foo") -+ self.assertEqual(d, {"keywords": [], "args": []}) -+ -+ def test_iter_child_nodes(self): -+ node = ast.parse("spam(23, 42, eggs='leek')", mode="eval") -+ self.assertEqual(len(list(ast.iter_child_nodes(node.body))), 4) -+ iterator = ast.iter_child_nodes(node.body) -+ self.assertEqual(next(iterator).id, "spam") -+ self.assertEqual(next(iterator).value, 23) -+ self.assertEqual(next(iterator).value, 42) -+ self.assertEqual( -+ ast.dump(next(iterator)), -+ "keyword(arg='eggs', value=Constant(value='leek'))", -+ ) -+ -+ def test_get_docstring(self): -+ node = ast.parse('"""line one\n line two"""') -+ self.assertEqual(ast.get_docstring(node), "line one\nline two") -+ -+ node = ast.parse('class foo:\n """line one\n line two"""') -+ self.assertEqual(ast.get_docstring(node.body[0]), "line one\nline two") -+ -+ node = ast.parse('def foo():\n """line one\n line two"""') -+ self.assertEqual(ast.get_docstring(node.body[0]), "line one\nline two") -+ -+ node = ast.parse('async def foo():\n """spam\n ham"""') -+ self.assertEqual(ast.get_docstring(node.body[0]), "spam\nham") -+ -+ node = ast.parse('async def foo():\n """spam\n ham"""') -+ self.assertEqual(ast.get_docstring(node.body[0], clean=False), "spam\n ham") -+ -+ node = ast.parse("x") -+ self.assertRaises(TypeError, ast.get_docstring, node.body[0]) -+ -+ def test_get_docstring_none(self): -+ self.assertIsNone(ast.get_docstring(ast.parse(""))) -+ node = ast.parse('x = "not docstring"') -+ self.assertIsNone(ast.get_docstring(node)) -+ node = ast.parse("def foo():\n pass") -+ self.assertIsNone(ast.get_docstring(node)) -+ -+ node = ast.parse("class foo:\n pass") -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ node = ast.parse('class foo:\n x = "not docstring"') -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ node = ast.parse("class foo:\n def bar(self): pass") -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ -+ node = ast.parse("def foo():\n pass") -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ node = ast.parse('def foo():\n x = "not docstring"') -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ -+ node = ast.parse("async def foo():\n pass") -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ node = ast.parse('async def foo():\n x = "not docstring"') -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ -+ node = ast.parse("async def foo():\n 42") -+ self.assertIsNone(ast.get_docstring(node.body[0])) -+ -+ def test_multi_line_docstring_col_offset_and_lineno_issue16806(self): -+ node = ast.parse( -+ '"""line one\nline two"""\n\n' -+ 'def foo():\n """line one\n line two"""\n\n' -+ ' def bar():\n """line one\n line two"""\n' -+ ' """line one\n line two"""\n' -+ '"""line one\nline two"""\n\n' -+ ) -+ self.assertEqual(node.body[0].col_offset, 0) -+ self.assertEqual(node.body[0].lineno, 1) -+ self.assertEqual(node.body[1].body[0].col_offset, 2) -+ self.assertEqual(node.body[1].body[0].lineno, 5) -+ self.assertEqual(node.body[1].body[1].body[0].col_offset, 4) -+ 
self.assertEqual(node.body[1].body[1].body[0].lineno, 9) -+ self.assertEqual(node.body[1].body[2].col_offset, 2) -+ self.assertEqual(node.body[1].body[2].lineno, 11) -+ self.assertEqual(node.body[2].col_offset, 0) -+ self.assertEqual(node.body[2].lineno, 13) -+ -+ def test_elif_stmt_start_position(self): -+ node = ast.parse("if a:\n pass\nelif b:\n pass\n") -+ elif_stmt = node.body[0].orelse[0] -+ self.assertEqual(elif_stmt.lineno, 3) -+ self.assertEqual(elif_stmt.col_offset, 0) -+ -+ def test_elif_stmt_start_position_with_else(self): -+ node = ast.parse("if a:\n pass\nelif b:\n pass\nelse:\n pass\n") -+ elif_stmt = node.body[0].orelse[0] -+ self.assertEqual(elif_stmt.lineno, 3) -+ self.assertEqual(elif_stmt.col_offset, 0) -+ -+ def test_starred_expr_end_position_within_call(self): -+ node = ast.parse("f(*[0, 1])") -+ starred_expr = node.body[0].value.args[0] -+ self.assertEqual(starred_expr.end_lineno, 1) -+ self.assertEqual(starred_expr.end_col_offset, 9) -+ -+ def test_literal_eval(self): -+ self.assertEqual(ast.literal_eval("[1, 2, 3]"), [1, 2, 3]) -+ self.assertEqual(ast.literal_eval('{"foo": 42}'), {"foo": 42}) -+ self.assertEqual(ast.literal_eval("(True, False, None)"), (True, False, None)) -+ self.assertEqual(ast.literal_eval("{1, 2, 3}"), {1, 2, 3}) -+ self.assertEqual(ast.literal_eval('b"hi"'), b"hi") -+ self.assertEqual(ast.literal_eval("set()"), set()) -+ self.assertRaises(ValueError, ast.literal_eval, "foo()") -+ self.assertEqual(ast.literal_eval("6"), 6) -+ self.assertEqual(ast.literal_eval("+6"), 6) -+ self.assertEqual(ast.literal_eval("-6"), -6) -+ self.assertEqual(ast.literal_eval("3.25"), 3.25) -+ self.assertEqual(ast.literal_eval("+3.25"), 3.25) -+ self.assertEqual(ast.literal_eval("-3.25"), -3.25) -+ self.assertEqual(repr(ast.literal_eval("-0.0")), "-0.0") -+ self.assertRaises(ValueError, ast.literal_eval, "++6") -+ self.assertRaises(ValueError, ast.literal_eval, "+True") -+ self.assertRaises(ValueError, ast.literal_eval, "2+3") -+ -+ def test_literal_eval_str_int_limit(self): -+ with support.adjust_int_max_str_digits(4000): -+ ast.literal_eval("3" * 4000) # no error -+ with self.assertRaises(SyntaxError) as err_ctx: -+ ast.literal_eval("3" * 4001) -+ self.assertIn("Exceeds the limit ", str(err_ctx.exception)) -+ self.assertIn(" Consider hexadecimal ", str(err_ctx.exception)) -+ -+ def test_literal_eval_complex(self): -+ # Issue #4907 -+ self.assertEqual(ast.literal_eval("6j"), 6j) -+ self.assertEqual(ast.literal_eval("-6j"), -6j) -+ self.assertEqual(ast.literal_eval("6.75j"), 6.75j) -+ self.assertEqual(ast.literal_eval("-6.75j"), -6.75j) -+ self.assertEqual(ast.literal_eval("3+6j"), 3 + 6j) -+ self.assertEqual(ast.literal_eval("-3+6j"), -3 + 6j) -+ self.assertEqual(ast.literal_eval("3-6j"), 3 - 6j) -+ self.assertEqual(ast.literal_eval("-3-6j"), -3 - 6j) -+ self.assertEqual(ast.literal_eval("3.25+6.75j"), 3.25 + 6.75j) -+ self.assertEqual(ast.literal_eval("-3.25+6.75j"), -3.25 + 6.75j) -+ self.assertEqual(ast.literal_eval("3.25-6.75j"), 3.25 - 6.75j) -+ self.assertEqual(ast.literal_eval("-3.25-6.75j"), -3.25 - 6.75j) -+ self.assertEqual(ast.literal_eval("(3+6j)"), 3 + 6j) -+ self.assertRaises(ValueError, ast.literal_eval, "-6j+3") -+ self.assertRaises(ValueError, ast.literal_eval, "-6j+3j") -+ self.assertRaises(ValueError, ast.literal_eval, "3+-6j") -+ self.assertRaises(ValueError, ast.literal_eval, "3+(0+6j)") -+ self.assertRaises(ValueError, ast.literal_eval, "-(3+6j)") -+ -+ def test_literal_eval_malformed_dict_nodes(self): -+ malformed = ast.Dict( -+ 
keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)] -+ ) -+ self.assertRaises(ValueError, ast.literal_eval, malformed) -+ malformed = ast.Dict( -+ keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)] -+ ) -+ self.assertRaises(ValueError, ast.literal_eval, malformed) -+ -+ def test_literal_eval_trailing_ws(self): -+ self.assertEqual(ast.literal_eval(" -1"), -1) -+ self.assertEqual(ast.literal_eval("\t\t-1"), -1) -+ self.assertEqual(ast.literal_eval(" \t -1"), -1) -+ self.assertRaises(IndentationError, ast.literal_eval, "\n -1") -+ -+ def test_literal_eval_malformed_lineno(self): -+ msg = r"malformed node or string on line 3:" -+ with self.assertRaisesRegex(ValueError, msg): -+ ast.literal_eval("{'a': 1,\n'b':2,\n'c':++3,\n'd':4}") -+ -+ node = ast.UnaryOp(ast.UAdd(), ast.UnaryOp(ast.UAdd(), ast.Constant(6))) -+ self.assertIsNone(getattr(node, "lineno", None)) -+ msg = r"malformed node or string:" -+ with self.assertRaisesRegex(ValueError, msg): -+ ast.literal_eval(node) -+ -+ def test_literal_eval_syntax_errors(self): -+ with self.assertRaisesRegex(SyntaxError, "unexpected indent"): -+ ast.literal_eval(r""" -+ \ -+ (\ -+ \ """) -+ -+ def test_bad_integer(self): -+ # issue13436: Bad error message with invalid numeric values -+ body = [ -+ ast.ImportFrom( -+ module="time", -+ names=[ast.alias(name="sleep")], -+ level=None, -+ lineno=None, -+ col_offset=None, -+ ) -+ ] -+ mod = ast.Module(body, []) -+ with self.assertRaises(ValueError) as cm: -+ compile(mod, "test", "exec") -+ self.assertIn("invalid integer value: None", str(cm.exception)) -+ -+ def test_level_as_none(self): -+ body = [ -+ ast.ImportFrom( -+ module="time", -+ names=[ast.alias(name="sleep", lineno=0, col_offset=0)], -+ level=None, -+ lineno=0, -+ col_offset=0, -+ ) -+ ] -+ mod = ast.Module(body, []) -+ code = compile(mod, "test", "exec") -+ ns = {} -+ exec(code, ns) -+ self.assertIn("sleep", ns) -+ -+ def test_recursion_direct(self): -+ e = ast.UnaryOp(op=ast.Not(), lineno=0, col_offset=0) -+ e.operand = e -+ with self.assertRaises(RecursionError): -+ with support.infinite_recursion(): -+ compile(ast.Expression(e), "", "eval") -+ -+ def test_recursion_indirect(self): -+ e = ast.UnaryOp(op=ast.Not(), lineno=0, col_offset=0) -+ f = ast.UnaryOp(op=ast.Not(), lineno=0, col_offset=0) -+ e.operand = f -+ f.operand = e -+ with self.assertRaises(RecursionError): -+ with support.infinite_recursion(): -+ compile(ast.Expression(e), "", "eval") -+ -+ -+class ASTValidatorTests(unittest.TestCase): -+ def mod(self, mod, msg=None, mode="exec", *, exc=ValueError): -+ mod.lineno = mod.col_offset = 0 -+ ast.fix_missing_locations(mod) -+ if msg is None: -+ compile(mod, "", mode) -+ else: -+ with self.assertRaises(exc) as cm: -+ compile(mod, "", mode) -+ self.assertIn(msg, str(cm.exception)) -+ -+ def expr(self, node, msg=None, *, exc=ValueError): -+ mod = ast.Module([ast.Expr(node)], []) -+ self.mod(mod, msg, exc=exc) -+ -+ def stmt(self, stmt, msg=None): -+ mod = ast.Module([stmt], []) -+ self.mod(mod, msg) -+ -+ def test_module(self): -+ m = ast.Interactive([ast.Expr(ast.Name("x", ast.Store()))]) -+ self.mod(m, "must have Load context", "single") -+ m = ast.Expression(ast.Name("x", ast.Store())) -+ self.mod(m, "must have Load context", "eval") -+ -+ def _check_arguments(self, fac, check): -+ def arguments( -+ args=None, -+ posonlyargs=None, -+ vararg=None, -+ kwonlyargs=None, -+ kwarg=None, -+ defaults=None, -+ kw_defaults=None, -+ ): -+ if args is None: -+ args = [] -+ if posonlyargs is None: -+ posonlyargs = [] -+ if 
kwonlyargs is None: -+ kwonlyargs = [] -+ if defaults is None: -+ defaults = [] -+ if kw_defaults is None: -+ kw_defaults = [] -+ args = ast.arguments( -+ args, posonlyargs, vararg, kwonlyargs, kw_defaults, kwarg, defaults -+ ) -+ return fac(args) -+ -+ args = [ast.arg("x", ast.Name("x", ast.Store()))] -+ check(arguments(args=args), "must have Load context") -+ check(arguments(posonlyargs=args), "must have Load context") -+ check(arguments(kwonlyargs=args), "must have Load context") -+ check( -+ arguments(defaults=[ast.Constant(3)]), "more positional defaults than args" -+ ) -+ check( -+ arguments(kw_defaults=[ast.Constant(4)]), -+ "length of kwonlyargs is not the same as kw_defaults", -+ ) -+ args = [ast.arg("x", ast.Name("x", ast.Load()))] -+ check( -+ arguments(args=args, defaults=[ast.Name("x", ast.Store())]), -+ "must have Load context", -+ ) -+ args = [ -+ ast.arg("a", ast.Name("x", ast.Load())), -+ ast.arg("b", ast.Name("y", ast.Load())), -+ ] -+ check( -+ arguments(kwonlyargs=args, kw_defaults=[None, ast.Name("x", ast.Store())]), -+ "must have Load context", -+ ) -+ -+ def test_funcdef(self): -+ a = ast.arguments([], [], None, [], [], None, []) -+ f = ast.FunctionDef("x", a, [], [], None, None, []) -+ self.stmt(f, "empty body on FunctionDef") -+ f = ast.FunctionDef( -+ "x", a, [ast.Pass()], [ast.Name("x", ast.Store())], None, None, [] -+ ) -+ self.stmt(f, "must have Load context") -+ f = ast.FunctionDef( -+ "x", a, [ast.Pass()], [], ast.Name("x", ast.Store()), None, [] -+ ) -+ self.stmt(f, "must have Load context") -+ f = ast.FunctionDef("x", ast.arguments(), [ast.Pass()]) -+ self.stmt(f) -+ -+ def fac(args): -+ return ast.FunctionDef("x", args, [ast.Pass()], [], None, None, []) -+ -+ self._check_arguments(fac, self.stmt) -+ -+ def test_funcdef_pattern_matching(self): -+ # gh-104799: New fields on FunctionDef should be added at the end -+ def matcher(node): -+ match node: -+ case ast.FunctionDef( -+ "foo", -+ ast.arguments(args=[ast.arg("bar")]), -+ [ast.Pass()], -+ [ast.Name("capybara", ast.Load())], -+ ast.Name("pacarana", ast.Load()), -+ ): -+ return True -+ case _: -+ return False -+ -+ code = """ -+ @capybara -+ def foo(bar) -> pacarana: -+ pass -+ """ -+ source = ast.parse(textwrap.dedent(code)) -+ funcdef = source.body[0] -+ self.assertIsInstance(funcdef, ast.FunctionDef) -+ self.assertTrue(matcher(funcdef)) -+ -+ def test_classdef(self): -+ def cls( -+ bases=None, keywords=None, body=None, decorator_list=None, type_params=None -+ ): -+ if bases is None: -+ bases = [] -+ if keywords is None: -+ keywords = [] -+ if body is None: -+ body = [ast.Pass()] -+ if decorator_list is None: -+ decorator_list = [] -+ if type_params is None: -+ type_params = [] -+ return ast.ClassDef( -+ "myclass", bases, keywords, body, decorator_list, type_params -+ ) -+ -+ self.stmt(cls(bases=[ast.Name("x", ast.Store())]), "must have Load context") -+ self.stmt( -+ cls(keywords=[ast.keyword("x", ast.Name("x", ast.Store()))]), -+ "must have Load context", -+ ) -+ self.stmt(cls(body=[]), "empty body on ClassDef") -+ self.stmt(cls(body=[None]), "None disallowed") -+ self.stmt( -+ cls(decorator_list=[ast.Name("x", ast.Store())]), "must have Load context" -+ ) -+ -+ def test_delete(self): -+ self.stmt(ast.Delete([]), "empty targets on Delete") -+ self.stmt(ast.Delete([None]), "None disallowed") -+ self.stmt(ast.Delete([ast.Name("x", ast.Load())]), "must have Del context") -+ -+ def test_assign(self): -+ self.stmt(ast.Assign([], ast.Constant(3)), "empty targets on Assign") -+ self.stmt(ast.Assign([None], 
ast.Constant(3)), "None disallowed") -+ self.stmt( -+ ast.Assign([ast.Name("x", ast.Load())], ast.Constant(3)), -+ "must have Store context", -+ ) -+ self.stmt( -+ ast.Assign([ast.Name("x", ast.Store())], ast.Name("y", ast.Store())), -+ "must have Load context", -+ ) -+ -+ def test_augassign(self): -+ aug = ast.AugAssign( -+ ast.Name("x", ast.Load()), ast.Add(), ast.Name("y", ast.Load()) -+ ) -+ self.stmt(aug, "must have Store context") -+ aug = ast.AugAssign( -+ ast.Name("x", ast.Store()), ast.Add(), ast.Name("y", ast.Store()) -+ ) -+ self.stmt(aug, "must have Load context") -+ -+ def test_for(self): -+ x = ast.Name("x", ast.Store()) -+ y = ast.Name("y", ast.Load()) -+ p = ast.Pass() -+ self.stmt(ast.For(x, y, [], []), "empty body on For") -+ self.stmt( -+ ast.For(ast.Name("x", ast.Load()), y, [p], []), "must have Store context" -+ ) -+ self.stmt( -+ ast.For(x, ast.Name("y", ast.Store()), [p], []), "must have Load context" -+ ) -+ e = ast.Expr(ast.Name("x", ast.Store())) -+ self.stmt(ast.For(x, y, [e], []), "must have Load context") -+ self.stmt(ast.For(x, y, [p], [e]), "must have Load context") -+ -+ def test_while(self): -+ self.stmt(ast.While(ast.Constant(3), [], []), "empty body on While") -+ self.stmt( -+ ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []), -+ "must have Load context", -+ ) -+ self.stmt( -+ ast.While( -+ ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))] -+ ), -+ "must have Load context", -+ ) -+ -+ def test_if(self): -+ self.stmt(ast.If(ast.Constant(3), [], []), "empty body on If") -+ i = ast.If(ast.Name("x", ast.Store()), [ast.Pass()], []) -+ self.stmt(i, "must have Load context") -+ i = ast.If(ast.Constant(3), [ast.Expr(ast.Name("x", ast.Store()))], []) -+ self.stmt(i, "must have Load context") -+ i = ast.If( -+ ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))] -+ ) -+ self.stmt(i, "must have Load context") -+ -+ def test_with(self): -+ p = ast.Pass() -+ self.stmt(ast.With([], [p]), "empty items on With") -+ i = ast.withitem(ast.Constant(3), None) -+ self.stmt(ast.With([i], []), "empty body on With") -+ i = ast.withitem(ast.Name("x", ast.Store()), None) -+ self.stmt(ast.With([i], [p]), "must have Load context") -+ i = ast.withitem(ast.Constant(3), ast.Name("x", ast.Load())) -+ self.stmt(ast.With([i], [p]), "must have Store context") -+ -+ def test_raise(self): -+ r = ast.Raise(None, ast.Constant(3)) -+ self.stmt(r, "Raise with cause but no exception") -+ r = ast.Raise(ast.Name("x", ast.Store()), None) -+ self.stmt(r, "must have Load context") -+ r = ast.Raise(ast.Constant(4), ast.Name("x", ast.Store())) -+ self.stmt(r, "must have Load context") -+ -+ def test_try(self): -+ p = ast.Pass() -+ t = ast.Try([], [], [], [p]) -+ self.stmt(t, "empty body on Try") -+ t = ast.Try([ast.Expr(ast.Name("x", ast.Store()))], [], [], [p]) -+ self.stmt(t, "must have Load context") -+ t = ast.Try([p], [], [], []) -+ self.stmt(t, "Try has neither except handlers nor finalbody") -+ t = ast.Try([p], [], [p], [p]) -+ self.stmt(t, "Try has orelse but no except handlers") -+ t = ast.Try([p], [ast.ExceptHandler(None, "x", [])], [], []) -+ self.stmt(t, "empty body on ExceptHandler") -+ e = [ast.ExceptHandler(ast.Name("x", ast.Store()), "y", [p])] -+ self.stmt(ast.Try([p], e, [], []), "must have Load context") -+ e = [ast.ExceptHandler(None, "x", [p])] -+ t = ast.Try([p], e, [ast.Expr(ast.Name("x", ast.Store()))], [p]) -+ self.stmt(t, "must have Load context") -+ t = ast.Try([p], e, [p], [ast.Expr(ast.Name("x", ast.Store()))]) -+ self.stmt(t, "must 
have Load context") -+ -+ def test_try_star(self): -+ p = ast.Pass() -+ t = ast.TryStar([], [], [], [p]) -+ self.stmt(t, "empty body on TryStar") -+ t = ast.TryStar([ast.Expr(ast.Name("x", ast.Store()))], [], [], [p]) -+ self.stmt(t, "must have Load context") -+ t = ast.TryStar([p], [], [], []) -+ self.stmt(t, "TryStar has neither except handlers nor finalbody") -+ t = ast.TryStar([p], [], [p], [p]) -+ self.stmt(t, "TryStar has orelse but no except handlers") -+ t = ast.TryStar([p], [ast.ExceptHandler(None, "x", [])], [], []) -+ self.stmt(t, "empty body on ExceptHandler") -+ e = [ast.ExceptHandler(ast.Name("x", ast.Store()), "y", [p])] -+ self.stmt(ast.TryStar([p], e, [], []), "must have Load context") -+ e = [ast.ExceptHandler(None, "x", [p])] -+ t = ast.TryStar([p], e, [ast.Expr(ast.Name("x", ast.Store()))], [p]) -+ self.stmt(t, "must have Load context") -+ t = ast.TryStar([p], e, [p], [ast.Expr(ast.Name("x", ast.Store()))]) -+ self.stmt(t, "must have Load context") -+ -+ def test_assert(self): -+ self.stmt( -+ ast.Assert(ast.Name("x", ast.Store()), None), "must have Load context" -+ ) -+ assrt = ast.Assert(ast.Name("x", ast.Load()), ast.Name("y", ast.Store())) -+ self.stmt(assrt, "must have Load context") -+ -+ def test_import(self): -+ self.stmt(ast.Import([]), "empty names on Import") -+ -+ def test_importfrom(self): -+ imp = ast.ImportFrom(None, [ast.alias("x", None)], -42) -+ self.stmt(imp, "Negative ImportFrom level") -+ self.stmt(ast.ImportFrom(None, [], 0), "empty names on ImportFrom") -+ -+ def test_global(self): -+ self.stmt(ast.Global([]), "empty names on Global") -+ -+ def test_nonlocal(self): -+ self.stmt(ast.Nonlocal([]), "empty names on Nonlocal") -+ -+ def test_expr(self): -+ e = ast.Expr(ast.Name("x", ast.Store())) -+ self.stmt(e, "must have Load context") -+ -+ def test_boolop(self): -+ b = ast.BoolOp(ast.And(), []) -+ self.expr(b, "less than 2 values") -+ b = ast.BoolOp(ast.And(), [ast.Constant(3)]) -+ self.expr(b, "less than 2 values") -+ b = ast.BoolOp(ast.And(), [ast.Constant(4), None]) -+ self.expr(b, "None disallowed") -+ b = ast.BoolOp(ast.And(), [ast.Constant(4), ast.Name("x", ast.Store())]) -+ self.expr(b, "must have Load context") -+ -+ def test_unaryop(self): -+ u = ast.UnaryOp(ast.Not(), ast.Name("x", ast.Store())) -+ self.expr(u, "must have Load context") -+ -+ def test_lambda(self): -+ a = ast.arguments([], [], None, [], [], None, []) -+ self.expr(ast.Lambda(a, ast.Name("x", ast.Store())), "must have Load context") -+ -+ def fac(args): -+ return ast.Lambda(args, ast.Name("x", ast.Load())) -+ -+ self._check_arguments(fac, self.expr) -+ -+ def test_ifexp(self): -+ l = ast.Name("x", ast.Load()) -+ s = ast.Name("y", ast.Store()) -+ for args in (s, l, l), (l, s, l), (l, l, s): -+ self.expr(ast.IfExp(*args), "must have Load context") -+ -+ def test_dict(self): -+ d = ast.Dict([], [ast.Name("x", ast.Load())]) -+ self.expr(d, "same number of keys as values") -+ d = ast.Dict([ast.Name("x", ast.Load())], [None]) -+ self.expr(d, "None disallowed") -+ -+ def test_set(self): -+ self.expr(ast.Set([None]), "None disallowed") -+ s = ast.Set([ast.Name("x", ast.Store())]) -+ self.expr(s, "must have Load context") -+ -+ def _check_comprehension(self, fac): -+ self.expr(fac([]), "comprehension with no generators") -+ g = ast.comprehension( -+ ast.Name("x", ast.Load()), ast.Name("x", ast.Load()), [], 0 -+ ) -+ self.expr(fac([g]), "must have Store context") -+ g = ast.comprehension( -+ ast.Name("x", ast.Store()), ast.Name("x", ast.Store()), [], 0 -+ ) -+ self.expr(fac([g]), 
"must have Load context") -+ x = ast.Name("x", ast.Store()) -+ y = ast.Name("y", ast.Load()) -+ g = ast.comprehension(x, y, [None], 0) -+ self.expr(fac([g]), "None disallowed") -+ g = ast.comprehension(x, y, [ast.Name("x", ast.Store())], 0) -+ self.expr(fac([g]), "must have Load context") -+ -+ def _simple_comp(self, fac): -+ g = ast.comprehension( -+ ast.Name("x", ast.Store()), ast.Name("x", ast.Load()), [], 0 -+ ) -+ self.expr(fac(ast.Name("x", ast.Store()), [g]), "must have Load context") -+ -+ def wrap(gens): -+ return fac(ast.Name("x", ast.Store()), gens) -+ -+ self._check_comprehension(wrap) -+ -+ def test_listcomp(self): -+ self._simple_comp(ast.ListComp) -+ -+ def test_setcomp(self): -+ self._simple_comp(ast.SetComp) -+ -+ def test_generatorexp(self): -+ self._simple_comp(ast.GeneratorExp) -+ -+ def test_dictcomp(self): -+ g = ast.comprehension( -+ ast.Name("y", ast.Store()), ast.Name("p", ast.Load()), [], 0 -+ ) -+ c = ast.DictComp(ast.Name("x", ast.Store()), ast.Name("y", ast.Load()), [g]) -+ self.expr(c, "must have Load context") -+ c = ast.DictComp(ast.Name("x", ast.Load()), ast.Name("y", ast.Store()), [g]) -+ self.expr(c, "must have Load context") -+ -+ def factory(comps): -+ k = ast.Name("x", ast.Load()) -+ v = ast.Name("y", ast.Load()) -+ return ast.DictComp(k, v, comps) -+ -+ self._check_comprehension(factory) -+ -+ def test_yield(self): -+ self.expr(ast.Yield(ast.Name("x", ast.Store())), "must have Load") -+ self.expr(ast.YieldFrom(ast.Name("x", ast.Store())), "must have Load") -+ -+ def test_compare(self): -+ left = ast.Name("x", ast.Load()) -+ comp = ast.Compare(left, [ast.In()], []) -+ self.expr(comp, "no comparators") -+ comp = ast.Compare(left, [ast.In()], [ast.Constant(4), ast.Constant(5)]) -+ self.expr(comp, "different number of comparators and operands") -+ comp = ast.Compare(ast.Constant("blah"), [ast.In()], [left]) -+ self.expr(comp) -+ comp = ast.Compare(left, [ast.In()], [ast.Constant("blah")]) -+ self.expr(comp) -+ -+ def test_call(self): -+ func = ast.Name("x", ast.Load()) -+ args = [ast.Name("y", ast.Load())] -+ keywords = [ast.keyword("w", ast.Name("z", ast.Load()))] -+ call = ast.Call(ast.Name("x", ast.Store()), args, keywords) -+ self.expr(call, "must have Load context") -+ call = ast.Call(func, [None], keywords) -+ self.expr(call, "None disallowed") -+ bad_keywords = [ast.keyword("w", ast.Name("z", ast.Store()))] -+ call = ast.Call(func, args, bad_keywords) -+ self.expr(call, "must have Load context") -+ -+ def test_num(self): -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import Num -+ -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ -+ class subint(int): -+ pass -+ -+ class subfloat(float): -+ pass -+ -+ class subcomplex(complex): -+ pass -+ -+ for obj in "0", "hello": -+ self.expr(ast.Num(obj)) -+ for obj in subint(), subfloat(), subcomplex(): -+ self.expr(ast.Num(obj), "invalid type", exc=TypeError) -+ -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ ], 
-+ ) -+ -+ def test_attribute(self): -+ attr = ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()) -+ self.expr(attr, "must have Load context") -+ -+ def test_subscript(self): -+ sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Constant(3), ast.Load()) -+ self.expr(sub, "must have Load context") -+ x = ast.Name("x", ast.Load()) -+ sub = ast.Subscript(x, ast.Name("y", ast.Store()), ast.Load()) -+ self.expr(sub, "must have Load context") -+ s = ast.Name("x", ast.Store()) -+ for args in (s, None, None), (None, s, None), (None, None, s): -+ sl = ast.Slice(*args) -+ self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context") -+ sl = ast.Tuple([], ast.Load()) -+ self.expr(ast.Subscript(x, sl, ast.Load())) -+ sl = ast.Tuple([s], ast.Load()) -+ self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context") -+ -+ def test_starred(self): -+ left = ast.List( -+ [ast.Starred(ast.Name("x", ast.Load()), ast.Store())], ast.Store() -+ ) -+ assign = ast.Assign([left], ast.Constant(4)) -+ self.stmt(assign, "must have Store context") -+ -+ def _sequence(self, fac): -+ self.expr(fac([None], ast.Load()), "None disallowed") -+ self.expr( -+ fac([ast.Name("x", ast.Store())], ast.Load()), "must have Load context" -+ ) -+ -+ def test_list(self): -+ self._sequence(ast.List) -+ -+ def test_tuple(self): -+ self._sequence(ast.Tuple) -+ -+ def test_nameconstant(self): -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("ignore", "", DeprecationWarning) -+ from ast import NameConstant -+ -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ self.expr(ast.NameConstant(4)) -+ -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", -+ ], -+ ) -+ -+ @support.requires_resource("cpu") -+ def test_stdlib_validates(self): -+ stdlib = os.path.dirname(ast.__file__) -+ tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")] -+ tests.extend(["test/test_grammar.py", "test/test_unpack_ex.py"]) -+ for module in tests: -+ with self.subTest(module): -+ fn = os.path.join(stdlib, module) -+ with open(fn, "r", encoding="utf-8") as fp: -+ source = fp.read() -+ mod = ast.parse(source, fn) -+ compile(mod, fn, "exec") -+ -+ constant_1 = ast.Constant(1) -+ pattern_1 = ast.MatchValue(constant_1) -+ -+ constant_x = ast.Constant("x") -+ pattern_x = ast.MatchValue(constant_x) -+ -+ constant_true = ast.Constant(True) -+ pattern_true = ast.MatchSingleton(True) -+ -+ name_carter = ast.Name("carter", ast.Load()) -+ -+ _MATCH_PATTERNS = [ -+ ast.MatchValue( -+ ast.Attribute( -+ ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()), -+ "z", -+ ast.Load(), -+ ) -+ ), -+ ast.MatchValue( -+ ast.Attribute( -+ ast.Attribute(ast.Name("x", ast.Load()), "y", ast.Store()), -+ "z", -+ ast.Load(), -+ ) -+ ), -+ ast.MatchValue(ast.Constant(...)), -+ ast.MatchValue(ast.Constant(True)), -+ ast.MatchValue(ast.Constant((1, 2, 3))), -+ ast.MatchSingleton("string"), -+ ast.MatchSequence([ast.MatchSingleton("string")]), -+ ast.MatchSequence([ast.MatchSequence([ast.MatchSingleton("string")])]), -+ ast.MatchMapping([constant_1, constant_true], [pattern_x]), -+ ast.MatchMapping( -+ [constant_true, constant_1], [pattern_x, pattern_1], rest="True" -+ ), -+ ast.MatchMapping( -+ [constant_true, ast.Starred(ast.Name("lol", ast.Load()), ast.Load())], -+ [pattern_x, pattern_1], -+ rest="legit", -+ ), -+ ast.MatchClass( -+ 
ast.Attribute(ast.Attribute(constant_x, "y", ast.Load()), "z", ast.Load()), -+ patterns=[], -+ kwd_attrs=[], -+ kwd_patterns=[], -+ ), -+ ast.MatchClass( -+ name_carter, patterns=[], kwd_attrs=["True"], kwd_patterns=[pattern_1] -+ ), -+ ast.MatchClass( -+ name_carter, patterns=[], kwd_attrs=[], kwd_patterns=[pattern_1] -+ ), -+ ast.MatchClass( -+ name_carter, -+ patterns=[ast.MatchSingleton("string")], -+ kwd_attrs=[], -+ kwd_patterns=[], -+ ), -+ ast.MatchClass( -+ name_carter, patterns=[ast.MatchStar()], kwd_attrs=[], kwd_patterns=[] -+ ), -+ ast.MatchClass( -+ name_carter, patterns=[], kwd_attrs=[], kwd_patterns=[ast.MatchStar()] -+ ), -+ ast.MatchClass( -+ constant_true, # invalid name -+ patterns=[], -+ kwd_attrs=["True"], -+ kwd_patterns=[pattern_1], -+ ), -+ ast.MatchSequence([ast.MatchStar("True")]), -+ ast.MatchAs(name="False"), -+ ast.MatchOr([]), -+ ast.MatchOr([pattern_1]), -+ ast.MatchOr([pattern_1, pattern_x, ast.MatchSingleton("xxx")]), -+ ast.MatchAs(name="_"), -+ ast.MatchStar(name="x"), -+ ast.MatchSequence([ast.MatchStar("_")]), -+ ast.MatchMapping([], [], rest="_"), -+ ] -+ -+ def test_match_validation_pattern(self): -+ name_x = ast.Name("x", ast.Load()) -+ for pattern in self._MATCH_PATTERNS: -+ with self.subTest(ast.dump(pattern, indent=4)): -+ node = ast.Match( -+ subject=name_x, -+ cases=[ast.match_case(pattern=pattern, body=[ast.Pass()])], -+ ) -+ node = ast.fix_missing_locations(node) -+ module = ast.Module([node], []) -+ with self.assertRaises(ValueError): -+ compile(module, "", "exec") -+ -+ -+class ConstantTests(unittest.TestCase): -+ """Tests on the ast.Constant node type.""" -+ -+ def compile_constant(self, value): -+ tree = ast.parse("x = 123") -+ -+ node = tree.body[0].value -+ new_node = ast.Constant(value=value) -+ ast.copy_location(new_node, node) -+ tree.body[0].value = new_node -+ -+ code = compile(tree, "", "exec") -+ -+ ns = {} -+ exec(code, ns) -+ return ns["x"] -+ -+ def test_validation(self): -+ with self.assertRaises(TypeError) as cm: -+ self.compile_constant([1, 2, 3]) -+ self.assertEqual(str(cm.exception), "got an invalid type in Constant: list") -+ -+ def test_singletons(self): -+ for const in (None, False, True, Ellipsis, b"", frozenset()): -+ with self.subTest(const=const): -+ value = self.compile_constant(const) -+ self.assertIs(value, const) -+ -+ def test_values(self): -+ nested_tuple = (1,) -+ nested_frozenset = frozenset({1}) -+ for level in range(3): -+ nested_tuple = (nested_tuple, 2) -+ nested_frozenset = frozenset({nested_frozenset, 2}) -+ values = ( -+ 123, -+ 123.0, -+ 123j, -+ "unicode", -+ b"bytes", -+ tuple("tuple"), -+ frozenset("frozenset"), -+ nested_tuple, -+ nested_frozenset, -+ ) -+ for value in values: -+ with self.subTest(value=value): -+ result = self.compile_constant(value) -+ self.assertEqual(result, value) -+ -+ def test_assign_to_constant(self): -+ tree = ast.parse("x = 1") -+ -+ target = tree.body[0].targets[0] -+ new_target = ast.Constant(value=1) -+ ast.copy_location(new_target, target) -+ tree.body[0].targets[0] = new_target -+ -+ with self.assertRaises(ValueError) as cm: -+ compile(tree, "string", "exec") -+ self.assertEqual( -+ str(cm.exception), -+ "expression which can't be assigned " "to in Store context", -+ ) -+ -+ def test_get_docstring(self): -+ tree = ast.parse("'docstring'\nx = 1") -+ self.assertEqual(ast.get_docstring(tree), "docstring") -+ -+ def get_load_const(self, tree): -+ # Compile to bytecode, disassemble and get parameter of LOAD_CONST -+ # instructions -+ co = compile(tree, "", "exec") -+ 
consts = [] -+ for instr in dis.get_instructions(co): -+ if instr.opname == "LOAD_CONST" or instr.opname == "RETURN_CONST": -+ consts.append(instr.argval) -+ return consts -+ -+ @support.cpython_only -+ def test_load_const(self): -+ consts = [None, True, False, 124, 2.0, 3j, "unicode", b"bytes", (1, 2, 3)] -+ -+ code = "\n".join(["x={!r}".format(const) for const in consts]) -+ code += "\nx = ..." -+ consts.extend((Ellipsis, None)) -+ -+ tree = ast.parse(code) -+ self.assertEqual(self.get_load_const(tree), consts) -+ -+ # Replace expression nodes with constants -+ for assign, const in zip(tree.body, consts): -+ assert isinstance(assign, ast.Assign), ast.dump(assign) -+ new_node = ast.Constant(value=const) -+ ast.copy_location(new_node, assign.value) -+ assign.value = new_node -+ -+ self.assertEqual(self.get_load_const(tree), consts) -+ -+ def test_literal_eval(self): -+ tree = ast.parse("1 + 2") -+ binop = tree.body[0].value -+ -+ new_left = ast.Constant(value=10) -+ ast.copy_location(new_left, binop.left) -+ binop.left = new_left -+ -+ new_right = ast.Constant(value=20j) -+ ast.copy_location(new_right, binop.right) -+ binop.right = new_right -+ -+ self.assertEqual(ast.literal_eval(binop), 10 + 20j) -+ -+ def test_string_kind(self): -+ c = ast.parse('"x"', mode="eval").body -+ self.assertEqual(c.value, "x") -+ self.assertEqual(c.kind, None) -+ -+ c = ast.parse('u"x"', mode="eval").body -+ self.assertEqual(c.value, "x") -+ self.assertEqual(c.kind, "u") -+ -+ c = ast.parse('r"x"', mode="eval").body -+ self.assertEqual(c.value, "x") -+ self.assertEqual(c.kind, None) -+ -+ c = ast.parse('b"x"', mode="eval").body -+ self.assertEqual(c.value, b"x") -+ self.assertEqual(c.kind, None) -+ -+ -+class EndPositionTests(unittest.TestCase): -+ """Tests for end position of AST nodes. -+ -+ Testing end positions of nodes requires a bit of extra care -+ because of how LL parsers work. -+ """ -+ -+ def _check_end_pos(self, ast_node, end_lineno, end_col_offset): -+ self.assertEqual(ast_node.end_lineno, end_lineno) -+ self.assertEqual(ast_node.end_col_offset, end_col_offset) -+ -+ def _check_content(self, source, ast_node, content): -+ self.assertEqual(ast.get_source_segment(source, ast_node), content) -+ -+ def _parse_value(self, s): -+ # Use duck-typing to support both single expression -+ # and a right hand side of an assignment statement. 
-+ return ast.parse(s).body[0].value -+ -+ def test_lambda(self): -+ s = "lambda x, *y: None" -+ lam = self._parse_value(s) -+ self._check_content(s, lam.body, "None") -+ self._check_content(s, lam.args.args[0], "x") -+ self._check_content(s, lam.args.vararg, "y") -+ -+ def test_func_def(self): -+ s = dedent(""" -+ def func(x: int, -+ *args: str, -+ z: float = 0, -+ **kwargs: Any) -> bool: -+ return True -+ """).strip() -+ fdef = ast.parse(s).body[0] -+ self._check_end_pos(fdef, 5, 15) -+ self._check_content(s, fdef.body[0], "return True") -+ self._check_content(s, fdef.args.args[0], "x: int") -+ self._check_content(s, fdef.args.args[0].annotation, "int") -+ self._check_content(s, fdef.args.kwarg, "kwargs: Any") -+ self._check_content(s, fdef.args.kwarg.annotation, "Any") -+ -+ def test_call(self): -+ s = "func(x, y=2, **kw)" -+ call = self._parse_value(s) -+ self._check_content(s, call.func, "func") -+ self._check_content(s, call.keywords[0].value, "2") -+ self._check_content(s, call.keywords[1].value, "kw") -+ -+ def test_call_noargs(self): -+ s = "x[0]()" -+ call = self._parse_value(s) -+ self._check_content(s, call.func, "x[0]") -+ self._check_end_pos(call, 1, 6) -+ -+ def test_class_def(self): -+ s = dedent(""" -+ class C(A, B): -+ x: int = 0 -+ """).strip() -+ cdef = ast.parse(s).body[0] -+ self._check_end_pos(cdef, 2, 14) -+ self._check_content(s, cdef.bases[1], "B") -+ self._check_content(s, cdef.body[0], "x: int = 0") -+ -+ def test_class_kw(self): -+ s = "class S(metaclass=abc.ABCMeta): pass" -+ cdef = ast.parse(s).body[0] -+ self._check_content(s, cdef.keywords[0].value, "abc.ABCMeta") -+ -+ def test_multi_line_str(self): -+ s = dedent(''' -+ x = """Some multi-line text. -+ -+ It goes on starting from same indent.""" -+ ''').strip() -+ assign = ast.parse(s).body[0] -+ self._check_end_pos(assign, 3, 40) -+ self._check_end_pos(assign.value, 3, 40) -+ -+ def test_continued_str(self): -+ s = dedent(""" -+ x = "first part" \\ -+ "second part" -+ """).strip() -+ assign = ast.parse(s).body[0] -+ self._check_end_pos(assign, 2, 13) -+ self._check_end_pos(assign.value, 2, 13) -+ -+ def test_suites(self): -+ # We intentionally put these into the same string to check -+ # that empty lines are not part of the suite. -+ s = dedent(""" -+ while True: -+ pass -+ -+ if one(): -+ x = None -+ elif other(): -+ y = None -+ else: -+ z = None -+ -+ for x, y in stuff: -+ assert True -+ -+ try: -+ raise RuntimeError -+ except TypeError as e: -+ pass -+ -+ pass -+ """).strip() -+ mod = ast.parse(s) -+ while_loop = mod.body[0] -+ if_stmt = mod.body[1] -+ for_loop = mod.body[2] -+ try_stmt = mod.body[3] -+ pass_stmt = mod.body[4] -+ -+ self._check_end_pos(while_loop, 2, 8) -+ self._check_end_pos(if_stmt, 9, 12) -+ self._check_end_pos(for_loop, 12, 15) -+ self._check_end_pos(try_stmt, 17, 8) -+ self._check_end_pos(pass_stmt, 19, 4) -+ -+ self._check_content(s, while_loop.test, "True") -+ self._check_content(s, if_stmt.body[0], "x = None") -+ self._check_content(s, if_stmt.orelse[0].test, "other()") -+ self._check_content(s, for_loop.target, "x, y") -+ self._check_content(s, try_stmt.body[0], "raise RuntimeError") -+ self._check_content(s, try_stmt.handlers[0].type, "TypeError") -+ -+ def test_fstring(self): -+ s = 'x = f"abc {x + y} abc"' -+ fstr = self._parse_value(s) -+ binop = fstr.values[1].value -+ self._check_content(s, binop, "x + y") -+ -+ def test_fstring_multi_line(self): -+ s = dedent(''' -+ f"""Some multi-line text. 
-+ { -+ arg_one -+ + -+ arg_two -+ } -+ It goes on...""" -+ ''').strip() -+ fstr = self._parse_value(s) -+ binop = fstr.values[1].value -+ self._check_end_pos(binop, 5, 7) -+ self._check_content(s, binop.left, "arg_one") -+ self._check_content(s, binop.right, "arg_two") -+ -+ def test_import_from_multi_line(self): -+ s = dedent(""" -+ from x.y.z import ( -+ a, b, c as c -+ ) -+ """).strip() -+ imp = ast.parse(s).body[0] -+ self._check_end_pos(imp, 3, 1) -+ self._check_end_pos(imp.names[2], 2, 16) -+ -+ def test_slices(self): -+ s1 = "f()[1, 2] [0]" -+ s2 = "x[ a.b: c.d]" -+ sm = dedent(""" -+ x[ a.b: f () , -+ g () : c.d -+ ] -+ """).strip() -+ i1, i2, im = map(self._parse_value, (s1, s2, sm)) -+ self._check_content(s1, i1.value, "f()[1, 2]") -+ self._check_content(s1, i1.value.slice, "1, 2") -+ self._check_content(s2, i2.slice.lower, "a.b") -+ self._check_content(s2, i2.slice.upper, "c.d") -+ self._check_content(sm, im.slice.elts[0].upper, "f ()") -+ self._check_content(sm, im.slice.elts[1].lower, "g ()") -+ self._check_end_pos(im, 3, 3) -+ -+ def test_binop(self): -+ s = dedent(""" -+ (1 * 2 + (3 ) + -+ 4 -+ ) -+ """).strip() -+ binop = self._parse_value(s) -+ self._check_end_pos(binop, 2, 6) -+ self._check_content(s, binop.right, "4") -+ self._check_content(s, binop.left, "1 * 2 + (3 )") -+ self._check_content(s, binop.left.right, "3") -+ -+ def test_boolop(self): -+ s = dedent(""" -+ if (one_condition and -+ (other_condition or yet_another_one)): -+ pass -+ """).strip() -+ bop = ast.parse(s).body[0].test -+ self._check_end_pos(bop, 2, 44) -+ self._check_content(s, bop.values[1], "other_condition or yet_another_one") -+ -+ def test_tuples(self): -+ s1 = "x = () ;" -+ s2 = "x = 1 , ;" -+ s3 = "x = (1 , 2 ) ;" -+ sm = dedent(""" -+ x = ( -+ a, b, -+ ) -+ """).strip() -+ t1, t2, t3, tm = map(self._parse_value, (s1, s2, s3, sm)) -+ self._check_content(s1, t1, "()") -+ self._check_content(s2, t2, "1 ,") -+ self._check_content(s3, t3, "(1 , 2 )") -+ self._check_end_pos(tm, 3, 1) -+ -+ def test_attribute_spaces(self): -+ s = "func(x. y .z)" -+ call = self._parse_value(s) -+ self._check_content(s, call, s) -+ self._check_content(s, call.args[0], "x. y .z") -+ -+ def test_redundant_parenthesis(self): -+ s = "( ( ( a + b ) ) )" -+ v = ast.parse(s).body[0].value -+ self.assertEqual(type(v).__name__, "BinOp") -+ self._check_content(s, v, "a + b") -+ s2 = "await " + s -+ v = ast.parse(s2).body[0].value.value -+ self.assertEqual(type(v).__name__, "BinOp") -+ self._check_content(s2, v, "a + b") -+ -+ def test_trailers_with_redundant_parenthesis(self): -+ tests = ( -+ ("( ( ( a ) ) ) ( )", "Call"), -+ ("( ( ( a ) ) ) ( b )", "Call"), -+ ("( ( ( a ) ) ) [ b ]", "Subscript"), -+ ("( ( ( a ) ) ) . 
b", "Attribute"), -+ ) -+ for s, t in tests: -+ with self.subTest(s): -+ v = ast.parse(s).body[0].value -+ self.assertEqual(type(v).__name__, t) -+ self._check_content(s, v, s) -+ s2 = "await " + s -+ v = ast.parse(s2).body[0].value.value -+ self.assertEqual(type(v).__name__, t) -+ self._check_content(s2, v, s) -+ -+ def test_displays(self): -+ s1 = "[{}, {1, }, {1, 2,} ]" -+ s2 = "{a: b, f (): g () ,}" -+ c1 = self._parse_value(s1) -+ c2 = self._parse_value(s2) -+ self._check_content(s1, c1.elts[0], "{}") -+ self._check_content(s1, c1.elts[1], "{1, }") -+ self._check_content(s1, c1.elts[2], "{1, 2,}") -+ self._check_content(s2, c2.keys[1], "f ()") -+ self._check_content(s2, c2.values[1], "g ()") -+ -+ def test_comprehensions(self): -+ s = dedent(""" -+ x = [{x for x, y in stuff -+ if cond.x} for stuff in things] -+ """).strip() -+ cmp = self._parse_value(s) -+ self._check_end_pos(cmp, 2, 37) -+ self._check_content(s, cmp.generators[0].iter, "things") -+ self._check_content(s, cmp.elt.generators[0].iter, "stuff") -+ self._check_content(s, cmp.elt.generators[0].ifs[0], "cond.x") -+ self._check_content(s, cmp.elt.generators[0].target, "x, y") -+ -+ def test_yield_await(self): -+ s = dedent(""" -+ async def f(): -+ yield x -+ await y -+ """).strip() -+ fdef = ast.parse(s).body[0] -+ self._check_content(s, fdef.body[0].value, "yield x") -+ self._check_content(s, fdef.body[1].value, "await y") -+ -+ def test_source_segment_multi(self): -+ s_orig = dedent(""" -+ x = ( -+ a, b, -+ ) + () -+ """).strip() -+ s_tuple = dedent(""" -+ ( -+ a, b, -+ ) -+ """).strip() -+ binop = self._parse_value(s_orig) -+ self.assertEqual(ast.get_source_segment(s_orig, binop.left), s_tuple) -+ -+ def test_source_segment_padded(self): -+ s_orig = dedent(""" -+ class C: -+ def fun(self) -> None: -+ "ЖЖЖЖЖ" -+ """).strip() -+ s_method = " def fun(self) -> None:\n" ' "ЖЖЖЖЖ"' -+ cdef = ast.parse(s_orig).body[0] -+ self.assertEqual( -+ ast.get_source_segment(s_orig, cdef.body[0], padded=True), s_method -+ ) -+ -+ def test_source_segment_endings(self): -+ s = "v = 1\r\nw = 1\nx = 1\n\ry = 1\rz = 1\r\n" -+ v, w, x, y, z = ast.parse(s).body -+ self._check_content(s, v, "v = 1") -+ self._check_content(s, w, "w = 1") -+ self._check_content(s, x, "x = 1") -+ self._check_content(s, y, "y = 1") -+ self._check_content(s, z, "z = 1") -+ -+ def test_source_segment_tabs(self): -+ s = dedent(""" -+ class C: -+ \t\f def fun(self) -> None: -+ \t\f pass -+ """).strip() -+ s_method = " \t\f def fun(self) -> None:\n" " \t\f pass" -+ -+ cdef = ast.parse(s).body[0] -+ self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method) -+ -+ def test_source_segment_newlines(self): -+ s = "def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n" -+ f, g, h = ast.parse(s).body -+ self._check_content(s, f, "def f():\n pass") -+ self._check_content(s, g, "def g():\r pass") -+ self._check_content(s, h, "def h():\r\n pass") -+ -+ s = "def f():\n a = 1\r b = 2\r\n c = 3\n" -+ f = ast.parse(s).body[0] -+ self._check_content(s, f, s.rstrip()) -+ -+ def test_source_segment_missing_info(self): -+ s = "v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n" -+ v, w, x, y = ast.parse(s).body -+ del v.lineno -+ del w.end_lineno -+ del x.col_offset -+ del y.end_col_offset -+ self.assertIsNone(ast.get_source_segment(s, v)) -+ self.assertIsNone(ast.get_source_segment(s, w)) -+ self.assertIsNone(ast.get_source_segment(s, x)) -+ self.assertIsNone(ast.get_source_segment(s, y)) -+ -+ -+class BaseNodeVisitorCases: -+ # Both `NodeVisitor` and `NodeTranformer` must raise 
these warnings: -+ def test_old_constant_nodes(self): -+ class Visitor(self.visitor_class): -+ def visit_Num(self, node): -+ log.append((node.lineno, "Num", node.n)) -+ -+ def visit_Str(self, node): -+ log.append((node.lineno, "Str", node.s)) -+ -+ def visit_Bytes(self, node): -+ log.append((node.lineno, "Bytes", node.s)) -+ -+ def visit_NameConstant(self, node): -+ log.append((node.lineno, "NameConstant", node.value)) -+ -+ def visit_Ellipsis(self, node): -+ log.append((node.lineno, "Ellipsis", ...)) -+ -+ mod = ast.parse( -+ dedent("""\ -+ i = 42 -+ f = 4.25 -+ c = 4.25j -+ s = 'string' -+ b = b'bytes' -+ t = True -+ n = None -+ e = ... -+ """) -+ ) -+ visitor = Visitor() -+ log = [] -+ with warnings.catch_warnings(record=True) as wlog: -+ warnings.filterwarnings("always", "", DeprecationWarning) -+ visitor.visit(mod) -+ self.assertEqual( -+ log, -+ [ -+ (1, "Num", 42), -+ (2, "Num", 4.25), -+ (3, "Num", 4.25j), -+ (4, "Str", "string"), -+ (5, "Bytes", b"bytes"), -+ (6, "NameConstant", True), -+ (7, "NameConstant", None), -+ (8, "Ellipsis", ...), -+ ], -+ ) -+ self.assertEqual( -+ [str(w.message) for w in wlog], -+ [ -+ "visit_Num is deprecated; add visit_Constant", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "visit_Num is deprecated; add visit_Constant", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "visit_Num is deprecated; add visit_Constant", -+ "Attribute n is deprecated and will be removed in Python 3.14; use value instead", -+ "visit_Str is deprecated; add visit_Constant", -+ "Attribute s is deprecated and will be removed in Python 3.14; use value instead", -+ "visit_Bytes is deprecated; add visit_Constant", -+ "Attribute s is deprecated and will be removed in Python 3.14; use value instead", -+ "visit_NameConstant is deprecated; add visit_Constant", -+ "visit_NameConstant is deprecated; add visit_Constant", -+ "visit_Ellipsis is deprecated; add visit_Constant", -+ ], -+ ) -+ -+ -+class NodeVisitorTests(BaseNodeVisitorCases, unittest.TestCase): -+ visitor_class = ast.NodeVisitor -+ -+ -+class NodeTransformerTests(ASTTestMixin, BaseNodeVisitorCases, unittest.TestCase): -+ visitor_class = ast.NodeTransformer -+ -+ def assertASTTransformation(self, tranformer_class, initial_code, expected_code): -+ initial_ast = ast.parse(dedent(initial_code)) -+ expected_ast = ast.parse(dedent(expected_code)) -+ -+ tranformer = tranformer_class() -+ result_ast = ast.fix_missing_locations(tranformer.visit(initial_ast)) -+ -+ self.assertASTEqual(result_ast, expected_ast) -+ -+ def test_node_remove_single(self): -+ code = "def func(arg) -> SomeType: ..." -+ expected = "def func(arg): ..." -+ -+ # Since `FunctionDef.returns` is defined as a single value, we test -+ # the `if isinstance(old_value, AST):` branch here. -+ class SomeTypeRemover(ast.NodeTransformer): -+ def visit_Name(self, node: ast.Name): -+ self.generic_visit(node) -+ if node.id == "SomeType": -+ return None -+ return node -+ -+ self.assertASTTransformation(SomeTypeRemover, code, expected) -+ -+ def test_node_remove_from_list(self): -+ code = """ -+ def func(arg): -+ print(arg) -+ yield arg -+ """ -+ expected = """ -+ def func(arg): -+ print(arg) -+ """ -+ -+ # Since `FunctionDef.body` is defined as a list, we test -+ # the `if isinstance(old_value, list):` branch here. 
-+ class YieldRemover(ast.NodeTransformer): -+ def visit_Expr(self, node: ast.Expr): -+ self.generic_visit(node) -+ if isinstance(node.value, ast.Yield): -+ return None # Remove `yield` from a function -+ return node -+ -+ self.assertASTTransformation(YieldRemover, code, expected) -+ -+ def test_node_return_list(self): -+ code = """ -+ class DSL(Base, kw1=True): ... -+ """ -+ expected = """ -+ class DSL(Base, kw1=True, kw2=True, kw3=False): ... -+ """ -+ -+ class ExtendKeywords(ast.NodeTransformer): -+ def visit_keyword(self, node: ast.keyword): -+ self.generic_visit(node) -+ if node.arg == "kw1": -+ return [ -+ node, -+ ast.keyword("kw2", ast.Constant(True)), -+ ast.keyword("kw3", ast.Constant(False)), -+ ] -+ return node -+ -+ self.assertASTTransformation(ExtendKeywords, code, expected) -+ -+ def test_node_mutate(self): -+ code = """ -+ def func(arg): -+ print(arg) -+ """ -+ expected = """ -+ def func(arg): -+ log(arg) -+ """ -+ -+ class PrintToLog(ast.NodeTransformer): -+ def visit_Call(self, node: ast.Call): -+ self.generic_visit(node) -+ if isinstance(node.func, ast.Name) and node.func.id == "print": -+ node.func.id = "log" -+ return node -+ -+ self.assertASTTransformation(PrintToLog, code, expected) -+ -+ def test_node_replace(self): -+ code = """ -+ def func(arg): -+ print(arg) -+ """ -+ expected = """ -+ def func(arg): -+ logger.log(arg, debug=True) -+ """ -+ -+ class PrintToLog(ast.NodeTransformer): -+ def visit_Call(self, node: ast.Call): -+ self.generic_visit(node) -+ if isinstance(node.func, ast.Name) and node.func.id == "print": -+ return ast.Call( -+ func=ast.Attribute( -+ ast.Name("logger", ctx=ast.Load()), -+ attr="log", -+ ctx=ast.Load(), -+ ), -+ args=node.args, -+ keywords=[ast.keyword("debug", ast.Constant(True))], -+ ) -+ return node -+ -+ self.assertASTTransformation(PrintToLog, code, expected) -+ -+ -+@support.cpython_only -+class ModuleStateTests(unittest.TestCase): -+ # bpo-41194, bpo-41261, bpo-41631: The _ast module uses a global state. -+ -+ def check_ast_module(self): -+ # Check that the _ast module still works as expected -+ code = "x + 1" -+ filename = "" -+ mode = "eval" -+ -+ # Create _ast.AST subclasses instances -+ ast_tree = compile(code, filename, mode, flags=ast.PyCF_ONLY_AST) -+ -+ # Call PyAST_Check() -+ code = compile(ast_tree, filename, mode) -+ self.assertIsInstance(code, types.CodeType) -+ -+ def test_reload_module(self): -+ # bpo-41194: Importing the _ast module twice must not crash. -+ with support.swap_item(sys.modules, "_ast", None): -+ del sys.modules["_ast"] -+ import _ast as ast1 -+ -+ del sys.modules["_ast"] -+ import _ast as ast2 -+ -+ self.check_ast_module() -+ -+ # Unloading the two _ast module instances must not crash. -+ del ast1 -+ del ast2 -+ support.gc_collect() -+ -+ self.check_ast_module() -+ -+ def test_sys_modules(self): -+ # bpo-41631: Test reproducing a Mercurial crash when PyAST_Check() -+ # imported the _ast module internally. 
-+ lazy_mod = object() -+ -+ def my_import(name, *args, **kw): -+ sys.modules[name] = lazy_mod -+ return lazy_mod -+ -+ with support.swap_item(sys.modules, "_ast", None): -+ del sys.modules["_ast"] -+ -+ with support.swap_attr(builtins, "__import__", my_import): -+ # Test that compile() does not import the _ast module -+ self.check_ast_module() -+ self.assertNotIn("_ast", sys.modules) -+ -+ # Sanity check of the test itself -+ import _ast -+ -+ self.assertIs(_ast, lazy_mod) -+ -+ def test_subinterpreter(self): -+ # bpo-41631: Importing and using the _ast module in a subinterpreter -+ # must not crash. -+ code = dedent(""" -+ import _ast -+ import ast -+ import gc -+ import sys -+ import types -+ -+ # Create _ast.AST subclasses instances and call PyAST_Check() -+ ast_tree = compile('x+1', '', 'eval', -+ flags=ast.PyCF_ONLY_AST) -+ code = compile(ast_tree, 'string', 'eval') -+ if not isinstance(code, types.CodeType): -+ raise AssertionError -+ -+ # Unloading the _ast module must not crash. -+ del ast, _ast -+ del sys.modules['ast'], sys.modules['_ast'] -+ gc.collect() -+ """) -+ res = support.run_in_subinterp(code) -+ self.assertEqual(res, 0) -+ -+ -+class ASTMainTests(unittest.TestCase): -+ # Tests `ast.main()` function. -+ -+ def test_cli_file_input(self): -+ code = "print(1, 2, 3)" -+ expected = ast.dump(ast.parse(code), indent=3) -+ -+ with os_helper.temp_dir() as tmp_dir: -+ filename = os.path.join(tmp_dir, "test_module.py") -+ with open(filename, "w", encoding="utf-8") as f: -+ f.write(code) -+ res, _ = script_helper.run_python_until_end("-m", "ast", filename) -+ -+ self.assertEqual(res.err, b"") -+ self.assertEqual(expected.splitlines(), res.out.decode("utf8").splitlines()) -+ self.assertEqual(res.rc, 0) ---- /dev/null -+++ b/Lib/test/test_ast/utils.py -@@ -0,0 +1,15 @@ -+def to_tuple(t): -+ if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis: -+ return t -+ elif isinstance(t, list): -+ return [to_tuple(e) for e in t] -+ result = [t.__class__.__name__] -+ if hasattr(t, 'lineno') and hasattr(t, 'col_offset'): -+ result.append((t.lineno, t.col_offset)) -+ if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'): -+ result[-1] += (t.end_lineno, t.end_col_offset) -+ if t._fields is None: -+ return tuple(result) -+ for f in t._fields: -+ result.append(to_tuple(getattr(t, f))) -+ return tuple(result) -diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py -index 346888735ff..58c06287bc3 100644 ---- a/Lib/test/test_asyncio/test_eager_task_factory.py -+++ b/Lib/test/test_asyncio/test_eager_task_factory.py -@@ -246,6 +246,18 @@ - _, out, err = assert_python_ok("-c", code) - self.assertFalse(err) - -+ def test_issue122332(self): -+ async def coro(): -+ pass -+ -+ async def run(): -+ task = self.loop.create_task(coro()) -+ await task -+ self.assertIsNone(task.get_coro()) -+ -+ self.run_coro(run()) -+ -+ - class AsyncTaskCounter: - def __init__(self, loop, *, task_class, eager): - self.suspense_count = 0 diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py -index f25580371a2..ffcde82b63e 100644 +index abf425f5ef0..ffcde82b63e 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1894,6 +1894,7 @@ @@ -22891,93 +2162,6 @@ index f25580371a2..ffcde82b63e 100644 def test_subprocess_shell_invalid_args(self): async def connect(cmd=None, **kwds): -@@ -2351,7 +2365,7 @@ - h = asyncio.Handle(cb, (), self.loop) - - cb_regex = r'' -- 
cb_regex = fr'functools.partialmethod\({cb_regex}, , \)\(\)' -+ cb_regex = fr'functools.partialmethod\({cb_regex}\)\(\)' - regex = fr'^$' - self.assertRegex(repr(h), regex) - -diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py -index 2184b2091f8..47daa0e9f41 100644 ---- a/Lib/test/test_asyncio/test_futures.py -+++ b/Lib/test/test_asyncio/test_futures.py -@@ -270,10 +270,6 @@ - f = self._new_future(loop=self.loop) - self.assertRaises(asyncio.InvalidStateError, f.exception) - -- # StopIteration cannot be raised into a Future - CPython issue26221 -- self.assertRaisesRegex(TypeError, "StopIteration .* cannot be raised", -- f.set_exception, StopIteration) -- - f.set_exception(exc) - self.assertFalse(f.cancelled()) - self.assertTrue(f.done()) -@@ -283,6 +279,25 @@ - self.assertRaises(asyncio.InvalidStateError, f.set_exception, None) - self.assertFalse(f.cancel()) - -+ def test_stop_iteration_exception(self, stop_iteration_class=StopIteration): -+ exc = stop_iteration_class() -+ f = self._new_future(loop=self.loop) -+ f.set_exception(exc) -+ self.assertFalse(f.cancelled()) -+ self.assertTrue(f.done()) -+ self.assertRaises(RuntimeError, f.result) -+ exc = f.exception() -+ cause = exc.__cause__ -+ self.assertIsInstance(exc, RuntimeError) -+ self.assertRegex(str(exc), 'StopIteration .* cannot be raised') -+ self.assertIsInstance(cause, stop_iteration_class) -+ -+ def test_stop_iteration_subclass_exception(self): -+ class MyStopIteration(StopIteration): -+ pass -+ -+ self.test_stop_iteration_exception(MyStopIteration) -+ - def test_exception_class(self): - f = self._new_future(loop=self.loop) - f.set_exception(RuntimeError) -@@ -641,6 +656,14 @@ - with self.assertRaises(AttributeError): - del fut._log_traceback - -+ def test_future_iter_get_referents_segfault(self): -+ # See https://github.com/python/cpython/issues/122695 -+ import _asyncio -+ it = iter(self._new_future(loop=self.loop)) -+ del it -+ evil = gc.get_referents(_asyncio) -+ gc.collect() -+ - - @unittest.skipUnless(hasattr(futures, '_CFuture'), - 'requires the C _asyncio module') -diff --git a/Lib/test/test_asyncio/test_sendfile.py b/Lib/test/test_asyncio/test_sendfile.py -index d33ff197bbf..2509d4382cd 100644 ---- a/Lib/test/test_asyncio/test_sendfile.py -+++ b/Lib/test/test_asyncio/test_sendfile.py -@@ -93,13 +93,10 @@ - - class SendfileBase: - -- # 256 KiB plus small unaligned to buffer chunk -- # Newer versions of Windows seems to have increased its internal -- # buffer and tries to send as much of the data as it can as it -- # has some form of buffering for this which is less than 256KiB -- # on newer server versions and Windows 11. -- # So DATA should be larger than 256 KiB to make this test reliable. -- DATA = b"x" * (1024 * 256 + 1) -+ # Linux >= 6.10 seems buffering up to 17 pages of data. -+ # So DATA should be large enough to make this test reliable even with a -+ # 64 KiB page configuration. -+ DATA = b"x" * (1024 * 17 * 64 + 1) - # Reduce socket buffer size to test on relative small data sets. 
- BUF_SIZE = 4 * 1024 # 4 KiB - diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 3c8cc5f3649..210990593ad 100644 --- a/Lib/test/test_asyncio/test_streams.py @@ -23039,336 +2223,6 @@ index 35c924a0cd6..9452213c685 100644 class TestFork(unittest.IsolatedAsyncioTestCase): async def test_fork_not_share_event_loop(self): -diff --git a/Lib/test/test_audit.py b/Lib/test/test_audit.py -index 9e3e03748da..9076448ccfd 100644 ---- a/Lib/test/test_audit.py -+++ b/Lib/test/test_audit.py -@@ -140,6 +140,7 @@ - ) - - -+ @support.requires_resource('network') - def test_http(self): - import_helper.import_module("http.client") - returncode, events, stderr = self.run_python("test_http_client") -diff --git a/Lib/test/test_capi/test_bytes.py b/Lib/test/test_capi/test_bytes.py -index bb5d724ff18..c692ee82d04 100644 ---- a/Lib/test/test_capi/test_bytes.py -+++ b/Lib/test/test_capi/test_bytes.py -@@ -52,6 +52,8 @@ - self.assertEqual(fromstringandsize(b'abc'), b'abc') - self.assertEqual(fromstringandsize(b'abc', 2), b'ab') - self.assertEqual(fromstringandsize(b'abc\0def'), b'abc\0def') -+ self.assertEqual(fromstringandsize(b'a'), b'a') -+ self.assertEqual(fromstringandsize(b'a', 1), b'a') - self.assertEqual(fromstringandsize(b'', 0), b'') - self.assertEqual(fromstringandsize(NULL, 0), b'') - self.assertEqual(len(fromstringandsize(NULL, 3)), 3) -diff --git a/Lib/test/test_capi/test_list.py b/Lib/test/test_capi/test_list.py -index 197da03e07f..7dc4d3b284b 100644 ---- a/Lib/test/test_capi/test_list.py -+++ b/Lib/test/test_capi/test_list.py -@@ -275,3 +275,7 @@ - self.assertRaises(SystemError, astuple, ()) - self.assertRaises(SystemError, astuple, object()) - self.assertRaises(SystemError, astuple, NULL) -+ -+ -+if __name__ == "__main__": -+ unittest.main() -diff --git a/Lib/test/test_capi/test_long.py b/Lib/test/test_capi/test_long.py -index 39fef24f807..18507ed7c3b 100644 ---- a/Lib/test/test_capi/test_long.py -+++ b/Lib/test/test_capi/test_long.py -@@ -160,201 +160,119 @@ - # CRASHES fromunicodeobject(NULL, 0) - # CRASHES fromunicodeobject(NULL, 16) - -+ def check_long_asint(self, func, min_val, max_val, *, -+ use_index=True, -+ mask=False, -+ negative_value_error=OverflowError): -+ # round trip (object -> C integer -> object) -+ values = (0, 1, 1234, max_val) -+ if min_val < 0: -+ values += (-1, min_val) -+ for value in values: -+ with self.subTest(value=value): -+ self.assertEqual(func(value), value) -+ self.assertEqual(func(IntSubclass(value)), value) -+ if use_index: -+ self.assertEqual(func(Index(value)), value) -+ -+ if use_index: -+ self.assertEqual(func(MyIndexAndInt()), 10) -+ else: -+ self.assertRaises(TypeError, func, Index(42)) -+ self.assertRaises(TypeError, func, MyIndexAndInt()) -+ -+ if mask: -+ self.assertEqual(func(min_val - 1), max_val) -+ self.assertEqual(func(max_val + 1), min_val) -+ self.assertEqual(func(-1 << 1000), 0) -+ self.assertEqual(func(1 << 1000), 0) -+ else: -+ self.assertRaises(negative_value_error, func, min_val - 1) -+ self.assertRaises(negative_value_error, func, -1 << 1000) -+ self.assertRaises(OverflowError, func, max_val + 1) -+ self.assertRaises(OverflowError, func, 1 << 1000) -+ self.assertRaises(TypeError, func, 1.0) -+ self.assertRaises(TypeError, func, b'2') -+ self.assertRaises(TypeError, func, '3') -+ self.assertRaises(SystemError, func, NULL) -+ -+ def check_long_asintandoverflow(self, func, min_val, max_val): -+ # round trip (object -> C integer -> object) -+ for value in (min_val, max_val, -1, 0, 1, 1234): -+ with 
self.subTest(value=value): -+ self.assertEqual(func(value), (value, 0)) -+ self.assertEqual(func(IntSubclass(value)), (value, 0)) -+ self.assertEqual(func(Index(value)), (value, 0)) -+ -+ self.assertEqual(func(MyIndexAndInt()), (10, 0)) -+ -+ self.assertEqual(func(min_val - 1), (-1, -1)) -+ self.assertEqual(func(max_val + 1), (-1, +1)) -+ -+ # CRASHES func(1.0) -+ # CRASHES func(NULL) -+ - def test_long_aslong(self): - # Test PyLong_AsLong() and PyLong_FromLong() - aslong = _testcapi.pylong_aslong - from _testcapi import LONG_MIN, LONG_MAX -- # round trip (object -> long -> object) -- for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(aslong(value), value) -- -- self.assertEqual(aslong(IntSubclass(42)), 42) -- self.assertEqual(aslong(Index(42)), 42) -- self.assertEqual(aslong(MyIndexAndInt()), 10) -- -- self.assertRaises(OverflowError, aslong, LONG_MIN - 1) -- self.assertRaises(OverflowError, aslong, LONG_MAX + 1) -- self.assertRaises(TypeError, aslong, 1.0) -- self.assertRaises(TypeError, aslong, b'2') -- self.assertRaises(TypeError, aslong, '3') -- self.assertRaises(SystemError, aslong, NULL) -+ self.check_long_asint(aslong, LONG_MIN, LONG_MAX) - - def test_long_aslongandoverflow(self): - # Test PyLong_AsLongAndOverflow() - aslongandoverflow = _testcapi.pylong_aslongandoverflow - from _testcapi import LONG_MIN, LONG_MAX -- # round trip (object -> long -> object) -- for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(aslongandoverflow(value), (value, 0)) -- -- self.assertEqual(aslongandoverflow(IntSubclass(42)), (42, 0)) -- self.assertEqual(aslongandoverflow(Index(42)), (42, 0)) -- self.assertEqual(aslongandoverflow(MyIndexAndInt()), (10, 0)) -- -- self.assertEqual(aslongandoverflow(LONG_MIN - 1), (-1, -1)) -- self.assertEqual(aslongandoverflow(LONG_MAX + 1), (-1, 1)) -- # CRASHES aslongandoverflow(1.0) -- # CRASHES aslongandoverflow(NULL) -+ self.check_long_asintandoverflow(aslongandoverflow, LONG_MIN, LONG_MAX) - - def test_long_asunsignedlong(self): - # Test PyLong_AsUnsignedLong() and PyLong_FromUnsignedLong() - asunsignedlong = _testcapi.pylong_asunsignedlong - from _testcapi import ULONG_MAX -- # round trip (object -> unsigned long -> object) -- for value in (ULONG_MAX, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(asunsignedlong(value), value) -- -- self.assertEqual(asunsignedlong(IntSubclass(42)), 42) -- self.assertRaises(TypeError, asunsignedlong, Index(42)) -- self.assertRaises(TypeError, asunsignedlong, MyIndexAndInt()) -- -- self.assertRaises(OverflowError, asunsignedlong, -1) -- self.assertRaises(OverflowError, asunsignedlong, ULONG_MAX + 1) -- self.assertRaises(TypeError, asunsignedlong, 1.0) -- self.assertRaises(TypeError, asunsignedlong, b'2') -- self.assertRaises(TypeError, asunsignedlong, '3') -- self.assertRaises(SystemError, asunsignedlong, NULL) -+ self.check_long_asint(asunsignedlong, 0, ULONG_MAX, -+ use_index=False) - - def test_long_asunsignedlongmask(self): - # Test PyLong_AsUnsignedLongMask() - asunsignedlongmask = _testcapi.pylong_asunsignedlongmask - from _testcapi import ULONG_MAX -- # round trip (object -> unsigned long -> object) -- for value in (ULONG_MAX, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(asunsignedlongmask(value), value) -- -- self.assertEqual(asunsignedlongmask(IntSubclass(42)), 42) -- self.assertEqual(asunsignedlongmask(Index(42)), 42) -- 
self.assertEqual(asunsignedlongmask(MyIndexAndInt()), 10) -- -- self.assertEqual(asunsignedlongmask(-1), ULONG_MAX) -- self.assertEqual(asunsignedlongmask(ULONG_MAX + 1), 0) -- self.assertRaises(TypeError, asunsignedlongmask, 1.0) -- self.assertRaises(TypeError, asunsignedlongmask, b'2') -- self.assertRaises(TypeError, asunsignedlongmask, '3') -- self.assertRaises(SystemError, asunsignedlongmask, NULL) -+ self.check_long_asint(asunsignedlongmask, 0, ULONG_MAX, mask=True) - - def test_long_aslonglong(self): - # Test PyLong_AsLongLong() and PyLong_FromLongLong() - aslonglong = _testcapi.pylong_aslonglong - from _testcapi import LLONG_MIN, LLONG_MAX -- # round trip (object -> long long -> object) -- for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(aslonglong(value), value) -- -- self.assertEqual(aslonglong(IntSubclass(42)), 42) -- self.assertEqual(aslonglong(Index(42)), 42) -- self.assertEqual(aslonglong(MyIndexAndInt()), 10) -- -- self.assertRaises(OverflowError, aslonglong, LLONG_MIN - 1) -- self.assertRaises(OverflowError, aslonglong, LLONG_MAX + 1) -- self.assertRaises(TypeError, aslonglong, 1.0) -- self.assertRaises(TypeError, aslonglong, b'2') -- self.assertRaises(TypeError, aslonglong, '3') -- self.assertRaises(SystemError, aslonglong, NULL) -+ self.check_long_asint(aslonglong, LLONG_MIN, LLONG_MAX) - - def test_long_aslonglongandoverflow(self): - # Test PyLong_AsLongLongAndOverflow() - aslonglongandoverflow = _testcapi.pylong_aslonglongandoverflow - from _testcapi import LLONG_MIN, LLONG_MAX -- # round trip (object -> long long -> object) -- for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(aslonglongandoverflow(value), (value, 0)) -- -- self.assertEqual(aslonglongandoverflow(IntSubclass(42)), (42, 0)) -- self.assertEqual(aslonglongandoverflow(Index(42)), (42, 0)) -- self.assertEqual(aslonglongandoverflow(MyIndexAndInt()), (10, 0)) -- -- self.assertEqual(aslonglongandoverflow(LLONG_MIN - 1), (-1, -1)) -- self.assertEqual(aslonglongandoverflow(LLONG_MAX + 1), (-1, 1)) -- # CRASHES aslonglongandoverflow(1.0) -- # CRASHES aslonglongandoverflow(NULL) -+ self.check_long_asintandoverflow(aslonglongandoverflow, LLONG_MIN, LLONG_MAX) - - def test_long_asunsignedlonglong(self): - # Test PyLong_AsUnsignedLongLong() and PyLong_FromUnsignedLongLong() - asunsignedlonglong = _testcapi.pylong_asunsignedlonglong - from _testcapi import ULLONG_MAX -- # round trip (object -> unsigned long long -> object) -- for value in (ULLONG_MAX, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(asunsignedlonglong(value), value) -- -- self.assertEqual(asunsignedlonglong(IntSubclass(42)), 42) -- self.assertRaises(TypeError, asunsignedlonglong, Index(42)) -- self.assertRaises(TypeError, asunsignedlonglong, MyIndexAndInt()) -- -- self.assertRaises(OverflowError, asunsignedlonglong, -1) -- self.assertRaises(OverflowError, asunsignedlonglong, ULLONG_MAX + 1) -- self.assertRaises(TypeError, asunsignedlonglong, 1.0) -- self.assertRaises(TypeError, asunsignedlonglong, b'2') -- self.assertRaises(TypeError, asunsignedlonglong, '3') -- self.assertRaises(SystemError, asunsignedlonglong, NULL) -+ self.check_long_asint(asunsignedlonglong, 0, ULLONG_MAX, use_index=False) - - def test_long_asunsignedlonglongmask(self): - # Test PyLong_AsUnsignedLongLongMask() - asunsignedlonglongmask = _testcapi.pylong_asunsignedlonglongmask - from _testcapi import ULLONG_MAX -- # round trip (object -> unsigned long 
long -> object) -- for value in (ULLONG_MAX, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(asunsignedlonglongmask(value), value) -- -- self.assertEqual(asunsignedlonglongmask(IntSubclass(42)), 42) -- self.assertEqual(asunsignedlonglongmask(Index(42)), 42) -- self.assertEqual(asunsignedlonglongmask(MyIndexAndInt()), 10) -- -- self.assertEqual(asunsignedlonglongmask(-1), ULLONG_MAX) -- self.assertEqual(asunsignedlonglongmask(ULLONG_MAX + 1), 0) -- self.assertRaises(TypeError, asunsignedlonglongmask, 1.0) -- self.assertRaises(TypeError, asunsignedlonglongmask, b'2') -- self.assertRaises(TypeError, asunsignedlonglongmask, '3') -- self.assertRaises(SystemError, asunsignedlonglongmask, NULL) -+ self.check_long_asint(asunsignedlonglongmask, 0, ULLONG_MAX, mask=True) - - def test_long_as_ssize_t(self): - # Test PyLong_AsSsize_t() and PyLong_FromSsize_t() - as_ssize_t = _testcapi.pylong_as_ssize_t - from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX -- # round trip (object -> Py_ssize_t -> object) -- for value in (PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(as_ssize_t(value), value) -- -- self.assertEqual(as_ssize_t(IntSubclass(42)), 42) -- self.assertRaises(TypeError, as_ssize_t, Index(42)) -- self.assertRaises(TypeError, as_ssize_t, MyIndexAndInt()) -- -- self.assertRaises(OverflowError, as_ssize_t, PY_SSIZE_T_MIN - 1) -- self.assertRaises(OverflowError, as_ssize_t, PY_SSIZE_T_MAX + 1) -- self.assertRaises(TypeError, as_ssize_t, 1.0) -- self.assertRaises(TypeError, as_ssize_t, b'2') -- self.assertRaises(TypeError, as_ssize_t, '3') -- self.assertRaises(SystemError, as_ssize_t, NULL) -+ self.check_long_asint(as_ssize_t, PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, -+ use_index=False) - - def test_long_as_size_t(self): - # Test PyLong_AsSize_t() and PyLong_FromSize_t() - as_size_t = _testcapi.pylong_as_size_t - from _testcapi import SIZE_MAX -- # round trip (object -> size_t -> object) -- for value in (SIZE_MAX, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(as_size_t(value), value) -- -- self.assertEqual(as_size_t(IntSubclass(42)), 42) -- self.assertRaises(TypeError, as_size_t, Index(42)) -- self.assertRaises(TypeError, as_size_t, MyIndexAndInt()) -- -- self.assertRaises(OverflowError, as_size_t, -1) -- self.assertRaises(OverflowError, as_size_t, SIZE_MAX + 1) -- self.assertRaises(TypeError, as_size_t, 1.0) -- self.assertRaises(TypeError, as_size_t, b'2') -- self.assertRaises(TypeError, as_size_t, '3') -- self.assertRaises(SystemError, as_size_t, NULL) -+ self.check_long_asint(as_size_t, 0, SIZE_MAX, use_index=False) - - def test_long_asdouble(self): - # Test PyLong_AsDouble() -@@ -407,21 +325,7 @@ - bits = 8 * SIZEOF_PID_T - PID_T_MIN = -2**(bits-1) - PID_T_MAX = 2**(bits-1) - 1 -- # round trip (object -> long -> object) -- for value in (PID_T_MIN, PID_T_MAX, -1, 0, 1, 1234): -- with self.subTest(value=value): -- self.assertEqual(aspid(value), value) -- -- self.assertEqual(aspid(IntSubclass(42)), 42) -- self.assertEqual(aspid(Index(42)), 42) -- self.assertEqual(aspid(MyIndexAndInt()), 10) -- -- self.assertRaises(OverflowError, aspid, PID_T_MIN - 1) -- self.assertRaises(OverflowError, aspid, PID_T_MAX + 1) -- self.assertRaises(TypeError, aspid, 1.0) -- self.assertRaises(TypeError, aspid, b'2') -- self.assertRaises(TypeError, aspid, '3') -- self.assertRaises(SystemError, aspid, NULL) -+ self.check_long_asint(aspid, PID_T_MIN, PID_T_MAX) - - - if __name__ == "__main__": diff --git 
a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 98c74a44e4c..40284774a58 100644 --- a/Lib/test/test_capi/test_misc.py @@ -23410,657 +2264,6 @@ index 98c74a44e4c..40284774a58 100644 spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) ---- /dev/null -+++ b/Lib/test/test_capi/test_number.py -@@ -0,0 +1,335 @@ -+import itertools -+import operator -+import sys -+import unittest -+import warnings -+ -+from test.support import cpython_only, import_helper -+ -+_testcapi = import_helper.import_module('_testcapi') -+from _testcapi import PY_SSIZE_T_MAX, PY_SSIZE_T_MIN -+ -+try: -+ from _testbuffer import ndarray -+except ImportError: -+ ndarray = None -+ -+NULL = None -+ -+class BadDescr: -+ def __get__(self, obj, objtype=None): -+ raise RuntimeError -+ -+class WithDunder: -+ def _meth(self, *args): -+ if self.val: -+ return self.val -+ if self.exc: -+ raise self.exc -+ @classmethod -+ def with_val(cls, val): -+ obj = super().__new__(cls) -+ obj.val = val -+ obj.exc = None -+ setattr(cls, cls.methname, cls._meth) -+ return obj -+ -+ @classmethod -+ def with_exc(cls, exc): -+ obj = super().__new__(cls) -+ obj.val = None -+ obj.exc = exc -+ setattr(cls, cls.methname, cls._meth) -+ return obj -+ -+class HasBadAttr: -+ def __new__(cls): -+ obj = super().__new__(cls) -+ setattr(cls, cls.methname, BadDescr()) -+ return obj -+ -+ -+class IndexLike(WithDunder): -+ methname = '__index__' -+ -+class IntLike(WithDunder): -+ methname = '__int__' -+ -+class FloatLike(WithDunder): -+ methname = '__float__' -+ -+ -+def subclassof(base): -+ return type(base.__name__ + 'Subclass', (base,), {}) -+ -+ -+class SomeError(Exception): -+ pass -+ -+class OtherError(Exception): -+ pass -+ -+ -+class CAPITest(unittest.TestCase): -+ def test_check(self): -+ # Test PyNumber_Check() -+ check = _testcapi.number_check -+ -+ self.assertTrue(check(1)) -+ self.assertTrue(check(IndexLike.with_val(1))) -+ self.assertTrue(check(IntLike.with_val(99))) -+ self.assertTrue(check(0.5)) -+ self.assertTrue(check(FloatLike.with_val(4.25))) -+ self.assertTrue(check(1+2j)) -+ -+ self.assertFalse(check([])) -+ self.assertFalse(check("abc")) -+ self.assertFalse(check(object())) -+ self.assertFalse(check(NULL)) -+ -+ def test_unary_ops(self): -+ methmap = {'__neg__': _testcapi.number_negative, # PyNumber_Negative() -+ '__pos__': _testcapi.number_positive, # PyNumber_Positive() -+ '__abs__': _testcapi.number_absolute, # PyNumber_Absolute() -+ '__invert__': _testcapi.number_invert} # PyNumber_Invert() -+ -+ for name, func in methmap.items(): -+ # Generic object, has no tp_as_number structure -+ self.assertRaises(TypeError, func, object()) -+ -+ # C-API function accepts NULL -+ self.assertRaises(SystemError, func, NULL) -+ -+ # Behave as corresponding unary operation -+ op = getattr(operator, name) -+ for x in [0, 42, -1, 3.14, 1+2j]: -+ try: -+ op(x) -+ except TypeError: -+ self.assertRaises(TypeError, func, x) -+ else: -+ self.assertEqual(func(x), op(x)) -+ -+ def test_binary_ops(self): -+ methmap = {'__add__': _testcapi.number_add, # PyNumber_Add() -+ '__sub__': _testcapi.number_subtract, # PyNumber_Subtract() -+ '__mul__': _testcapi.number_multiply, # PyNumber_Multiply() -+ '__matmul__': _testcapi.number_matrixmultiply, # PyNumber_MatrixMultiply() -+ '__floordiv__': _testcapi.number_floordivide, # PyNumber_FloorDivide() -+ '__truediv__': _testcapi.number_truedivide, # PyNumber_TrueDivide() -+ '__mod__': _testcapi.number_remainder, # 
PyNumber_Remainder() -+ '__divmod__': _testcapi.number_divmod, # PyNumber_Divmod() -+ '__lshift__': _testcapi.number_lshift, # PyNumber_Lshift() -+ '__rshift__': _testcapi.number_rshift, # PyNumber_Rshift() -+ '__and__': _testcapi.number_and, # PyNumber_And() -+ '__xor__': _testcapi.number_xor, # PyNumber_Xor() -+ '__or__': _testcapi.number_or, # PyNumber_Or() -+ '__pow__': _testcapi.number_power, # PyNumber_Power() -+ '__iadd__': _testcapi.number_inplaceadd, # PyNumber_InPlaceAdd() -+ '__isub__': _testcapi.number_inplacesubtract, # PyNumber_InPlaceSubtract() -+ '__imul__': _testcapi.number_inplacemultiply, # PyNumber_InPlaceMultiply() -+ '__imatmul__': _testcapi.number_inplacematrixmultiply, # PyNumber_InPlaceMatrixMultiply() -+ '__ifloordiv__': _testcapi.number_inplacefloordivide, # PyNumber_InPlaceFloorDivide() -+ '__itruediv__': _testcapi.number_inplacetruedivide, # PyNumber_InPlaceTrueDivide() -+ '__imod__': _testcapi.number_inplaceremainder, # PyNumber_InPlaceRemainder() -+ '__ilshift__': _testcapi.number_inplacelshift, # PyNumber_InPlaceLshift() -+ '__irshift__': _testcapi.number_inplacershift, # PyNumber_InPlaceRshift() -+ '__iand__': _testcapi.number_inplaceand, # PyNumber_InPlaceAnd() -+ '__ixor__': _testcapi.number_inplacexor, # PyNumber_InPlaceXor() -+ '__ior__': _testcapi.number_inplaceor, # PyNumber_InPlaceOr() -+ '__ipow__': _testcapi.number_inplacepower, # PyNumber_InPlacePower() -+ } -+ -+ for name, func in methmap.items(): -+ cases = [0, 42, 3.14, -1, 123, 1+2j] -+ -+ # Generic object, has no tp_as_number structure -+ for x in cases: -+ self.assertRaises(TypeError, func, object(), x) -+ self.assertRaises(TypeError, func, x, object()) -+ -+ # Behave as corresponding binary operation -+ op = getattr(operator, name, divmod) -+ for x, y in itertools.combinations(cases, 2): -+ try: -+ op(x, y) -+ except (TypeError, ValueError, ZeroDivisionError) as exc: -+ self.assertRaises(exc.__class__, func, x, y) -+ else: -+ self.assertEqual(func(x, y), op(x, y)) -+ -+ # CRASHES func(NULL, object()) -+ # CRASHES func(object(), NULL) -+ -+ @unittest.skipIf(ndarray is None, "needs _testbuffer") -+ def test_misc_add(self): -+ # PyNumber_Add(), PyNumber_InPlaceAdd() -+ add = _testcapi.number_add -+ inplaceadd = _testcapi.number_inplaceadd -+ -+ # test sq_concat/sq_inplace_concat slots -+ a, b, r = [1, 2], [3, 4], [1, 2, 3, 4] -+ self.assertEqual(add(a, b), r) -+ self.assertEqual(a, [1, 2]) -+ self.assertRaises(TypeError, add, ndarray([1], (1,)), 2) -+ a, b, r = [1, 2], [3, 4], [1, 2, 3, 4] -+ self.assertEqual(inplaceadd(a, b), r) -+ self.assertEqual(a, r) -+ self.assertRaises(TypeError, inplaceadd, ndarray([1], (1,)), 2) -+ -+ @unittest.skipIf(ndarray is None, "needs _testbuffer") -+ def test_misc_multiply(self): -+ # PyNumber_Multiply(), PyNumber_InPlaceMultiply() -+ multiply = _testcapi.number_multiply -+ inplacemultiply = _testcapi.number_inplacemultiply -+ -+ # test sq_repeat/sq_inplace_repeat slots -+ a, b, r = [1], 2, [1, 1] -+ self.assertEqual(multiply(a, b), r) -+ self.assertEqual((a, b), ([1], 2)) -+ self.assertEqual(multiply(b, a), r) -+ self.assertEqual((a, b), ([1], 2)) -+ self.assertEqual(multiply([1], -1), []) -+ self.assertRaises(TypeError, multiply, ndarray([1], (1,)), 2) -+ self.assertRaises(TypeError, multiply, [1], 0.5) -+ self.assertRaises(OverflowError, multiply, [1], PY_SSIZE_T_MAX + 1) -+ self.assertRaises(MemoryError, multiply, [1, 2], PY_SSIZE_T_MAX//2 + 1) -+ a, b, r = [1], 2, [1, 1] -+ self.assertEqual(inplacemultiply(a, b), r) -+ self.assertEqual((a, b), (r, 2)) -+ 
a = [1] -+ self.assertEqual(inplacemultiply(b, a), r) -+ self.assertEqual((a, b), ([1], 2)) -+ self.assertRaises(TypeError, inplacemultiply, ndarray([1], (1,)), 2) -+ self.assertRaises(OverflowError, inplacemultiply, [1], PY_SSIZE_T_MAX + 1) -+ self.assertRaises(MemoryError, inplacemultiply, [1, 2], PY_SSIZE_T_MAX//2 + 1) -+ -+ def test_misc_power(self): -+ # PyNumber_Power() -+ power = _testcapi.number_power -+ -+ class HasPow(WithDunder): -+ methname = '__pow__' -+ -+ # ternary op -+ self.assertEqual(power(4, 11, 5), pow(4, 11, 5)) -+ self.assertRaises(TypeError, power, 4, 11, 1.25) -+ self.assertRaises(TypeError, power, 4, 11, HasPow.with_val(NotImplemented)) -+ self.assertRaises(TypeError, power, 4, 11, object()) -+ -+ @cpython_only -+ def test_rshift_print(self): -+ # This tests correct syntax hint for py2 redirection (>>). -+ rshift = _testcapi.number_rshift -+ -+ with self.assertRaises(TypeError) as context: -+ rshift(print, 42) -+ self.assertIn('Did you mean "print(, ' -+ 'file=)"?', str(context.exception)) -+ with self.assertRaises(TypeError) as context: -+ rshift(max, sys.stderr) -+ self.assertNotIn('Did you mean ', str(context.exception)) -+ with self.assertRaises(TypeError) as context: -+ rshift(1, "spam") -+ -+ def test_long(self): -+ # Test PyNumber_Long() -+ long = _testcapi.number_long -+ -+ self.assertEqual(long(42), 42) -+ self.assertEqual(long(1.25), 1) -+ self.assertEqual(long("42"), 42) -+ self.assertEqual(long(b"42"), 42) -+ self.assertEqual(long(bytearray(b"42")), 42) -+ self.assertEqual(long(memoryview(b"42")), 42) -+ self.assertEqual(long(IndexLike.with_val(99)), 99) -+ self.assertEqual(long(IntLike.with_val(99)), 99) -+ -+ self.assertRaises(TypeError, long, IntLike.with_val(1.0)) -+ with warnings.catch_warnings(): -+ warnings.simplefilter("error", DeprecationWarning) -+ self.assertRaises(DeprecationWarning, long, IntLike.with_val(True)) -+ with self.assertWarns(DeprecationWarning): -+ self.assertEqual(long(IntLike.with_val(True)), 1) -+ self.assertRaises(RuntimeError, long, IntLike.with_exc(RuntimeError)) -+ -+ self.assertRaises(TypeError, long, 1j) -+ self.assertRaises(TypeError, long, object()) -+ self.assertRaises(SystemError, long, NULL) -+ -+ def test_float(self): -+ # Test PyNumber_Float() -+ float_ = _testcapi.number_float -+ -+ self.assertEqual(float_(1.25), 1.25) -+ self.assertEqual(float_(123), 123.) 
-+ self.assertEqual(float_("1.25"), 1.25) -+ -+ self.assertEqual(float_(FloatLike.with_val(4.25)), 4.25) -+ self.assertEqual(float_(IndexLike.with_val(99)), 99.0) -+ self.assertEqual(float_(IndexLike.with_val(-1)), -1.0) -+ -+ self.assertRaises(TypeError, float_, FloatLike.with_val(687)) -+ with warnings.catch_warnings(): -+ warnings.simplefilter("error", DeprecationWarning) -+ self.assertRaises(DeprecationWarning, float_, FloatLike.with_val(subclassof(float)(4.25))) -+ with self.assertWarns(DeprecationWarning): -+ self.assertEqual(float_(FloatLike.with_val(subclassof(float)(4.25))), 4.25) -+ self.assertRaises(RuntimeError, float_, FloatLike.with_exc(RuntimeError)) -+ -+ self.assertRaises(TypeError, float_, IndexLike.with_val(1.25)) -+ self.assertRaises(OverflowError, float_, IndexLike.with_val(2**2000)) -+ -+ self.assertRaises(TypeError, float_, 1j) -+ self.assertRaises(TypeError, float_, object()) -+ self.assertRaises(SystemError, float_, NULL) -+ -+ def test_index(self): -+ # Test PyNumber_Index() -+ index = _testcapi.number_index -+ -+ self.assertEqual(index(11), 11) -+ -+ with warnings.catch_warnings(): -+ warnings.simplefilter("error", DeprecationWarning) -+ self.assertRaises(DeprecationWarning, index, IndexLike.with_val(True)) -+ with self.assertWarns(DeprecationWarning): -+ self.assertEqual(index(IndexLike.with_val(True)), 1) -+ self.assertRaises(TypeError, index, IndexLike.with_val(1.0)) -+ self.assertRaises(RuntimeError, index, IndexLike.with_exc(RuntimeError)) -+ -+ self.assertRaises(TypeError, index, 1.25) -+ self.assertRaises(TypeError, index, "42") -+ self.assertRaises(TypeError, index, object()) -+ self.assertRaises(SystemError, index, NULL) -+ -+ def test_tobase(self): -+ # Test PyNumber_ToBase() -+ tobase = _testcapi.number_tobase -+ -+ self.assertEqual(tobase(10, 2), bin(10)) -+ self.assertEqual(tobase(11, 8), oct(11)) -+ self.assertEqual(tobase(16, 10), str(16)) -+ self.assertEqual(tobase(13, 16), hex(13)) -+ -+ self.assertRaises(SystemError, tobase, NULL, 2) -+ self.assertRaises(SystemError, tobase, 2, 3) -+ self.assertRaises(TypeError, tobase, 1.25, 2) -+ self.assertRaises(TypeError, tobase, "42", 2) -+ -+ def test_asssizet(self): -+ # Test PyNumber_AsSsize_t() -+ asssizet = _testcapi.number_asssizet -+ -+ for n in [*range(-6, 7), PY_SSIZE_T_MIN, PY_SSIZE_T_MAX]: -+ self.assertEqual(asssizet(n, OverflowError), n) -+ self.assertEqual(asssizet(PY_SSIZE_T_MAX+10, NULL), PY_SSIZE_T_MAX) -+ self.assertEqual(asssizet(PY_SSIZE_T_MIN-10, NULL), PY_SSIZE_T_MIN) -+ -+ self.assertRaises(OverflowError, asssizet, PY_SSIZE_T_MAX + 10, OverflowError) -+ self.assertRaises(RuntimeError, asssizet, PY_SSIZE_T_MAX + 10, RuntimeError) -+ self.assertRaises(SystemError, asssizet, NULL, TypeError) -+ -+ -+if __name__ == "__main__": -+ unittest.main() -diff --git a/Lib/test/test_capi/test_set.py b/Lib/test/test_capi/test_set.py -index e9165e7e680..5131e67431b 100644 ---- a/Lib/test/test_capi/test_set.py -+++ b/Lib/test/test_capi/test_set.py -@@ -213,3 +213,7 @@ - clear(object()) - self.assertImmutable(clear) - # CRASHES: clear(NULL) -+ -+ -+if __name__ == "__main__": -+ unittest.main() ---- /dev/null -+++ b/Lib/test/test_capi/test_tuple.py -@@ -0,0 +1,261 @@ -+import unittest -+import sys -+from collections import namedtuple -+from test.support import import_helper -+ -+_testcapi = import_helper.import_module('_testcapi') -+_testlimitedcapi = _testcapi -+ -+NULL = None -+PY_SSIZE_T_MIN = _testcapi.PY_SSIZE_T_MIN -+PY_SSIZE_T_MAX = _testcapi.PY_SSIZE_T_MAX -+ -+class TupleSubclass(tuple): -+ 
pass -+ -+ -+class CAPITest(unittest.TestCase): -+ def test_check(self): -+ # Test PyTuple_Check() -+ check = _testlimitedcapi.tuple_check -+ -+ self.assertTrue(check((1, 2))) -+ self.assertTrue(check(())) -+ self.assertTrue(check(TupleSubclass((1, 2)))) -+ self.assertFalse(check({1: 2})) -+ self.assertFalse(check([1, 2])) -+ self.assertFalse(check(42)) -+ self.assertFalse(check(object())) -+ -+ # CRASHES check(NULL) -+ -+ def test_tuple_checkexact(self): -+ # Test PyTuple_CheckExact() -+ check = _testlimitedcapi.tuple_checkexact -+ -+ self.assertTrue(check((1, 2))) -+ self.assertTrue(check(())) -+ self.assertFalse(check(TupleSubclass((1, 2)))) -+ self.assertFalse(check({1: 2})) -+ self.assertFalse(check([1, 2])) -+ self.assertFalse(check(42)) -+ self.assertFalse(check(object())) -+ -+ # CRASHES check(NULL) -+ -+ def test_tuple_new(self): -+ # Test PyTuple_New() -+ tuple_new = _testlimitedcapi.tuple_new -+ size = _testlimitedcapi.tuple_size -+ checknull = _testcapi._check_tuple_item_is_NULL -+ -+ tup1 = tuple_new(0) -+ self.assertEqual(tup1, ()) -+ self.assertEqual(size(tup1), 0) -+ self.assertIs(type(tup1), tuple) -+ tup2 = tuple_new(1) -+ self.assertIs(type(tup2), tuple) -+ self.assertEqual(size(tup2), 1) -+ self.assertIsNot(tup2, tup1) -+ self.assertTrue(checknull(tup2, 0)) -+ -+ self.assertRaises(SystemError, tuple_new, -1) -+ self.assertRaises(SystemError, tuple_new, PY_SSIZE_T_MIN) -+ self.assertRaises(MemoryError, tuple_new, PY_SSIZE_T_MAX) -+ -+ def test_tuple_pack(self): -+ # Test PyTuple_Pack() -+ pack = _testlimitedcapi.tuple_pack -+ -+ self.assertEqual(pack(0), ()) -+ self.assertEqual(pack(1, [1]), ([1],)) -+ self.assertEqual(pack(2, [1], [2]), ([1], [2])) -+ -+ self.assertRaises(SystemError, pack, PY_SSIZE_T_MIN) -+ self.assertRaises(SystemError, pack, -1) -+ self.assertRaises(MemoryError, pack, PY_SSIZE_T_MAX) -+ -+ # CRASHES pack(1, NULL) -+ # CRASHES pack(2, [1]) -+ -+ def test_tuple_size(self): -+ # Test PyTuple_Size() -+ size = _testlimitedcapi.tuple_size -+ -+ self.assertEqual(size(()), 0) -+ self.assertEqual(size((1, 2)), 2) -+ self.assertEqual(size(TupleSubclass((1, 2))), 2) -+ -+ self.assertRaises(SystemError, size, []) -+ self.assertRaises(SystemError, size, 42) -+ self.assertRaises(SystemError, size, object()) -+ -+ # CRASHES size(NULL) -+ -+ def test_tuple_get_size(self): -+ # Test PyTuple_GET_SIZE() -+ size = _testcapi.tuple_get_size -+ -+ self.assertEqual(size(()), 0) -+ self.assertEqual(size((1, 2)), 2) -+ self.assertEqual(size(TupleSubclass((1, 2))), 2) -+ -+ def test_tuple_getitem(self): -+ # Test PyTuple_GetItem() -+ getitem = _testlimitedcapi.tuple_getitem -+ -+ tup = ([1], [2], [3]) -+ self.assertEqual(getitem(tup, 0), [1]) -+ self.assertEqual(getitem(tup, 2), [3]) -+ -+ tup2 = TupleSubclass(([1], [2], [3])) -+ self.assertEqual(getitem(tup2, 0), [1]) -+ self.assertEqual(getitem(tup2, 2), [3]) -+ -+ self.assertRaises(IndexError, getitem, tup, PY_SSIZE_T_MIN) -+ self.assertRaises(IndexError, getitem, tup, -1) -+ self.assertRaises(IndexError, getitem, tup, len(tup)) -+ self.assertRaises(IndexError, getitem, tup, PY_SSIZE_T_MAX) -+ self.assertRaises(SystemError, getitem, [1, 2, 3], 1) -+ self.assertRaises(SystemError, getitem, 42, 1) -+ -+ # CRASHES getitem(NULL, 0) -+ -+ def test_tuple_get_item(self): -+ # Test PyTuple_GET_ITEM() -+ get_item = _testcapi.tuple_get_item -+ -+ tup = ([1], [2], [3]) -+ self.assertEqual(get_item(tup, 0), [1]) -+ self.assertEqual(get_item(tup, 2), [3]) -+ -+ tup2 = TupleSubclass(([1], [2], [3])) -+ self.assertEqual(get_item(tup2, 
0), [1]) -+ self.assertEqual(get_item(tup2, 2), [3]) -+ -+ # CRASHES get_item(NULL, 0) -+ -+ def test_tuple_getslice(self): -+ # Test PyTuple_GetSlice() -+ getslice = _testlimitedcapi.tuple_getslice -+ -+ # empty -+ tup = ([1], [2], [3]) -+ self.assertEqual(getslice(tup, PY_SSIZE_T_MIN, 0), ()) -+ self.assertEqual(getslice(tup, -1, 0), ()) -+ self.assertEqual(getslice(tup, 3, PY_SSIZE_T_MAX), ()) -+ self.assertEqual(getslice(tup, 1, 1), ()) -+ self.assertEqual(getslice(tup, 2, 1), ()) -+ tup = TupleSubclass(([1], [2], [3])) -+ self.assertEqual(getslice(tup, PY_SSIZE_T_MIN, 0), ()) -+ self.assertEqual(getslice(tup, -1, 0), ()) -+ self.assertEqual(getslice(tup, 3, PY_SSIZE_T_MAX), ()) -+ self.assertEqual(getslice(tup, 1, 1), ()) -+ self.assertEqual(getslice(tup, 2, 1), ()) -+ -+ # slice -+ tup = ([1], [2], [3], [4]) -+ self.assertEqual(getslice(tup, 1, 3), ([2], [3])) -+ tup = TupleSubclass(([1], [2], [3], [4])) -+ self.assertEqual(getslice(tup, 1, 3), ([2], [3])) -+ -+ # whole -+ tup = ([1], [2], [3]) -+ self.assertEqual(getslice(tup, 0, 3), tup) -+ self.assertEqual(getslice(tup, 0, 100), tup) -+ self.assertEqual(getslice(tup, -100, 100), tup) -+ tup = TupleSubclass(([1], [2], [3])) -+ self.assertEqual(getslice(tup, 0, 3), tup) -+ self.assertEqual(getslice(tup, 0, 100), tup) -+ self.assertEqual(getslice(tup, -100, 100), tup) -+ -+ self.assertRaises(SystemError, getslice, [[1], [2], [3]], 0, 0) -+ self.assertRaises(SystemError, getslice, 42, 0, 0) -+ -+ # CRASHES getslice(NULL, 0, 0) -+ -+ def test_tuple_setitem(self): -+ # Test PyTuple_SetItem() -+ setitem = _testlimitedcapi.tuple_setitem -+ checknull = _testcapi._check_tuple_item_is_NULL -+ -+ tup = ([1], [2]) -+ self.assertEqual(setitem(tup, 0, []), ([], [2])) -+ self.assertEqual(setitem(tup, 1, []), ([1], [])) -+ -+ tup2 = setitem(tup, 1, NULL) -+ self.assertTrue(checknull(tup2, 1)) -+ -+ tup2 = TupleSubclass(([1], [2])) -+ self.assertRaises(SystemError, setitem, tup2, 0, []) -+ -+ self.assertRaises(IndexError, setitem, tup, PY_SSIZE_T_MIN, []) -+ self.assertRaises(IndexError, setitem, tup, -1, []) -+ self.assertRaises(IndexError, setitem, tup, len(tup), []) -+ self.assertRaises(IndexError, setitem, tup, PY_SSIZE_T_MAX, []) -+ self.assertRaises(SystemError, setitem, [1], 0, []) -+ self.assertRaises(SystemError, setitem, 42, 0, []) -+ -+ # CRASHES setitem(NULL, 0, []) -+ -+ def test_tuple_set_item(self): -+ # Test PyTuple_SET_ITEM() -+ set_item = _testcapi.tuple_set_item -+ checknull = _testcapi._check_tuple_item_is_NULL -+ -+ tup = ([1], [2]) -+ self.assertEqual(set_item(tup, 0, []), ([], [2])) -+ self.assertEqual(set_item(tup, 1, []), ([1], [])) -+ -+ tup2 = set_item(tup, 1, NULL) -+ self.assertTrue(checknull(tup2, 1)) -+ -+ tup2 = TupleSubclass(([1], [2])) -+ self.assertIs(set_item(tup2, 0, []), tup2) -+ self.assertEqual(tup2, ([], [2])) -+ -+ # CRASHES set_item(tup, -1, []) -+ # CRASHES set_item(tup, len(tup), []) -+ # CRASHES set_item([1], 0, []) -+ # CRASHES set_item(NULL, 0, []) -+ -+ def test__tuple_resize(self): -+ # Test _PyTuple_Resize() -+ resize = _testcapi._tuple_resize -+ checknull = _testcapi._check_tuple_item_is_NULL -+ -+ a = () -+ b = resize(a, 0, False) -+ self.assertEqual(len(a), 0) -+ self.assertEqual(len(b), 0) -+ b = resize(a, 2, False) -+ self.assertEqual(len(a), 0) -+ self.assertEqual(len(b), 2) -+ self.assertTrue(checknull(b, 0)) -+ self.assertTrue(checknull(b, 1)) -+ -+ a = ([1], [2], [3]) -+ b = resize(a, 3) -+ self.assertEqual(b, a) -+ b = resize(a, 2) -+ self.assertEqual(b, a[:2]) -+ b = resize(a, 5) -+ 
self.assertEqual(len(b), 5) -+ self.assertEqual(b[:3], a) -+ self.assertTrue(checknull(b, 3)) -+ self.assertTrue(checknull(b, 4)) -+ -+ a = () -+ self.assertRaises(MemoryError, resize, a, PY_SSIZE_T_MAX) -+ self.assertRaises(SystemError, resize, a, -1) -+ self.assertRaises(SystemError, resize, a, PY_SSIZE_T_MIN) -+ # refcount > 1 -+ a = (1, 2, 3) -+ self.assertRaises(SystemError, resize, a, 3, False) -+ self.assertRaises(SystemError, resize, a, 0, False) -+ # non-tuple -+ self.assertRaises(SystemError, resize, [1, 2, 3], 0, False) -+ self.assertRaises(SystemError, resize, NULL, 0, False) -+ -+if __name__ == "__main__": -+ unittest.main() -diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py -index c114a62ce09..523b522a3af 100644 ---- a/Lib/test/test_clinic.py -+++ b/Lib/test/test_clinic.py -@@ -2093,11 +2093,27 @@ - self.assertEqual(ac_tester.vararg(1, 2, 3, 4), (1, (2, 3, 4))) - - def test_vararg_with_default(self): -- with self.assertRaises(TypeError): -- ac_tester.vararg_with_default() -- self.assertEqual(ac_tester.vararg_with_default(1, b=False), (1, (), False)) -- self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4), (1, (2, 3, 4), False)) -- self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4, b=True), (1, (2, 3, 4), True)) -+ fn = ac_tester.vararg_with_default -+ self.assertRaises(TypeError, fn) -+ self.assertRaises(TypeError, fn, 1, a=2) -+ self.assertEqual(fn(1, b=2), (1, (), True)) -+ self.assertEqual(fn(1, 2, 3, 4), (1, (2, 3, 4), False)) -+ self.assertEqual(fn(1, 2, 3, 4, b=5), (1, (2, 3, 4), True)) -+ self.assertEqual(fn(a=1), (1, (), False)) -+ self.assertEqual(fn(a=1, b=2), (1, (), True)) -+ -+ def test_vararg_with_default2(self): -+ fn = ac_tester.vararg_with_default2 -+ self.assertRaises(TypeError, fn) -+ self.assertRaises(TypeError, fn, 1, a=2) -+ self.assertEqual(fn(1, b=2), (1, (), 2, None)) -+ self.assertEqual(fn(1, b=2, c=3), (1, (), 2, 3)) -+ self.assertEqual(fn(1, 2, 3), (1, (2, 3), None, None)) -+ self.assertEqual(fn(1, 2, 3, b=4), (1, (2, 3), 4, None)) -+ self.assertEqual(fn(1, 2, 3, b=4, c=5), (1, (2, 3), 4, 5)) -+ self.assertEqual(fn(a=1), (1, (), None, None)) -+ self.assertEqual(fn(a=1, b=2), (1, (), 2, None)) -+ self.assertEqual(fn(a=1, b=2, c=3), (1, (), 2, 3)) - - def test_vararg_with_only_defaults(self): - self.assertEqual(ac_tester.vararg_with_only_defaults(), ((), None)) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 1b588826010..74879ba2a9f 100644 --- a/Lib/test/test_cmd_line_script.py @@ -24097,299 +2300,6 @@ index 1b588826010..74879ba2a9f 100644 name = os.fsdecode(os_helper.TESTFN_UNDECODABLE) elif os_helper.TESTFN_NONASCII: name = os_helper.TESTFN_NONASCII -diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py -index 226bc3a853b..06a1ba494be 100644 ---- a/Lib/test/test_code_module.py -+++ b/Lib/test/test_code_module.py -@@ -1,5 +1,6 @@ - "Test InteractiveConsole and InteractiveInterpreter from code module" - import sys -+import traceback - import unittest - from textwrap import dedent - from contextlib import ExitStack -@@ -11,6 +12,7 @@ - - - class TestInteractiveConsole(unittest.TestCase): -+ maxDiff = None - - def setUp(self): - self.console = code.InteractiveConsole() -@@ -58,21 +60,151 @@ - raise AssertionError("no console stdout") - - def test_syntax_error(self): -- self.infunc.side_effect = ["undefined", EOFError('Finished')] -+ self.infunc.side_effect = ["def f():", -+ " x = ?", -+ "", -+ EOFError('Finished')] - self.console.interact() -- for call in 
self.stderr.method_calls: -- if 'NameError' in ''.join(call[1]): -- break -- else: -- raise AssertionError("No syntax error from console") -+ output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) -+ output = output[output.index('(InteractiveConsole)'):] -+ output = output[:output.index('\nnow exiting')] -+ self.assertEqual(output.splitlines()[1:], [ -+ ' File "", line 2', -+ ' x = ?', -+ ' ^', -+ 'SyntaxError: invalid syntax']) -+ self.assertIs(self.sysmod.last_type, SyntaxError) -+ self.assertIs(type(self.sysmod.last_value), SyntaxError) -+ self.assertIsNone(self.sysmod.last_traceback) -+ self.assertIsNone(self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) -+ -+ def test_indentation_error(self): -+ self.infunc.side_effect = [" 1", EOFError('Finished')] -+ self.console.interact() -+ output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) -+ output = output[output.index('(InteractiveConsole)'):] -+ output = output[:output.index('\nnow exiting')] -+ self.assertEqual(output.splitlines()[1:], [ -+ ' File "", line 1', -+ ' 1', -+ 'IndentationError: unexpected indent']) -+ self.assertIs(self.sysmod.last_type, IndentationError) -+ self.assertIs(type(self.sysmod.last_value), IndentationError) -+ self.assertIsNone(self.sysmod.last_traceback) -+ self.assertIsNone(self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) -+ -+ def test_unicode_error(self): -+ self.infunc.side_effect = ["'\ud800'", EOFError('Finished')] -+ self.console.interact() -+ output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) -+ output = output[output.index('(InteractiveConsole)'):] -+ output = output[output.index('\n') + 1:] -+ self.assertTrue(output.startswith('UnicodeEncodeError: '), output) -+ self.assertIs(self.sysmod.last_type, UnicodeEncodeError) -+ self.assertIs(type(self.sysmod.last_value), UnicodeEncodeError) -+ self.assertIsNone(self.sysmod.last_traceback) -+ self.assertIsNone(self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) - - def test_sysexcepthook(self): -- self.infunc.side_effect = ["raise ValueError('')", -+ self.infunc.side_effect = ["def f():", -+ " raise ValueError('BOOM!')", -+ "", -+ "f()", -+ EOFError('Finished')] -+ hook = mock.Mock() -+ self.sysmod.excepthook = hook -+ self.console.interact() -+ hook.assert_called() -+ hook.assert_called_with(self.sysmod.last_type, -+ self.sysmod.last_value, -+ self.sysmod.last_traceback) -+ self.assertIs(self.sysmod.last_type, ValueError) -+ self.assertIs(type(self.sysmod.last_value), ValueError) -+ self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) -+ self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ -+ 'Traceback (most recent call last):\n', -+ ' File "", line 1, in \n', -+ ' File "", line 2, in f\n', -+ 'ValueError: BOOM!\n']) -+ -+ def test_sysexcepthook_syntax_error(self): -+ self.infunc.side_effect = ["def f():", -+ " x = ?", -+ "", - EOFError('Finished')] - hook = mock.Mock() - self.sysmod.excepthook = hook - self.console.interact() -- self.assertTrue(hook.called) -+ hook.assert_called() -+ hook.assert_called_with(self.sysmod.last_type, -+ self.sysmod.last_value, -+ self.sysmod.last_traceback) -+ self.assertIs(self.sysmod.last_type, SyntaxError) -+ self.assertIs(type(self.sysmod.last_value), SyntaxError) -+ self.assertIsNone(self.sysmod.last_traceback) -+ 
self.assertIsNone(self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) -+ self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ -+ ' File "", line 2\n', -+ ' x = ?\n', -+ ' ^\n', -+ 'SyntaxError: invalid syntax\n']) -+ -+ def test_sysexcepthook_indentation_error(self): -+ self.infunc.side_effect = [" 1", EOFError('Finished')] -+ hook = mock.Mock() -+ self.sysmod.excepthook = hook -+ self.console.interact() -+ hook.assert_called() -+ hook.assert_called_with(self.sysmod.last_type, -+ self.sysmod.last_value, -+ self.sysmod.last_traceback) -+ self.assertIs(self.sysmod.last_type, IndentationError) -+ self.assertIs(type(self.sysmod.last_value), IndentationError) -+ self.assertIsNone(self.sysmod.last_traceback) -+ self.assertIsNone(self.sysmod.last_value.__traceback__) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) -+ self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ -+ ' File "", line 1\n', -+ ' 1\n', -+ 'IndentationError: unexpected indent\n']) -+ -+ def test_sysexcepthook_crashing_doesnt_close_repl(self): -+ self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] -+ self.sysmod.excepthook = 1 -+ self.console.interact() -+ self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) -+ error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') -+ self.assertIn("Error in sys.excepthook:", error) -+ self.assertEqual(error.count("'int' object is not callable"), 1) -+ self.assertIn("Original exception was:", error) -+ self.assertIn("division by zero", error) -+ -+ def test_sysexcepthook_raising_BaseException(self): -+ self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] -+ s = "not so fast" -+ def raise_base(*args, **kwargs): -+ raise BaseException(s) -+ self.sysmod.excepthook = raise_base -+ self.console.interact() -+ self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) -+ error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') -+ self.assertIn("Error in sys.excepthook:", error) -+ self.assertEqual(error.count("not so fast"), 1) -+ self.assertIn("Original exception was:", error) -+ self.assertIn("division by zero", error) -+ -+ def test_sysexcepthook_raising_SystemExit_gets_through(self): -+ self.infunc.side_effect = ["1/0"] -+ def raise_base(*args, **kwargs): -+ raise SystemExit -+ self.sysmod.excepthook = raise_base -+ with self.assertRaises(SystemExit): -+ self.console.interact() - - def test_banner(self): - # with banner -@@ -131,6 +263,11 @@ - ValueError - """) - self.assertIn(expected, output) -+ self.assertIs(self.sysmod.last_type, ValueError) -+ self.assertIs(type(self.sysmod.last_value), ValueError) -+ self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) -+ self.assertIsNotNone(self.sysmod.last_traceback) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) - - def test_context_tb(self): - self.infunc.side_effect = ["try: ham\nexcept: eggs\n", -@@ -149,6 +286,11 @@ - NameError: name 'eggs' is not defined - """) - self.assertIn(expected, output) -+ self.assertIs(self.sysmod.last_type, NameError) -+ self.assertIs(type(self.sysmod.last_value), NameError) -+ self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) -+ self.assertIsNotNone(self.sysmod.last_traceback) -+ self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) - - - if __name__ == "__main__": -diff --git a/Lib/test/test_compile.py 
b/Lib/test/test_compile.py -index 6ed7fe2b065..72bf87d10e4 100644 ---- a/Lib/test/test_compile.py -+++ b/Lib/test/test_compile.py -@@ -10,7 +10,7 @@ - import textwrap - import warnings - from test import support --from test.support import (script_helper, requires_debug_ranges, -+from test.support import (script_helper, requires_debug_ranges, run_code, - requires_specialization, C_RECURSION_LIMIT) - from test.support.os_helper import FakePath - -@@ -1065,7 +1065,7 @@ - x - in - y) -- genexp_lines = [0, 2, 0] -+ genexp_lines = [0, 4, 2, 0, 4] - - genexp_code = return_genexp.__code__.co_consts[1] - code_lines = self.get_code_lines(genexp_code) -@@ -1431,7 +1431,7 @@ - self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', - line=1, end_line=2, column=1, end_column=8, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_CONST', -- line=1, end_line=6, column=0, end_column=32, occurrence=1) -+ line=4, end_line=4, column=7, end_column=14, occurrence=1) - - def test_multiline_async_generator_expression(self): - snippet = textwrap.dedent("""\ -@@ -1829,6 +1829,33 @@ - code, "LOAD_GLOBAL", line=3, end_line=3, column=4, end_column=9 - ) - -+ def test_lambda_return_position(self): -+ snippets = [ -+ "f = lambda: x", -+ "f = lambda: 42", -+ "f = lambda: 1 + 2", -+ "f = lambda: a + b", -+ ] -+ for snippet in snippets: -+ with self.subTest(snippet=snippet): -+ lamb = run_code(snippet)["f"] -+ positions = lamb.__code__.co_positions() -+ # assert that all positions are within the lambda -+ for i, pos in enumerate(positions): -+ with self.subTest(i=i, pos=pos): -+ start_line, end_line, start_col, end_col = pos -+ if i == 0 and start_col == end_col == 0: -+ # ignore the RESUME in the beginning -+ continue -+ self.assertEqual(start_line, 1) -+ self.assertEqual(end_line, 1) -+ code_start = snippet.find(":") + 2 -+ code_end = len(snippet) -+ self.assertGreaterEqual(start_col, code_start) -+ self.assertLessEqual(end_col, code_end) -+ self.assertGreaterEqual(end_col, start_col) -+ self.assertLessEqual(end_col, code_end) -+ - - class TestExpressionStackSize(unittest.TestCase): - # These tests check that the computed stack size for a code object -diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py -index ea57df9cd24..a1794980dcd 100644 ---- a/Lib/test/test_compiler_codegen.py -+++ b/Lib/test/test_compiler_codegen.py -@@ -39,6 +39,7 @@ - ('GET_ITER', None, 1), - loop_lbl := self.Label(), - ('FOR_ITER', exit_lbl := self.Label(), 1), -+ ('NOP', None, 1, 1), - ('STORE_NAME', 1, 1), - ('PUSH_NULL', None, 2), - ('LOAD_NAME', 2, 2), -diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py -index 3385955d7f3..7625f2d6fc8 100644 ---- a/Lib/test/test_complex.py -+++ b/Lib/test/test_complex.py -@@ -618,7 +618,7 @@ - def test_hash(self): - for x in range(-30, 30): - self.assertEqual(hash(x), hash(complex(x, 0))) -- x /= 3.0 # now check against floating point -+ x /= 3.0 # now check against floating-point - self.assertEqual(hash(x), hash(complex(x, 0.))) - - self.assertNotEqual(hash(2000005 - 1j), -1) diff --git a/Lib/test/test_concurrent_futures/test_thread_pool.py b/Lib/test/test_concurrent_futures/test_thread_pool.py index 812f989d8f3..dfcf9e16e40 100644 --- a/Lib/test/test_concurrent_futures/test_thread_pool.py @@ -24402,799 +2312,20 @@ index 812f989d8f3..dfcf9e16e40 100644 @unittest.skipUnless(hasattr(os, 'register_at_fork'), 'need os.register_at_fork') @support.requires_resource('cpu') def test_hang_global_shutdown_lock(self): -diff --git 
a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py -index 27e8a767903..14d69b6f5f3 100644 ---- a/Lib/test/test_cprofile.py -+++ b/Lib/test/test_cprofile.py -@@ -30,6 +30,43 @@ - - self.assertEqual(cm.unraisable.exc_type, TypeError) - -+ def test_evil_external_timer(self): -+ # gh-120289 -+ # Disabling profiler in external timer should not crash -+ import _lsprof -+ class EvilTimer(): -+ def __init__(self, disable_count): -+ self.count = 0 -+ self.disable_count = disable_count -+ -+ def __call__(self): -+ self.count += 1 -+ if self.count == self.disable_count: -+ profiler_with_evil_timer.disable() -+ return self.count -+ -+ # this will trigger external timer to disable profiler at -+ # call event - in initContext in _lsprof.c -+ with support.catch_unraisable_exception() as cm: -+ profiler_with_evil_timer = _lsprof.Profiler(EvilTimer(1)) -+ profiler_with_evil_timer.enable() -+ # Make a call to trigger timer -+ (lambda: None)() -+ profiler_with_evil_timer.disable() -+ profiler_with_evil_timer.clear() -+ self.assertEqual(cm.unraisable.exc_type, RuntimeError) -+ -+ # this will trigger external timer to disable profiler at -+ # return event - in Stop in _lsprof.c -+ with support.catch_unraisable_exception() as cm: -+ profiler_with_evil_timer = _lsprof.Profiler(EvilTimer(2)) -+ profiler_with_evil_timer.enable() -+ # Make a call to trigger timer -+ (lambda: None)() -+ profiler_with_evil_timer.disable() -+ profiler_with_evil_timer.clear() -+ self.assertEqual(cm.unraisable.exc_type, RuntimeError) -+ - def test_profile_enable_disable(self): - prof = self.profilerclass() - # Make sure we clean ourselves up if the test fails for some reason. -@@ -83,8 +120,8 @@ - - for func, (cc, nc, _, _, _) in pr.stats.items(): - if func[2] == "": -- self.assertEqual(cc, 1) -- self.assertEqual(nc, 1) -+ self.assertEqual(cc, 2) -+ self.assertEqual(nc, 2) - - - class TestCommandLine(unittest.TestCase): -diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py -index adb89d0df92..57db59185d9 100644 ---- a/Lib/test/test_csv.py -+++ b/Lib/test/test_csv.py -@@ -425,6 +425,8 @@ - quoting=csv.QUOTE_NONNUMERIC) - self._read_test(['1,@,3,@,5'], [['1', ',3,', '5']], quotechar='@') - self._read_test(['1,\0,3,\0,5'], [['1', ',3,', '5']], quotechar='\0') -+ self._read_test(['1\\.5,\\.5,.5'], [[1.5, 0.5, 0.5]], -+ quoting=csv.QUOTE_NONNUMERIC, escapechar='\\') - - def test_read_skipinitialspace(self): - self._read_test(['no space, space, spaces,\ttab'], -diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py -index 78aead26da7..4f5a66e39be 100644 ---- a/Lib/test/test_ctypes/test_arrays.py -+++ b/Lib/test/test_ctypes/test_arrays.py -@@ -212,7 +212,7 @@ - class EmptyStruct(Structure): - _fields_ = [] - -- obj = (EmptyStruct * 2)() # bpo37188: Floating point exception -+ obj = (EmptyStruct * 2)() # bpo37188: Floating-point exception - self.assertEqual(sizeof(obj), 0) - - def test_empty_element_array(self): -@@ -220,7 +220,7 @@ - _type_ = c_int - _length_ = 0 - -- obj = (EmptyArray * 2)() # bpo37188: Floating point exception -+ obj = (EmptyArray * 2)() # bpo37188: Floating-point exception - self.assertEqual(sizeof(obj), 0) - - def test_bpo36504_signed_int_overflow(self): -diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py -index e15b34570ef..7557c08566e 100644 ---- a/Lib/test/test_dataclasses/__init__.py -+++ b/Lib/test/test_dataclasses/__init__.py -@@ -3560,6 +3560,38 @@ - self.assertEqual(A().__dict__, {}) - A() - -+ @support.cpython_only -+ def 
test_slots_with_wrong_init_subclass(self): -+ # TODO: This test is for a kinda-buggy behavior. -+ # Ideally, it should be fixed and `__init_subclass__` -+ # should be fully supported in the future versions. -+ # See https://github.com/python/cpython/issues/91126 -+ class WrongSuper: -+ def __init_subclass__(cls, arg): -+ pass -+ -+ with self.assertRaisesRegex( -+ TypeError, -+ "missing 1 required positional argument: 'arg'", -+ ): -+ @dataclass(slots=True) -+ class WithWrongSuper(WrongSuper, arg=1): -+ pass -+ -+ class CorrectSuper: -+ args = [] -+ def __init_subclass__(cls, arg="default"): -+ cls.args.append(arg) -+ -+ @dataclass(slots=True) -+ class WithCorrectSuper(CorrectSuper): -+ pass -+ -+ # __init_subclass__ is called twice: once for `WithCorrectSuper` -+ # and once for `WithCorrectSuper__slots__` new class -+ # that we create internally. -+ self.assertEqual(CorrectSuper.args, ["default", "default"]) -+ - - class TestDescriptors(unittest.TestCase): - def test_set_name(self): -diff --git a/Lib/test/test_datetime.py b/Lib/test/test_datetime.py -index 3859733a4fe..005187f13e6 100644 ---- a/Lib/test/test_datetime.py -+++ b/Lib/test/test_datetime.py -@@ -1,5 +1,6 @@ - import unittest +diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py +index 203dd6fe57d..6d734d05245 100644 +--- a/Lib/test/test_fcntl.py ++++ b/Lib/test/test_fcntl.py +@@ -6,7 +6,9 @@ + import struct import sys -+import functools - - from test.support.import_helper import import_fresh_module - -@@ -39,21 +40,26 @@ - for cls in test_classes: - cls.__name__ += suffix - cls.__qualname__ += suffix -- @classmethod -- def setUpClass(cls_, module=module): -- cls_._save_sys_modules = sys.modules.copy() -- sys.modules[TESTS] = module -- sys.modules['datetime'] = module.datetime_module -- if hasattr(module, '_pydatetime'): -- sys.modules['_pydatetime'] = module._pydatetime -- sys.modules['_strptime'] = module._strptime -- @classmethod -- def tearDownClass(cls_): -- sys.modules.clear() -- sys.modules.update(cls_._save_sys_modules) -- cls.setUpClass = setUpClass -- cls.tearDownClass = tearDownClass -- tests.addTests(loader.loadTestsFromTestCase(cls)) -+ -+ @functools.wraps(cls, updated=()) -+ class Wrapper(cls): -+ @classmethod -+ def setUpClass(cls_, module=module): -+ cls_._save_sys_modules = sys.modules.copy() -+ sys.modules[TESTS] = module -+ sys.modules['datetime'] = module.datetime_module -+ if hasattr(module, '_pydatetime'): -+ sys.modules['_pydatetime'] = module._pydatetime -+ sys.modules['_strptime'] = module._strptime -+ super().setUpClass() -+ -+ @classmethod -+ def tearDownClass(cls_): -+ super().tearDownClass() -+ sys.modules.clear() -+ sys.modules.update(cls_._save_sys_modules) -+ -+ tests.addTests(loader.loadTestsFromTestCase(Wrapper)) - return tests - - -diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py -index fbc6ce8282d..dfe17d4724c 100644 ---- a/Lib/test/test_dict.py -+++ b/Lib/test/test_dict.py -@@ -1476,6 +1476,24 @@ - gc.collect() - self.assertTrue(gc.is_tracked(next(it))) - -+ def test_store_evilattr(self): -+ class EvilAttr: -+ def __init__(self, d): -+ self.d = d -+ -+ def __del__(self): -+ if 'attr' in self.d: -+ del self.d['attr'] -+ gc.collect() -+ -+ class Obj: -+ pass -+ -+ obj = Obj() -+ obj.__dict__ = {} -+ for _ in range(10): -+ obj.attr = EvilAttr(obj.__dict__) -+ - def test_str_nonstr(self): - # cpython uses a different lookup function if the dict only contains - # `str` keys. 
Make sure the unoptimized path is used when a non-`str` -diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py -index 472e3dfa0d8..26b56dac503 100644 ---- a/Lib/test/test_dictcomps.py -+++ b/Lib/test/test_dictcomps.py -@@ -1,5 +1,8 @@ -+import traceback import unittest - -+from test.support import BrokenIter -+ - # For scope testing. - g = "Global variable" - -@@ -127,6 +130,41 @@ - self.assertEqual({i: i*i for i in [*range(4)]}, expected) - self.assertEqual({i: i*i for i in (*range(4),)}, expected) - -+ def test_exception_locations(self): -+ # The location of an exception raised from __init__ or -+ # __next__ should should be the iterator expression -+ def init_raises(): -+ try: -+ {x:x for x in BrokenIter(init_raises=True)} -+ except Exception as e: -+ return e -+ -+ def next_raises(): -+ try: -+ {x:x for x in BrokenIter(next_raises=True)} -+ except Exception as e: -+ return e -+ -+ def iter_raises(): -+ try: -+ {x:x for x in BrokenIter(iter_raises=True)} -+ except Exception as e: -+ return e -+ -+ for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), -+ (next_raises, "BrokenIter(next_raises=True)"), -+ (iter_raises, "BrokenIter(iter_raises=True)"), -+ ]: -+ with self.subTest(func): -+ exc = func() -+ f = traceback.extract_tb(exc.__traceback__)[0] -+ indent = 16 -+ co = func.__code__ -+ self.assertEqual(f.lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.end_lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], -+ expected) -+ - - if __name__ == "__main__": - unittest.main() -diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py -index fc8d87974e6..ef8aa0d53c5 100644 ---- a/Lib/test/test_email/test_email.py -+++ b/Lib/test/test_email/test_email.py -@@ -16,6 +16,7 @@ - - import email - import email.policy -+import email.utils - - from email.charset import Charset - from email.generator import Generator, DecodedGenerator, BytesGenerator -@@ -3352,15 +3353,137 @@ - ], - ) - -+ def test_parsing_errors(self): -+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056""" -+ alice = 'alice@example.org' -+ bob = 'bob@example.com' -+ empty = ('', '') -+ -+ # Test utils.getaddresses() and utils.parseaddr() on malformed email -+ # addresses: default behavior (strict=True) rejects malformed address, -+ # and strict=False which tolerates malformed address. -+ for invalid_separator, expected_non_strict in ( -+ ('(', [(f'<{bob}>', alice)]), -+ (')', [('', alice), empty, ('', bob)]), -+ ('<', [('', alice), empty, ('', bob), empty]), -+ ('>', [('', alice), empty, ('', bob)]), -+ ('[', [('', f'{alice}[<{bob}>]')]), -+ (']', [('', alice), empty, ('', bob)]), -+ ('@', [empty, empty, ('', bob)]), -+ (';', [('', alice), empty, ('', bob)]), -+ (':', [('', alice), ('', bob)]), -+ ('.', [('', alice + '.'), ('', bob)]), -+ ('"', [('', alice), ('', f'<{bob}>')]), -+ ): -+ address = f'{alice}{invalid_separator}<{bob}>' -+ with self.subTest(address=address): -+ self.assertEqual(utils.getaddresses([address]), -+ [empty]) -+ self.assertEqual(utils.getaddresses([address], strict=False), -+ expected_non_strict) -+ -+ self.assertEqual(utils.parseaddr([address]), -+ empty) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Comma (',') is treated differently depending on strict parameter. -+ # Comma without quotes. 
-+ address = f'{alice},<{bob}>' -+ self.assertEqual(utils.getaddresses([address]), -+ [('', alice), ('', bob)]) -+ self.assertEqual(utils.getaddresses([address], strict=False), -+ [('', alice), ('', bob)]) -+ self.assertEqual(utils.parseaddr([address]), -+ empty) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Real name between quotes containing comma. -+ address = '"Alice, alice@example.org" ' -+ expected_strict = ('Alice, alice@example.org', 'bob@example.com') -+ self.assertEqual(utils.getaddresses([address]), [expected_strict]) -+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict]) -+ self.assertEqual(utils.parseaddr([address]), expected_strict) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Valid parenthesis in comments. -+ address = 'alice@example.org (Alice)' -+ expected_strict = ('Alice', 'alice@example.org') -+ self.assertEqual(utils.getaddresses([address]), [expected_strict]) -+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict]) -+ self.assertEqual(utils.parseaddr([address]), expected_strict) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Invalid parenthesis in comments. -+ address = 'alice@example.org )Alice(' -+ self.assertEqual(utils.getaddresses([address]), [empty]) -+ self.assertEqual(utils.getaddresses([address], strict=False), -+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')]) -+ self.assertEqual(utils.parseaddr([address]), empty) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Two addresses with quotes separated by comma. -+ address = '"Jane Doe" , "John Doe" ' -+ self.assertEqual(utils.getaddresses([address]), -+ [('Jane Doe', 'jane@example.net'), -+ ('John Doe', 'john@example.net')]) -+ self.assertEqual(utils.getaddresses([address], strict=False), -+ [('Jane Doe', 'jane@example.net'), -+ ('John Doe', 'john@example.net')]) -+ self.assertEqual(utils.parseaddr([address]), empty) -+ self.assertEqual(utils.parseaddr([address], strict=False), -+ ('', address)) -+ -+ # Test email.utils.supports_strict_parsing attribute -+ self.assertEqual(email.utils.supports_strict_parsing, True) -+ - def test_getaddresses_nasty(self): -- eq = self.assertEqual -- eq(utils.getaddresses(['foo: ;']), [('', '')]) -- eq(utils.getaddresses( -- ['[]*-- =~$']), -- [('', ''), ('', ''), ('', '*--')]) -- eq(utils.getaddresses( -- ['foo: ;', '"Jason R. Mastaler" ']), -- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]) -+ for addresses, expected in ( -+ (['"Sürname, Firstname" '], -+ [('Sürname, Firstname', 'to@example.com')]), -+ -+ (['foo: ;'], -+ [('', '')]), -+ -+ (['foo: ;', '"Jason R. Mastaler" '], -+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]), -+ -+ ([r'Pete(A nice \) chap) '], -+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]), -+ -+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'], -+ [('', '')]), -+ -+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . 
example'], -+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]), -+ -+ (['John Doe '], -+ [('John Doe (comment)', 'jdoe@machine.example')]), -+ -+ (['"Mary Smith: Personal Account" '], -+ [('Mary Smith: Personal Account', 'smith@home.example')]), -+ -+ (['Undisclosed recipients:;'], -+ [('', '')]), -+ -+ ([r', "Giant; \"Big\" Box" '], -+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]), -+ ): -+ with self.subTest(addresses=addresses): -+ self.assertEqual(utils.getaddresses(addresses), -+ expected) -+ self.assertEqual(utils.getaddresses(addresses, strict=False), -+ expected) -+ -+ addresses = ['[]*-- =~$'] -+ self.assertEqual(utils.getaddresses(addresses), -+ [('', '')]) -+ self.assertEqual(utils.getaddresses(addresses, strict=False), -+ [('', ''), ('', ''), ('', '*--')]) - - def test_getaddresses_embedded_comment(self): - """Test proper handling of a nested comment""" -@@ -3551,6 +3674,54 @@ - m = cls(*constructor, policy=email.policy.default) - self.assertIs(m.policy, email.policy.default) - -+ def test_iter_escaped_chars(self): -+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')), -+ [(0, 'a'), -+ (2, '\\\\'), -+ (3, 'b'), -+ (5, '\\"'), -+ (6, 'c'), -+ (8, '\\\\'), -+ (9, '"'), -+ (10, 'd')]) -+ self.assertEqual(list(utils._iter_escaped_chars('a\\')), -+ [(0, 'a'), (1, '\\')]) -+ -+ def test_strip_quoted_realnames(self): -+ def check(addr, expected): -+ self.assertEqual(utils._strip_quoted_realnames(addr), expected) -+ -+ check('"Jane Doe" , "John Doe" ', -+ ' , ') -+ check(r'"Jane \"Doe\"." ', -+ ' ') -+ -+ # special cases -+ check(r'before"name"after', 'beforeafter') -+ check(r'before"name"', 'before') -+ check(r'b"name"', 'b') # single char -+ check(r'"name"after', 'after') -+ check(r'"name"a', 'a') # single char -+ check(r'"name"', '') -+ -+ # no change -+ for addr in ( -+ 'Jane Doe , John Doe ', -+ 'lone " quote', -+ ): -+ self.assertEqual(utils._strip_quoted_realnames(addr), addr) -+ -+ -+ def test_check_parenthesis(self): -+ addr = 'alice@example.net' -+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)')) -+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice(')) -+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))')) -+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)')) -+ -+ # Ignore real name between quotes -+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}')) -+ - - # Test the iterator/generators - class TestIterators(TestEmailBase): -diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py -index bfff1051262..c75a842c335 100644 ---- a/Lib/test/test_email/test_generator.py -+++ b/Lib/test/test_email/test_generator.py -@@ -6,6 +6,7 @@ - from email.generator import Generator, BytesGenerator - from email.headerregistry import Address - from email import policy -+import email.errors - from test.test_email import TestEmailBase, parameterize - - -@@ -249,6 +250,44 @@ - g.flatten(msg) - self.assertEqual(s.getvalue(), self.typ(expected)) - -+ def test_keep_encoded_newlines(self): -+ msg = self.msgmaker(self.typ(textwrap.dedent("""\ -+ To: nobody -+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com -+ -+ None -+ """))) -+ expected = textwrap.dedent("""\ -+ To: nobody -+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com -+ -+ None -+ """) -+ s = self.ioclass() -+ g = self.genclass(s, policy=self.policy.clone(max_line_length=80)) -+ g.flatten(msg) -+ self.assertEqual(s.getvalue(), self.typ(expected)) -+ -+ def 
test_keep_long_encoded_newlines(self): -+ msg = self.msgmaker(self.typ(textwrap.dedent("""\ -+ To: nobody -+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com -+ -+ None -+ """))) -+ expected = textwrap.dedent("""\ -+ To: nobody -+ Subject: Bad subject -+ =?utf-8?q?=0A?=Bcc: -+ injection@example.com -+ -+ None -+ """) -+ s = self.ioclass() -+ g = self.genclass(s, policy=self.policy.clone(max_line_length=30)) -+ g.flatten(msg) -+ self.assertEqual(s.getvalue(), self.typ(expected)) -+ - - class TestGenerator(TestGeneratorBase, TestEmailBase): - -@@ -273,6 +312,29 @@ - g.flatten(msg) - self.assertEqual(s.getvalue(), self.typ(expected)) - -+ def test_verify_generated_headers(self): -+ """gh-121650: by default the generator prevents header injection""" -+ class LiteralHeader(str): -+ name = 'Header' -+ def fold(self, **kwargs): -+ return self -+ -+ for text in ( -+ 'Value\r\nBad Injection\r\n', -+ 'NoNewLine' -+ ): -+ with self.subTest(text=text): -+ message = message_from_string( -+ "Header: Value\r\n\r\nBody", -+ policy=self.policy, -+ ) -+ -+ del message['Header'] -+ message['Header'] = LiteralHeader(text) -+ -+ with self.assertRaises(email.errors.HeaderWriteError): -+ message.as_string() -+ - - class TestBytesGenerator(TestGeneratorBase, TestEmailBase): - -@@ -294,6 +356,19 @@ - g.flatten(msg) - self.assertEqual(s.getvalue(), expected) - -+ def test_defaults_handle_spaces_when_encoded_words_is_folded_in_middle(self): -+ source = ('A very long long long long long long long long long long long long ' -+ 'long long long long long long long long long long long súmmäry') -+ expected = ('Subject: A very long long long long long long long long long long long long\n' -+ ' long long long long long long long long long long long =?utf-8?q?s=C3=BAmm?=\n' -+ ' =?utf-8?q?=C3=A4ry?=\n\n').encode('ascii') -+ msg = EmailMessage() -+ msg['Subject'] = source -+ s = io.BytesIO() -+ g = BytesGenerator(s) -+ g.flatten(msg) -+ self.assertEqual(s.getvalue(), expected) -+ - def test_defaults_handle_spaces_at_start_of_subject(self): - source = " Уведомление" - expected = b"Subject: =?utf-8?b?0KPQstC10LTQvtC80LvQtdC90LjQtQ==?=\n\n" -diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py -index c6b9c80efe1..baa35fd68e4 100644 ---- a/Lib/test/test_email/test_policy.py -+++ b/Lib/test/test_email/test_policy.py -@@ -26,6 +26,7 @@ - 'raise_on_defect': False, - 'mangle_from_': True, - 'message_factory': None, -+ 'verify_generated_headers': True, - } - # These default values are the ones set on email.policy.default. - # If any of these defaults change, the docs must be updated. -@@ -294,6 +295,31 @@ - with self.assertRaises(email.errors.HeaderParseError): - policy.fold("Subject", subject) - -+ def test_verify_generated_headers(self): -+ """Turning protection off allows header injection""" -+ policy = email.policy.default.clone(verify_generated_headers=False) -+ for text in ( -+ 'Header: Value\r\nBad: Injection\r\n', -+ 'Header: NoNewLine' -+ ): -+ with self.subTest(text=text): -+ message = email.message_from_string( -+ "Header: Value\r\n\r\nBody", -+ policy=policy, -+ ) -+ class LiteralHeader(str): -+ name = 'Header' -+ def fold(self, **kwargs): -+ return self -+ -+ del message['Header'] -+ message['Header'] = LiteralHeader(text) -+ -+ self.assertEqual( -+ message.as_string(), -+ f"{text}\nBody", -+ ) -+ - # XXX: Need subclassing tests. - # For adding subclassed objects, make sure the usual rules apply (subclass - # wins), but that the order still works (right overrides left). 
-diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py -index 24617ab24c6..13713cf37b8 100644 ---- a/Lib/test/test_embed.py -+++ b/Lib/test/test_embed.py -@@ -5,6 +5,7 @@ - - from collections import namedtuple - import contextlib -+import io - import json - import os - import os.path -@@ -389,6 +390,70 @@ - out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) - self.assertEqual(out, '9\n' * INIT_LOOPS) - -+ def test_static_types_inherited_slots(self): -+ script = textwrap.dedent(""" -+ import test.support -+ -+ results = {} -+ def add(cls, slot, own): -+ value = getattr(cls, slot) -+ try: -+ subresults = results[cls.__name__] -+ except KeyError: -+ subresults = results[cls.__name__] = {} -+ subresults[slot] = [repr(value), own] -+ -+ for cls in test.support.iter_builtin_types(): -+ for slot, own in test.support.iter_slot_wrappers(cls): -+ add(cls, slot, own) -+ """) -+ -+ ns = {} -+ exec(script, ns, ns) -+ all_expected = ns['results'] -+ del ns -+ -+ script += textwrap.dedent(""" -+ import json -+ import sys -+ text = json.dumps(results) -+ print(text, file=sys.stderr) -+ """) -+ out, err = self.run_embedded_interpreter( -+ "test_repeated_init_exec", script, script) -+ results = err.split('--- Loop #')[1:] -+ results = [res.rpartition(' ---\n')[-1] for res in results] -+ -+ self.maxDiff = None -+ for i, text in enumerate(results, start=1): -+ result = json.loads(text) -+ for classname, expected in all_expected.items(): -+ with self.subTest(loop=i, cls=classname): -+ slots = result.pop(classname) -+ self.assertEqual(slots, expected) -+ self.assertEqual(result, {}) -+ self.assertEqual(out, '') -+ -+ def test_getargs_reset_static_parser(self): -+ # Test _PyArg_Parser initializations via _PyArg_UnpackKeywords() -+ # https://github.com/python/cpython/issues/122334 -+ code = textwrap.dedent(""" -+ try: -+ import _ssl -+ except ModuleNotFoundError: -+ _ssl = None -+ if _ssl is not None: -+ _ssl.txt2obj(txt='1.3') -+ print('1') -+ -+ import _queue -+ _queue.SimpleQueue().put_nowait(item=None) -+ print('2') -+ """) -+ out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) -+ self.assertEqual(out, '1\n2\n' * INIT_LOOPS) -+ -+ - class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): - maxDiff = 4096 - UTF8_MODE_ERRORS = ('surrogatepass' if MS_WINDOWS else 'surrogateescape') -diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py -index ccba0f91c86..7e3952251c7 100644 ---- a/Lib/test/test_enum.py -+++ b/Lib/test/test_enum.py -@@ -1455,6 +1455,27 @@ - spam = nonmember(SpamEnumIsInner) - self.assertTrue(SpamEnum.spam is SpamEnumIsInner) - -+ def test_using_members_as_nonmember(self): -+ class Example(Flag): -+ A = 1 -+ B = 2 -+ ALL = nonmember(A | B) -+ -+ self.assertEqual(Example.A.value, 1) -+ self.assertEqual(Example.B.value, 2) -+ self.assertEqual(Example.ALL, 3) -+ self.assertIs(type(Example.ALL), int) -+ -+ class Example(Flag): -+ A = auto() -+ B = auto() -+ ALL = nonmember(A | B) -+ -+ self.assertEqual(Example.A.value, 1) -+ self.assertEqual(Example.B.value, 2) -+ self.assertEqual(Example.ALL, 3) -+ self.assertIs(type(Example.ALL), int) -+ - def test_nested_classes_in_enum_with_member(self): - """Support locally-defined nested classes.""" - class Outer(Enum): -diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py -index b738ec6a032..b2a24ca2186 100644 ---- a/Lib/test/test_exceptions.py -+++ b/Lib/test/test_exceptions.py -@@ -1820,6 +1820,8 @@ - except self.failureException: - with support.captured_stderr() as err: - 
sys.__excepthook__(*sys.exc_info()) -+ else: -+ self.fail("assertRaisesRegex should have failed.") - - self.assertIn("aab", err.getvalue()) - -diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py -index d0473500a17..dc817d93bc5 100644 ---- a/Lib/test/test_faulthandler.py -+++ b/Lib/test/test_faulthandler.py -@@ -236,7 +236,7 @@ - faulthandler._sigfpe() - """, - 3, -- 'Floating point exception') -+ 'Floating-point exception') - - @unittest.skipIf(_testcapi is None, 'need _testcapi') - @unittest.skipUnless(hasattr(signal, 'SIGBUS'), 'need signal.SIGBUS') -diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py -index 203dd6fe57d..6d734d05245 100644 ---- a/Lib/test/test_fcntl.py -+++ b/Lib/test/test_fcntl.py -@@ -6,7 +6,9 @@ - import struct - import sys - import unittest --from test.support import verbose, cpython_only, get_pagesize -+from test.support import ( -+ cpython_only, get_pagesize, is_apple, requires_subprocess, verbose -+) - from test.support.import_helper import import_module - from test.support.os_helper import TESTFN, unlink +-from test.support import verbose, cpython_only, get_pagesize ++from test.support import ( ++ cpython_only, get_pagesize, is_apple, requires_subprocess, verbose ++) + from test.support.import_helper import import_module + from test.support.os_helper import TESTFN, unlink @@ -56,8 +58,10 @@ else: @@ -25225,2245 +2356,6 @@ index 203dd6fe57d..6d734d05245 100644 def test_lockf_share(self): self.f = open(TESTFN, 'wb+') cmd = fcntl.LOCK_SH | fcntl.LOCK_NB -diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py -index 9b5ac12bccc..41d6059f8c5 100644 ---- a/Lib/test/test_filecmp.py -+++ b/Lib/test/test_filecmp.py -@@ -111,6 +111,39 @@ - (['file'], ['file2'], []), - "Comparing mismatched directories fails") - -+ def test_cmpfiles_invalid_names(self): -+ # See https://github.com/python/cpython/issues/122400. 
-+ for file, desc in [ -+ ('\x00', 'NUL bytes filename'), -+ (__file__ + '\x00', 'filename with embedded NUL bytes'), -+ ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), -+ ('a' * 1_000_000, 'very long filename'), -+ ]: -+ for other_dir in [self.dir, self.dir_same, self.dir_diff]: -+ with self.subTest(f'cmpfiles: {desc}', other_dir=other_dir): -+ res = filecmp.cmpfiles(self.dir, other_dir, [file]) -+ self.assertTupleEqual(res, ([], [], [file])) -+ -+ def test_dircmp_invalid_names(self): -+ for bad_dir, desc in [ -+ ('\x00', 'NUL bytes dirname'), -+ (f'Top{os.sep}Mid\x00', 'dirname with embedded NUL bytes'), -+ ("\uD834\uDD1E", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), -+ ('a' * 1_000_000, 'very long dirname'), -+ ]: -+ d1 = filecmp.dircmp(self.dir, bad_dir) -+ d2 = filecmp.dircmp(bad_dir, self.dir) -+ for target in [ -+ # attributes where os.listdir() raises OSError or ValueError -+ 'left_list', 'right_list', -+ 'left_only', 'right_only', 'common', -+ ]: -+ with self.subTest(f'dircmp(ok, bad): {desc}', target=target): -+ with self.assertRaises((OSError, ValueError)): -+ getattr(d1, target) -+ with self.subTest(f'dircmp(bad, ok): {desc}', target=target): -+ with self.assertRaises((OSError, ValueError)): -+ getattr(d2, target) - - def _assert_lists(self, actual, expected): - """Assert that two lists are equal, up to ordering.""" -diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py -index 6fa49dbc0b7..5998ee55b97 100644 ---- a/Lib/test/test_format.py -+++ b/Lib/test/test_format.py -@@ -35,7 +35,7 @@ - # when 'limit' is specified, it determines how many characters - # must match exactly; lengths must always match. - # ex: limit=5, '12345678' matches '12345___' -- # (mainly for floating point format tests for which an exact match -+ # (mainly for floating-point format tests for which an exact match - # can't be guaranteed due to rounding and representation errors) - elif output and limit is not None and ( - len(result)!=len(output) or result[:limit]!=output[:limit]): -diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py -index bbfb1b2703a..53200eb7c9c 100644 ---- a/Lib/test/test_fractions.py -+++ b/Lib/test/test_fractions.py -@@ -922,21 +922,21 @@ - self.assertTypedEquals(Root(4) ** F(2, 1), Root(4, F(1))) - self.assertTypedEquals(Root(4) ** F(-2, 1), Root(4, -F(1))) - self.assertTypedEquals(Root(4) ** F(-2, 3), Root(4, -3.0)) -- self.assertEqual(F(3, 2) ** SymbolicReal('X'), SymbolicReal('1.5 ** X')) -+ self.assertEqual(F(3, 2) ** SymbolicReal('X'), SymbolicReal('3/2 ** X')) - self.assertEqual(SymbolicReal('X') ** F(3, 2), SymbolicReal('X ** 1.5')) - -- self.assertTypedEquals(F(3, 2) ** Rect(2, 0), Polar(2.25, 0.0)) -- self.assertTypedEquals(F(1, 1) ** Rect(2, 3), Polar(1.0, 0.0)) -+ self.assertTypedEquals(F(3, 2) ** Rect(2, 0), Polar(F(9,4), 0.0)) -+ self.assertTypedEquals(F(1, 1) ** Rect(2, 3), Polar(F(1), 0.0)) - self.assertTypedEquals(F(3, 2) ** RectComplex(2, 0), Polar(2.25, 0.0)) - self.assertTypedEquals(F(1, 1) ** RectComplex(2, 3), Polar(1.0, 0.0)) - self.assertTypedEquals(Polar(4, 2) ** F(3, 2), Polar(8.0, 3.0)) - self.assertTypedEquals(Polar(4, 2) ** F(3, 1), Polar(64, 6)) - self.assertTypedEquals(Polar(4, 2) ** F(-3, 1), Polar(0.015625, -6)) - self.assertTypedEquals(Polar(4, 2) ** F(-3, 2), Polar(0.125, -3.0)) -- self.assertEqual(F(3, 2) ** SymbolicComplex('X'), SymbolicComplex('1.5 ** X')) -+ self.assertEqual(F(3, 2) ** SymbolicComplex('X'), SymbolicComplex('3/2 ** X')) - self.assertEqual(SymbolicComplex('X') ** F(3, 2), 
SymbolicComplex('X ** 1.5')) - -- self.assertEqual(F(3, 2) ** Symbolic('X'), Symbolic('1.5 ** X')) -+ self.assertEqual(F(3, 2) ** Symbolic('X'), Symbolic('3/2 ** X')) - self.assertEqual(Symbolic('X') ** F(3, 2), Symbolic('X ** 1.5')) - - def testMixingWithDecimal(self): -diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py -index fb364e9c684..7fc37023828 100644 ---- a/Lib/test/test_fstring.py -+++ b/Lib/test/test_fstring.py -@@ -8,6 +8,7 @@ - # Unicode identifiers in tests is allowed by PEP 3131. - - import ast -+import datetime - import os - import re - import types -@@ -18,7 +19,7 @@ - from test.support.os_helper import temp_cwd - from test.support.script_helper import assert_python_failure, assert_python_ok - --a_global = 'global variable' -+a_global = "global variable" - - # You could argue that I'm too strict in looking for specific error - # values with assertRaisesRegex, but without it it's way too easy to -@@ -27,6 +28,7 @@ - # worthwhile tradeoff. When I switched to this method, I found many - # examples where I wasn't testing what I thought I was. - -+ - class TestCase(unittest.TestCase): - def assertAllRaise(self, exception_type, regex, error_strings): - for str in error_strings: -@@ -38,43 +40,45 @@ - # Make sure __format__ is looked up on the type, not the instance. - class X: - def __format__(self, spec): -- return 'class' -+ return "class" - - x = X() - - # Add a bound __format__ method to the 'y' instance, but not - # the 'x' instance. - y = X() -- y.__format__ = types.MethodType(lambda self, spec: 'instance', y) -+ y.__format__ = types.MethodType(lambda self, spec: "instance", y) - -- self.assertEqual(f'{y}', format(y)) -- self.assertEqual(f'{y}', 'class') -+ self.assertEqual(f"{y}", format(y)) -+ self.assertEqual(f"{y}", "class") - self.assertEqual(format(x), format(y)) - - # __format__ is not called this way, but still make sure it - # returns what we expect (so we can make sure we're bypassing - # it). -- self.assertEqual(x.__format__(''), 'class') -- self.assertEqual(y.__format__(''), 'instance') -+ self.assertEqual(x.__format__(""), "class") -+ self.assertEqual(y.__format__(""), "instance") - - # This is how __format__ is actually called. -- self.assertEqual(type(x).__format__(x, ''), 'class') -- self.assertEqual(type(y).__format__(y, ''), 'class') -+ self.assertEqual(type(x).__format__(x, ""), "class") -+ self.assertEqual(type(y).__format__(y, ""), "class") - - def test_ast(self): - # Inspired by http://bugs.python.org/issue24975 - class X: - def __init__(self): - self.called = False -+ - def __call__(self): - self.called = True - return 4 -+ - x = X() - expr = """ - a = 10 - f'{a * x()}'""" - t = ast.parse(expr) -- c = compile(t, '', 'exec') -+ c = compile(t, "", "exec") - - # Make sure x was not called. - self.assertFalse(x.called) -@@ -280,7 +284,6 @@ - self.assertEqual(binop.right.col_offset, 27) - - def test_ast_numbers_fstring_with_formatting(self): -- - t = ast.parse('f"Here is that pesky {xxx:.3f} again"') - self.assertEqual(len(t.body), 1) - self.assertEqual(t.body[0].lineno, 1) -@@ -436,24 +439,12 @@ - x, y = t.body - - # Check the single quoted string offsets first. -- offsets = [ -- (elt.col_offset, elt.end_col_offset) -- for elt in x.value.elts -- ] -- self.assertTrue(all( -- offset == (4, 10) -- for offset in offsets -- )) -+ offsets = [(elt.col_offset, elt.end_col_offset) for elt in x.value.elts] -+ self.assertTrue(all(offset == (4, 10) for offset in offsets)) - - # Check the triple quoted string offsets. 
-- offsets = [ -- (elt.col_offset, elt.end_col_offset) -- for elt in y.value.elts -- ] -- self.assertTrue(all( -- offset == (4, 14) -- for offset in offsets -- )) -+ offsets = [(elt.col_offset, elt.end_col_offset) for elt in y.value.elts] -+ self.assertTrue(all(offset == (4, 14) for offset in offsets)) - - expr = """ - x = ( -@@ -516,463 +507,573 @@ - - def test_docstring(self): - def f(): -- f'''Not a docstring''' -+ f"""Not a docstring""" -+ - self.assertIsNone(f.__doc__) -+ - def g(): -- '''Not a docstring''' \ -- f'' -+ """Not a docstring""" f"" -+ - self.assertIsNone(g.__doc__) - - def test_literal_eval(self): -- with self.assertRaisesRegex(ValueError, 'malformed node or string'): -+ with self.assertRaisesRegex(ValueError, "malformed node or string"): - ast.literal_eval("f'x'") - - def test_ast_compile_time_concat(self): -- x = [''] -+ x = [""] - - expr = """x[0] = 'foo' f'{3}'""" - t = ast.parse(expr) -- c = compile(t, '', 'exec') -+ c = compile(t, "", "exec") - exec(c) -- self.assertEqual(x[0], 'foo3') -+ self.assertEqual(x[0], "foo3") - - def test_compile_time_concat_errors(self): -- self.assertAllRaise(SyntaxError, -- 'cannot mix bytes and nonbytes literals', -- [r"""f'' b''""", -- r"""b'' f''""", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "cannot mix bytes and nonbytes literals", -+ [ -+ r"""f'' b''""", -+ r"""b'' f''""", -+ ], -+ ) - - def test_literal(self): -- self.assertEqual(f'', '') -- self.assertEqual(f'a', 'a') -- self.assertEqual(f' ', ' ') -+ self.assertEqual(f"", "") -+ self.assertEqual(f"a", "a") -+ self.assertEqual(f" ", " ") - - def test_unterminated_string(self): -- self.assertAllRaise(SyntaxError, 'unterminated string', -- [r"""f'{"x'""", -- r"""f'{"x}'""", -- r"""f'{("x'""", -- r"""f'{("x}'""", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "unterminated string", -+ [ -+ r"""f'{"x'""", -+ r"""f'{"x}'""", -+ r"""f'{("x'""", -+ r"""f'{("x}'""", -+ ], -+ ) - - @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") - def test_mismatched_parens(self): -- self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " -- r"does not match opening parenthesis '\('", -- ["f'{((}'", -- ]) -- self.assertAllRaise(SyntaxError, r"closing parenthesis '\)' " -- r"does not match opening parenthesis '\['", -- ["f'{a[4)}'", -- ]) -- self.assertAllRaise(SyntaxError, r"closing parenthesis '\]' " -- r"does not match opening parenthesis '\('", -- ["f'{a(4]}'", -- ]) -- self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " -- r"does not match opening parenthesis '\['", -- ["f'{a[4}'", -- ]) -- self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " -- r"does not match opening parenthesis '\('", -- ["f'{a(4}'", -- ]) -- self.assertRaises(SyntaxError, eval, "f'{" + "("*500 + "}'") -+ self.assertAllRaise( -+ SyntaxError, -+ r"closing parenthesis '\}' " r"does not match opening parenthesis '\('", -+ [ -+ "f'{((}'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ r"closing parenthesis '\)' " r"does not match opening parenthesis '\['", -+ [ -+ "f'{a[4)}'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ r"closing parenthesis '\]' " r"does not match opening parenthesis '\('", -+ [ -+ "f'{a(4]}'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ r"closing parenthesis '\}' " r"does not match opening parenthesis '\['", -+ [ -+ "f'{a[4}'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ r"closing parenthesis '\}' " r"does not match opening parenthesis '\('", -+ [ -+ "f'{a(4}'", -+ ], -+ ) -+ self.assertRaises(SyntaxError, eval, "f'{" + "(" * 
500 + "}'") - - @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") - def test_fstring_nested_too_deeply(self): -- self.assertAllRaise(SyntaxError, -- "f-string: expressions nested too deeply", -- ['f"{1+2:{1+2:{1+1:{1}}}}"']) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expressions nested too deeply", -+ ['f"{1+2:{1+2:{1+1:{1}}}}"'], -+ ) - - def create_nested_fstring(n): - if n == 0: - return "1+1" -- prev = create_nested_fstring(n-1) -+ prev = create_nested_fstring(n - 1) - return f'f"{{{prev}}}"' - -- self.assertAllRaise(SyntaxError, -- "too many nested f-strings", -- [create_nested_fstring(160)]) -+ self.assertAllRaise( -+ SyntaxError, "too many nested f-strings", [create_nested_fstring(160)] -+ ) - - def test_syntax_error_in_nested_fstring(self): - # See gh-104016 for more information on this crash -- self.assertAllRaise(SyntaxError, -- "invalid syntax", -- ['f"{1 1:' + ('{f"1:' * 199)]) -+ self.assertAllRaise( -+ SyntaxError, "invalid syntax", ['f"{1 1:' + ('{f"1:' * 199)] -+ ) - - def test_double_braces(self): -- self.assertEqual(f'{{', '{') -- self.assertEqual(f'a{{', 'a{') -- self.assertEqual(f'{{b', '{b') -- self.assertEqual(f'a{{b', 'a{b') -- self.assertEqual(f'}}', '}') -- self.assertEqual(f'a}}', 'a}') -- self.assertEqual(f'}}b', '}b') -- self.assertEqual(f'a}}b', 'a}b') -- self.assertEqual(f'{{}}', '{}') -- self.assertEqual(f'a{{}}', 'a{}') -- self.assertEqual(f'{{b}}', '{b}') -- self.assertEqual(f'{{}}c', '{}c') -- self.assertEqual(f'a{{b}}', 'a{b}') -- self.assertEqual(f'a{{}}c', 'a{}c') -- self.assertEqual(f'{{b}}c', '{b}c') -- self.assertEqual(f'a{{b}}c', 'a{b}c') -- -- self.assertEqual(f'{{{10}', '{10') -- self.assertEqual(f'}}{10}', '}10') -- self.assertEqual(f'}}{{{10}', '}{10') -- self.assertEqual(f'}}a{{{10}', '}a{10') -- -- self.assertEqual(f'{10}{{', '10{') -- self.assertEqual(f'{10}}}', '10}') -- self.assertEqual(f'{10}}}{{', '10}{') -- self.assertEqual(f'{10}}}a{{' '}', '10}a{}') -+ self.assertEqual(f"{{", "{") -+ self.assertEqual(f"a{{", "a{") -+ self.assertEqual(f"{{b", "{b") -+ self.assertEqual(f"a{{b", "a{b") -+ self.assertEqual(f"}}", "}") -+ self.assertEqual(f"a}}", "a}") -+ self.assertEqual(f"}}b", "}b") -+ self.assertEqual(f"a}}b", "a}b") -+ self.assertEqual(f"{{}}", "{}") -+ self.assertEqual(f"a{{}}", "a{}") -+ self.assertEqual(f"{{b}}", "{b}") -+ self.assertEqual(f"{{}}c", "{}c") -+ self.assertEqual(f"a{{b}}", "a{b}") -+ self.assertEqual(f"a{{}}c", "a{}c") -+ self.assertEqual(f"{{b}}c", "{b}c") -+ self.assertEqual(f"a{{b}}c", "a{b}c") -+ -+ self.assertEqual(f"{{{10}", "{10") -+ self.assertEqual(f"}}{10}", "}10") -+ self.assertEqual(f"}}{{{10}", "}{10") -+ self.assertEqual(f"}}a{{{10}", "}a{10") -+ -+ self.assertEqual(f"{10}{{", "10{") -+ self.assertEqual(f"{10}}}", "10}") -+ self.assertEqual(f"{10}}}{{", "10}{") -+ self.assertEqual(f"{10}}}a{{" "}", "10}a{}") - - # Inside of strings, don't interpret doubled brackets. 
-- self.assertEqual(f'{"{{}}"}', '{{}}') -+ self.assertEqual(f'{"{{}}"}', "{{}}") - -- self.assertAllRaise(TypeError, 'unhashable type', -- ["f'{ {{}} }'", # dict in a set -- ]) -+ self.assertAllRaise( -+ TypeError, -+ "unhashable type", -+ [ -+ "f'{ {{}} }'", # dict in a set -+ ], -+ ) - - def test_compile_time_concat(self): -- x = 'def' -- self.assertEqual('abc' f'## {x}ghi', 'abc## defghi') -- self.assertEqual('abc' f'{x}' 'ghi', 'abcdefghi') -- self.assertEqual('abc' f'{x}' 'gh' f'i{x:4}', 'abcdefghidef ') -- self.assertEqual('{x}' f'{x}', '{x}def') -- self.assertEqual('{x' f'{x}', '{xdef') -- self.assertEqual('{x}' f'{x}', '{x}def') -- self.assertEqual('{{x}}' f'{x}', '{{x}}def') -- self.assertEqual('{{x' f'{x}', '{{xdef') -- self.assertEqual('x}}' f'{x}', 'x}}def') -- self.assertEqual(f'{x}' 'x}}', 'defx}}') -- self.assertEqual(f'{x}' '', 'def') -- self.assertEqual('' f'{x}' '', 'def') -- self.assertEqual('' f'{x}', 'def') -- self.assertEqual(f'{x}' '2', 'def2') -- self.assertEqual('1' f'{x}' '2', '1def2') -- self.assertEqual('1' f'{x}', '1def') -- self.assertEqual(f'{x}' f'-{x}', 'def-def') -- self.assertEqual('' f'', '') -- self.assertEqual('' f'' '', '') -- self.assertEqual('' f'' '' f'', '') -- self.assertEqual(f'', '') -- self.assertEqual(f'' '', '') -- self.assertEqual(f'' '' f'', '') -- self.assertEqual(f'' '' f'' '', '') -+ x = "def" -+ self.assertEqual("abc" f"## {x}ghi", "abc## defghi") -+ self.assertEqual("abc" f"{x}" "ghi", "abcdefghi") -+ self.assertEqual("abc" f"{x}" "gh" f"i{x:4}", "abcdefghidef ") -+ self.assertEqual("{x}" f"{x}", "{x}def") -+ self.assertEqual("{x" f"{x}", "{xdef") -+ self.assertEqual("{x}" f"{x}", "{x}def") -+ self.assertEqual("{{x}}" f"{x}", "{{x}}def") -+ self.assertEqual("{{x" f"{x}", "{{xdef") -+ self.assertEqual("x}}" f"{x}", "x}}def") -+ self.assertEqual(f"{x}" "x}}", "defx}}") -+ self.assertEqual(f"{x}" "", "def") -+ self.assertEqual("" f"{x}" "", "def") -+ self.assertEqual("" f"{x}", "def") -+ self.assertEqual(f"{x}" "2", "def2") -+ self.assertEqual("1" f"{x}" "2", "1def2") -+ self.assertEqual("1" f"{x}", "1def") -+ self.assertEqual(f"{x}" f"-{x}", "def-def") -+ self.assertEqual("" f"", "") -+ self.assertEqual("" f"" "", "") -+ self.assertEqual("" f"" "" f"", "") -+ self.assertEqual(f"", "") -+ self.assertEqual(f"" "", "") -+ self.assertEqual(f"" "" f"", "") -+ self.assertEqual(f"" "" f"" "", "") - - # This is not really [f'{'] + [f'}'] since we treat the inside - # of braces as a purely new context, so it is actually f'{ and - # then eval(' f') (a valid expression) and then }' which would - # constitute a valid f-string. -- self.assertEqual(f'{' f'}', ' f') -+ self.assertEqual(f'{' f'}', " f") - -- self.assertAllRaise(SyntaxError, "expecting '}'", -- ['''f'{3' f"}"''', # can't concat to get a valid f-string -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "expecting '}'", -+ [ -+ '''f'{3' f"}"''', # can't concat to get a valid f-string -+ ], -+ ) - - def test_comments(self): - # These aren't comments, since they're in strings. -- d = {'#': 'hash'} -- self.assertEqual(f'{"#"}', '#') -- self.assertEqual(f'{d["#"]}', 'hash') -- -- self.assertAllRaise(SyntaxError, "'{' was never closed", -- ["f'{1#}'", # error because everything after '#' is a comment -- "f'{#}'", -- "f'one: {1#}'", -- "f'{1# one} {2 this is a comment still#}'", -- ]) -- self.assertAllRaise(SyntaxError, r"f-string: unmatched '\)'", -- ["f'{)#}'", # When wrapped in parens, this becomes -- # '()#)'. Make sure that doesn't compile. 
-- ]) -- self.assertEqual(f'''A complex trick: { -+ d = {"#": "hash"} -+ self.assertEqual(f'{"#"}', "#") -+ self.assertEqual(f'{d["#"]}', "hash") -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "'{' was never closed", -+ [ -+ "f'{1#}'", # error because everything after '#' is a comment -+ "f'{#}'", -+ "f'one: {1#}'", -+ "f'{1# one} {2 this is a comment still#}'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ r"f-string: unmatched '\)'", -+ [ -+ "f'{)#}'", # When wrapped in parens, this becomes -+ # '()#)'. Make sure that doesn't compile. -+ ], -+ ) -+ self.assertEqual( -+ f"""A complex trick: { - 2 # two --}''', 'A complex trick: 2') -- self.assertEqual(f''' -+}""", -+ "A complex trick: 2", -+ ) -+ self.assertEqual( -+ f""" - { - 40 # fourty - + # plus - 2 # two --}''', '\n42') -- self.assertEqual(f''' -+}""", -+ "\n42", -+ ) -+ self.assertEqual( -+ f""" - { - 40 # fourty - + # plus - 2 # two --}''', '\n42') -+}""", -+ "\n42", -+ ) - -- self.assertEqual(f''' -+ self.assertEqual( -+ f""" - # this is not a comment - { # the following operation it's - 3 # this is a number --* 2}''', '\n# this is not a comment\n6') -- self.assertEqual(f''' -+* 2}""", -+ "\n# this is not a comment\n6", -+ ) -+ self.assertEqual( -+ f""" - {# f'a {comment}' - 86 # constant - # nothing more --}''', '\n86') -- -- self.assertAllRaise(SyntaxError, r"f-string: valid expression required before '}'", -- ["""f''' -+}""", -+ "\n86", -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ r"f-string: valid expression required before '}'", -+ [ -+ """f''' - { - # only a comment - }''' --""", # this is equivalent to f'{}' -- ]) -+""", # this is equivalent to f'{}' -+ ], -+ ) - - def test_many_expressions(self): - # Create a string with many expressions in it. Note that - # because we have a space in here as a literal, we're actually - # going to use twice as many ast nodes: one for each literal - # plus one for each expression. -- def build_fstr(n, extra=''): -- return "f'" + ('{x} ' * n) + extra + "'" -+ def build_fstr(n, extra=""): -+ return "f'" + ("{x} " * n) + extra + "'" - -- x = 'X' -+ x = "X" - width = 1 - - # Test around 256. - for i in range(250, 260): -- self.assertEqual(eval(build_fstr(i)), (x+' ')*i) -+ self.assertEqual(eval(build_fstr(i)), (x + " ") * i) - - # Test concatenating 2 largs fstrings. -- self.assertEqual(eval(build_fstr(255)*256), (x+' ')*(255*256)) -+ self.assertEqual(eval(build_fstr(255) * 256), (x + " ") * (255 * 256)) - -- s = build_fstr(253, '{x:{width}} ') -- self.assertEqual(eval(s), (x+' ')*254) -+ s = build_fstr(253, "{x:{width}} ") -+ self.assertEqual(eval(s), (x + " ") * 254) - - # Test lots of expressions and constants, concatenated. 
- s = "f'{1}' 'x' 'y'" * 1024 -- self.assertEqual(eval(s), '1xy' * 1024) -+ self.assertEqual(eval(s), "1xy" * 1024) - - def test_format_specifier_expressions(self): - width = 10 - precision = 4 -- value = decimal.Decimal('12.34567') -- self.assertEqual(f'result: {value:{width}.{precision}}', 'result: 12.35') -- self.assertEqual(f'result: {value:{width!r}.{precision}}', 'result: 12.35') -- self.assertEqual(f'result: {value:{width:0}.{precision:1}}', 'result: 12.35') -- self.assertEqual(f'result: {value:{1}{0:0}.{precision:1}}', 'result: 12.35') -- self.assertEqual(f'result: {value:{ 1}{ 0:0}.{ precision:1}}', 'result: 12.35') -- self.assertEqual(f'{10:#{1}0x}', ' 0xa') -- self.assertEqual(f'{10:{"#"}1{0}{"x"}}', ' 0xa') -- self.assertEqual(f'{-10:-{"#"}1{0}x}', ' -0xa') -- self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', ' -0xa') -- self.assertEqual(f'{10:#{3 != {4:5} and width}x}', ' 0xa') -- self.assertEqual(f'result: {value:{width:{0}}.{precision:1}}', 'result: 12.35') -- -- self.assertAllRaise(SyntaxError, "f-string: expecting ':' or '}'", -- ["""f'{"s"!r{":10"}}'""", -- # This looks like a nested format spec. -- ]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: expecting a valid expression after '{'", -- [# Invalid syntax inside a nested spec. -- "f'{4:{/5}}'", -- ]) -- -- self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character', -- [# No expansion inside conversion or for -- # the : or ! itself. -- """f'{"s"!{"r"}}'""", -- ]) -+ value = decimal.Decimal("12.34567") -+ self.assertEqual(f"result: {value:{width}.{precision}}", "result: 12.35") -+ self.assertEqual(f"result: {value:{width!r}.{precision}}", "result: 12.35") -+ self.assertEqual( -+ f"result: {value:{width:0}.{precision:1}}", "result: 12.35" -+ ) -+ self.assertEqual( -+ f"result: {value:{1}{0:0}.{precision:1}}", "result: 12.35" -+ ) -+ self.assertEqual( -+ f"result: {value:{ 1}{ 0:0}.{ precision:1}}", "result: 12.35" -+ ) -+ self.assertEqual(f"{10:#{1}0x}", " 0xa") -+ self.assertEqual(f'{10:{"#"}1{0}{"x"}}', " 0xa") -+ self.assertEqual(f'{-10:-{"#"}1{0}x}', " -0xa") -+ self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', " -0xa") -+ self.assertEqual(f"{10:#{3 != {4:5} and width}x}", " 0xa") -+ self.assertEqual( -+ f"result: {value:{width:{0}}.{precision:1}}", "result: 12.35" -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting ':' or '}'", -+ [ -+ """f'{"s"!r{":10"}}'""", -+ # This looks like a nested format spec. -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting a valid expression after '{'", -+ [ # Invalid syntax inside a nested spec. -+ "f'{4:{/5}}'", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: invalid conversion character", -+ [ # No expansion inside conversion or for -+ # the : or ! itself. 
-+ """f'{"s"!{"r"}}'""", -+ ], -+ ) - - def test_custom_format_specifier(self): - class CustomFormat: - def __format__(self, format_spec): - return format_spec - -- self.assertEqual(f'{CustomFormat():\n}', '\n') -- self.assertEqual(f'{CustomFormat():\u2603}', '☃') -+ self.assertEqual(f"{CustomFormat():\n}", "\n") -+ self.assertEqual(f"{CustomFormat():\u2603}", "☃") - with self.assertWarns(SyntaxWarning): -- exec(r'f"{F():¯\_(ツ)_/¯}"', {'F': CustomFormat}) -+ exec(r'f"{F():¯\_(ツ)_/¯}"', {"F": CustomFormat}) - - def test_side_effect_order(self): - class X: - def __init__(self): - self.i = 0 -+ - def __format__(self, spec): - self.i += 1 - return str(self.i) - - x = X() -- self.assertEqual(f'{x} {x}', '1 2') -+ self.assertEqual(f"{x} {x}", "1 2") - - def test_missing_expression(self): -- self.assertAllRaise(SyntaxError, -- "f-string: valid expression required before '}'", -- ["f'{}'", -- "f'{ }'" -- "f' {} '", -- "f'{10:{ }}'", -- "f' { } '", -- -- # The Python parser ignores also the following -- # whitespace characters in additional to a space. -- "f'''{\t\f\r\n}'''", -- ]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: valid expression required before '!'", -- ["f'{!r}'", -- "f'{ !r}'", -- "f'{!}'", -- "f'''{\t\f\r\n!a}'''", -- -- # Catch empty expression before the -- # missing closing brace. -- "f'{!'", -- "f'{!s:'", -- -- # Catch empty expression before the -- # invalid conversion. -- "f'{!x}'", -- "f'{ !xr}'", -- "f'{!x:}'", -- "f'{!x:a}'", -- "f'{ !xr:}'", -- "f'{ !xr:a}'", -- ]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: valid expression required before ':'", -- ["f'{:}'", -- "f'{ :!}'", -- "f'{:2}'", -- "f'''{\t\f\r\n:a}'''", -- "f'{:'", -- ]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: valid expression required before '='", -- ["f'{=}'", -- "f'{ =}'", -- "f'{ =:}'", -- "f'{ =!}'", -- "f'''{\t\f\r\n=}'''", -- "f'{='", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: valid expression required before '}'", -+ [ -+ "f'{}'", -+ "f'{ }'" "f' {} '", -+ "f'{10:{ }}'", -+ "f' { } '", -+ # The Python parser ignores also the following -+ # whitespace characters in additional to a space. -+ "f'''{\t\f\r\n}'''", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: valid expression required before '!'", -+ [ -+ "f'{!r}'", -+ "f'{ !r}'", -+ "f'{!}'", -+ "f'''{\t\f\r\n!a}'''", -+ # Catch empty expression before the -+ # missing closing brace. -+ "f'{!'", -+ "f'{!s:'", -+ # Catch empty expression before the -+ # invalid conversion. -+ "f'{!x}'", -+ "f'{ !xr}'", -+ "f'{!x:}'", -+ "f'{!x:a}'", -+ "f'{ !xr:}'", -+ "f'{ !xr:a}'", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: valid expression required before ':'", -+ [ -+ "f'{:}'", -+ "f'{ :!}'", -+ "f'{:2}'", -+ "f'''{\t\f\r\n:a}'''", -+ "f'{:'", -+ "F'{[F'{:'}[F'{:'}]]]", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: valid expression required before '='", -+ [ -+ "f'{=}'", -+ "f'{ =}'", -+ "f'{ =:}'", -+ "f'{ =!}'", -+ "f'''{\t\f\r\n=}'''", -+ "f'{='", -+ ], -+ ) - - # Different error message is raised for other whitespace characters. 
-- self.assertAllRaise(SyntaxError, r"invalid non-printable character U\+00A0", -- ["f'''{\xa0}'''", -- "\xa0", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ r"invalid non-printable character U\+00A0", -+ [ -+ "f'''{\xa0}'''", -+ "\xa0", -+ ], -+ ) - - def test_parens_in_expressions(self): -- self.assertEqual(f'{3,}', '(3,)') -- -- self.assertAllRaise(SyntaxError, -- "f-string: expecting a valid expression after '{'", -- ["f'{,}'", -- ]) -- -- self.assertAllRaise(SyntaxError, r"f-string: unmatched '\)'", -- ["f'{3)+(4}'", -- ]) -+ self.assertEqual(f"{3,}", "(3,)") -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting a valid expression after '{'", -+ [ -+ "f'{,}'", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ r"f-string: unmatched '\)'", -+ [ -+ "f'{3)+(4}'", -+ ], -+ ) - - def test_newlines_before_syntax_error(self): -- self.assertAllRaise(SyntaxError, -- "f-string: expecting a valid expression after '{'", -- ["f'{.}'", "\nf'{.}'", "\n\nf'{.}'"]) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting a valid expression after '{'", -+ ["f'{.}'", "\nf'{.}'", "\n\nf'{.}'"], -+ ) - - def test_backslashes_in_string_part(self): -- self.assertEqual(f'\t', '\t') -- self.assertEqual(r'\t', '\\t') -- self.assertEqual(rf'\t', '\\t') -- self.assertEqual(f'{2}\t', '2\t') -- self.assertEqual(f'{2}\t{3}', '2\t3') -- self.assertEqual(f'\t{3}', '\t3') -- -- self.assertEqual(f'\u0394', '\u0394') -- self.assertEqual(r'\u0394', '\\u0394') -- self.assertEqual(rf'\u0394', '\\u0394') -- self.assertEqual(f'{2}\u0394', '2\u0394') -- self.assertEqual(f'{2}\u0394{3}', '2\u03943') -- self.assertEqual(f'\u0394{3}', '\u03943') -- -- self.assertEqual(f'\U00000394', '\u0394') -- self.assertEqual(r'\U00000394', '\\U00000394') -- self.assertEqual(rf'\U00000394', '\\U00000394') -- self.assertEqual(f'{2}\U00000394', '2\u0394') -- self.assertEqual(f'{2}\U00000394{3}', '2\u03943') -- self.assertEqual(f'\U00000394{3}', '\u03943') -- -- self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}', '\u0394') -- self.assertEqual(f'{2}\N{GREEK CAPITAL LETTER DELTA}', '2\u0394') -- self.assertEqual(f'{2}\N{GREEK CAPITAL LETTER DELTA}{3}', '2\u03943') -- self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}{3}', '\u03943') -- self.assertEqual(f'2\N{GREEK CAPITAL LETTER DELTA}', '2\u0394') -- self.assertEqual(f'2\N{GREEK CAPITAL LETTER DELTA}3', '2\u03943') -- self.assertEqual(f'\N{GREEK CAPITAL LETTER DELTA}3', '\u03943') -- -- self.assertEqual(f'\x20', ' ') -- self.assertEqual(r'\x20', '\\x20') -- self.assertEqual(rf'\x20', '\\x20') -- self.assertEqual(f'{2}\x20', '2 ') -- self.assertEqual(f'{2}\x20{3}', '2 3') -- self.assertEqual(f'\x20{3}', ' 3') -- -- self.assertEqual(f'2\x20', '2 ') -- self.assertEqual(f'2\x203', '2 3') -- self.assertEqual(f'\x203', ' 3') -+ self.assertEqual(f"\t", "\t") -+ self.assertEqual(r"\t", "\\t") -+ self.assertEqual(rf"\t", "\\t") -+ self.assertEqual(f"{2}\t", "2\t") -+ self.assertEqual(f"{2}\t{3}", "2\t3") -+ self.assertEqual(f"\t{3}", "\t3") -+ -+ self.assertEqual(f"\u0394", "\u0394") -+ self.assertEqual(r"\u0394", "\\u0394") -+ self.assertEqual(rf"\u0394", "\\u0394") -+ self.assertEqual(f"{2}\u0394", "2\u0394") -+ self.assertEqual(f"{2}\u0394{3}", "2\u03943") -+ self.assertEqual(f"\u0394{3}", "\u03943") -+ -+ self.assertEqual(f"\U00000394", "\u0394") -+ self.assertEqual(r"\U00000394", "\\U00000394") -+ self.assertEqual(rf"\U00000394", "\\U00000394") -+ self.assertEqual(f"{2}\U00000394", "2\u0394") -+ self.assertEqual(f"{2}\U00000394{3}", "2\u03943") -+ 
self.assertEqual(f"\U00000394{3}", "\u03943") -+ -+ self.assertEqual(f"\N{GREEK CAPITAL LETTER DELTA}", "\u0394") -+ self.assertEqual(f"{2}\N{GREEK CAPITAL LETTER DELTA}", "2\u0394") -+ self.assertEqual(f"{2}\N{GREEK CAPITAL LETTER DELTA}{3}", "2\u03943") -+ self.assertEqual(f"\N{GREEK CAPITAL LETTER DELTA}{3}", "\u03943") -+ self.assertEqual(f"2\N{GREEK CAPITAL LETTER DELTA}", "2\u0394") -+ self.assertEqual(f"2\N{GREEK CAPITAL LETTER DELTA}3", "2\u03943") -+ self.assertEqual(f"\N{GREEK CAPITAL LETTER DELTA}3", "\u03943") -+ -+ self.assertEqual(f"\x20", " ") -+ self.assertEqual(r"\x20", "\\x20") -+ self.assertEqual(rf"\x20", "\\x20") -+ self.assertEqual(f"{2}\x20", "2 ") -+ self.assertEqual(f"{2}\x20{3}", "2 3") -+ self.assertEqual(f"\x20{3}", " 3") -+ -+ self.assertEqual(f"2\x20", "2 ") -+ self.assertEqual(f"2\x203", "2 3") -+ self.assertEqual(f"\x203", " 3") - - with self.assertWarns(SyntaxWarning): # invalid escape sequence - value = eval(r"f'\{6*7}'") -- self.assertEqual(value, '\\42') -+ self.assertEqual(value, "\\42") - with self.assertWarns(SyntaxWarning): # invalid escape sequence - value = eval(r"f'\g'") -- self.assertEqual(value, '\\g') -- self.assertEqual(f'\\{6*7}', '\\42') -- self.assertEqual(fr'\{6*7}', '\\42') -+ self.assertEqual(value, "\\g") -+ self.assertEqual(f"\\{6*7}", "\\42") -+ self.assertEqual(rf"\{6*7}", "\\42") - -- AMPERSAND = 'spam' -+ AMPERSAND = "spam" - # Get the right unicode character (&), or pick up local variable - # depending on the number of backslashes. -- self.assertEqual(f'\N{AMPERSAND}', '&') -- self.assertEqual(f'\\N{AMPERSAND}', '\\Nspam') -- self.assertEqual(fr'\N{AMPERSAND}', '\\Nspam') -- self.assertEqual(f'\\\N{AMPERSAND}', '\\&') -+ self.assertEqual(f"\N{AMPERSAND}", "&") -+ self.assertEqual(f"\\N{AMPERSAND}", "\\Nspam") -+ self.assertEqual(rf"\N{AMPERSAND}", "\\Nspam") -+ self.assertEqual(f"\\\N{AMPERSAND}", "\\&") - - def test_misformed_unicode_character_name(self): - # These test are needed because unicode names are parsed - # differently inside f-strings. -- self.assertAllRaise(SyntaxError, r"\(unicode error\) 'unicodeescape' codec can't decode bytes in position .*: malformed \\N character escape", -- [r"f'\N'", -- r"f'\N '", -- r"f'\N '", # See bpo-46503. -- r"f'\N{'", -- r"f'\N{GREEK CAPITAL LETTER DELTA'", -- -- # Here are the non-f-string versions, -- # which should give the same errors. -- r"'\N'", -- r"'\N '", -- r"'\N '", -- r"'\N{'", -- r"'\N{GREEK CAPITAL LETTER DELTA'", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ r"\(unicode error\) 'unicodeescape' codec can't decode bytes in position .*: malformed \\N character escape", -+ [ -+ r"f'\N'", -+ r"f'\N '", -+ r"f'\N '", # See bpo-46503. -+ r"f'\N{'", -+ r"f'\N{GREEK CAPITAL LETTER DELTA'", -+ # Here are the non-f-string versions, -+ # which should give the same errors. 
-+ r"'\N'", -+ r"'\N '", -+ r"'\N '", -+ r"'\N{'", -+ r"'\N{GREEK CAPITAL LETTER DELTA'", -+ ], -+ ) - - def test_backslashes_in_expression_part(self): -- self.assertEqual(f"{( -+ self.assertEqual( -+ f"{( - 1 + - 2 -- )}", "3") -- -- self.assertEqual("\N{LEFT CURLY BRACKET}", '{') -- self.assertEqual(f'{"\N{LEFT CURLY BRACKET}"}', '{') -- self.assertEqual(rf'{"\N{LEFT CURLY BRACKET}"}', '{') -- -- self.assertAllRaise(SyntaxError, -- "f-string: valid expression required before '}'", -- ["f'{\n}'", -- ]) -+ )}", -+ "3", -+ ) -+ -+ self.assertEqual("\N{LEFT CURLY BRACKET}", "{") -+ self.assertEqual(f'{"\N{LEFT CURLY BRACKET}"}', "{") -+ self.assertEqual(rf'{"\N{LEFT CURLY BRACKET}"}', "{") -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: valid expression required before '}'", -+ [ -+ "f'{\n}'", -+ ], -+ ) - - def test_invalid_backslashes_inside_fstring_context(self): - # All of these variations are invalid python syntax, -@@ -990,23 +1091,27 @@ - r"\\"[0], - ] - ] -- self.assertAllRaise(SyntaxError, 'unexpected character after line continuation', -- cases) -+ self.assertAllRaise( -+ SyntaxError, "unexpected character after line continuation", cases -+ ) - - def test_no_escapes_for_braces(self): - """ - Only literal curly braces begin an expression. - """ - # \x7b is '{'. -- self.assertEqual(f'\x7b1+1}}', '{1+1}') -- self.assertEqual(f'\x7b1+1', '{1+1') -- self.assertEqual(f'\u007b1+1', '{1+1') -- self.assertEqual(f'\N{LEFT CURLY BRACKET}1+1\N{RIGHT CURLY BRACKET}', '{1+1}') -+ self.assertEqual(f"\x7b1+1}}", "{1+1}") -+ self.assertEqual(f"\x7b1+1", "{1+1") -+ self.assertEqual(f"\u007b1+1", "{1+1") -+ self.assertEqual(f"\N{LEFT CURLY BRACKET}1+1\N{RIGHT CURLY BRACKET}", "{1+1}") - - def test_newlines_in_expressions(self): -- self.assertEqual(f'{0}', '0') -- self.assertEqual(rf'''{3+ --4}''', '7') -+ self.assertEqual(f"{0}", "0") -+ self.assertEqual( -+ rf"""{3+ -+4}""", -+ "7", -+ ) - - def test_lambda(self): - x = 5 -@@ -1017,57 +1122,61 @@ - # lambda doesn't work without parens, because the colon - # makes the parser think it's a format_spec - # emit warning if we can match a format_spec -- self.assertAllRaise(SyntaxError, -- "f-string: lambda expressions are not allowed " -- "without parentheses", -- ["f'{lambda x:x}'", -- "f'{lambda :x}'", -- "f'{lambda *arg, :x}'", -- "f'{1, lambda:x}'", -- "f'{lambda x:}'", -- "f'{lambda :}'", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: lambda expressions are not allowed " "without parentheses", -+ [ -+ "f'{lambda x:x}'", -+ "f'{lambda :x}'", -+ "f'{lambda *arg, :x}'", -+ "f'{1, lambda:x}'", -+ "f'{lambda x:}'", -+ "f'{lambda :}'", -+ ], -+ ) - # Ensure the detection of invalid lambdas doesn't trigger detection - # for valid lambdas in the second error pass - with self.assertRaisesRegex(SyntaxError, "invalid syntax"): - compile("lambda name_3=f'{name_4}': {name_3}\n1 $ 1", "", "exec") - - # but don't emit the paren warning in general cases -- with self.assertRaisesRegex(SyntaxError, "f-string: expecting a valid expression after '{'"): -+ with self.assertRaisesRegex( -+ SyntaxError, "f-string: expecting a valid expression after '{'" -+ ): - eval("f'{+ lambda:None}'") - - def test_valid_prefixes(self): -- self.assertEqual(F'{1}', "1") -- self.assertEqual(FR'{2}', "2") -- self.assertEqual(fR'{3}', "3") -+ self.assertEqual(f"{1}", "1") -+ self.assertEqual(Rf"{2}", "2") -+ self.assertEqual(Rf"{3}", "3") - - def test_roundtrip_raw_quotes(self): -- self.assertEqual(fr"\'", "\\'") -- self.assertEqual(fr'\"', '\\"') -- 
self.assertEqual(fr'\"\'', '\\"\\\'') -- self.assertEqual(fr'\'\"', '\\\'\\"') -- self.assertEqual(fr'\"\'\"', '\\"\\\'\\"') -- self.assertEqual(fr'\'\"\'', '\\\'\\"\\\'') -- self.assertEqual(fr'\"\'\"\'', '\\"\\\'\\"\\\'') -+ self.assertEqual(rf"\'", "\\'") -+ self.assertEqual(rf"\"", '\\"') -+ self.assertEqual(rf"\"\'", "\\\"\\'") -+ self.assertEqual(rf"\'\"", "\\'\\\"") -+ self.assertEqual(rf"\"\'\"", '\\"\\\'\\"') -+ self.assertEqual(rf"\'\"\'", "\\'\\\"\\'") -+ self.assertEqual(rf"\"\'\"\'", "\\\"\\'\\\"\\'") - - def test_fstring_backslash_before_double_bracket(self): - deprecated_cases = [ -- (r"f'\{{\}}'", '\\{\\}'), -- (r"f'\{{'", '\\{'), -- (r"f'\{{{1+1}'", '\\{2'), -- (r"f'\}}{1+1}'", '\\}2'), -- (r"f'{1+1}\}}'", '2\\}') -+ (r"f'\{{\}}'", "\\{\\}"), -+ (r"f'\{{'", "\\{"), -+ (r"f'\{{{1+1}'", "\\{2"), -+ (r"f'\}}{1+1}'", "\\}2"), -+ (r"f'{1+1}\}}'", "2\\}"), - ] - for case, expected_result in deprecated_cases: - with self.subTest(case=case, expected_result=expected_result): - with self.assertWarns(SyntaxWarning): - result = eval(case) - self.assertEqual(result, expected_result) -- self.assertEqual(fr'\{{\}}', '\\{\\}') -- self.assertEqual(fr'\{{', '\\{') -- self.assertEqual(fr'\{{{1+1}', '\\{2') -- self.assertEqual(fr'\}}{1+1}', '\\}2') -- self.assertEqual(fr'{1+1}\}}', '2\\}') -+ self.assertEqual(rf"\{{\}}", "\\{\\}") -+ self.assertEqual(rf"\{{", "\\{") -+ self.assertEqual(rf"\{{{1+1}", "\\{2") -+ self.assertEqual(rf"\}}{1+1}", "\\}2") -+ self.assertEqual(rf"{1+1}\}}", "2\\}") - - def test_fstring_backslash_before_double_bracket_warns_once(self): - with self.assertWarns(SyntaxWarning) as w: -@@ -1076,18 +1185,18 @@ - self.assertEqual(w.warnings[0].category, SyntaxWarning) - - def test_fstring_backslash_prefix_raw(self): -- self.assertEqual(f'\\', '\\') -- self.assertEqual(f'\\\\', '\\\\') -- self.assertEqual(fr'\\', r'\\') -- self.assertEqual(fr'\\\\', r'\\\\') -- self.assertEqual(rf'\\', r'\\') -- self.assertEqual(rf'\\\\', r'\\\\') -- self.assertEqual(Rf'\\', R'\\') -- self.assertEqual(Rf'\\\\', R'\\\\') -- self.assertEqual(fR'\\', R'\\') -- self.assertEqual(fR'\\\\', R'\\\\') -- self.assertEqual(FR'\\', R'\\') -- self.assertEqual(FR'\\\\', R'\\\\') -+ self.assertEqual(f"\\", "\\") -+ self.assertEqual(f"\\\\", "\\\\") -+ self.assertEqual(rf"\\", r"\\") -+ self.assertEqual(rf"\\\\", r"\\\\") -+ self.assertEqual(rf"\\", r"\\") -+ self.assertEqual(rf"\\\\", r"\\\\") -+ self.assertEqual(Rf"\\", R"\\") -+ self.assertEqual(Rf"\\\\", R"\\\\") -+ self.assertEqual(Rf"\\", R"\\") -+ self.assertEqual(Rf"\\\\", R"\\\\") -+ self.assertEqual(Rf"\\", R"\\") -+ self.assertEqual(Rf"\\\\", R"\\\\") - - def test_fstring_format_spec_greedy_matching(self): - self.assertEqual(f"{1:}}}", "1}") -@@ -1097,8 +1206,8 @@ - # Not terribly useful, but make sure the yield turns - # a function into a generator - def fn(y): -- f'y:{yield y*2}' -- f'{yield}' -+ f"y:{yield y*2}" -+ f"{yield}" - - g = fn(4) - self.assertEqual(next(g), 8) -@@ -1106,15 +1215,15 @@ - - def test_yield_send(self): - def fn(x): -- yield f'x:{yield (lambda i: x * i)}' -+ yield f"x:{yield (lambda i: x * i)}" - - g = fn(10) - the_lambda = next(g) - self.assertEqual(the_lambda(4), 40) -- self.assertEqual(g.send('string'), 'x:string') -+ self.assertEqual(g.send("string"), "x:string") - - def test_expressions_with_triple_quoted_strings(self): -- self.assertEqual(f"{'''x'''}", 'x') -+ self.assertEqual(f"{'''x'''}", "x") - self.assertEqual(f"{'''eric's'''}", "eric's") - - # Test concatenation within an expression -@@ -1127,263 +1236,302 @@ - - 
def test_multiple_vars(self): - x = 98 -- y = 'abc' -- self.assertEqual(f'{x}{y}', '98abc') -+ y = "abc" -+ self.assertEqual(f"{x}{y}", "98abc") - -- self.assertEqual(f'X{x}{y}', 'X98abc') -- self.assertEqual(f'{x}X{y}', '98Xabc') -- self.assertEqual(f'{x}{y}X', '98abcX') -+ self.assertEqual(f"X{x}{y}", "X98abc") -+ self.assertEqual(f"{x}X{y}", "98Xabc") -+ self.assertEqual(f"{x}{y}X", "98abcX") - -- self.assertEqual(f'X{x}Y{y}', 'X98Yabc') -- self.assertEqual(f'X{x}{y}Y', 'X98abcY') -- self.assertEqual(f'{x}X{y}Y', '98XabcY') -+ self.assertEqual(f"X{x}Y{y}", "X98Yabc") -+ self.assertEqual(f"X{x}{y}Y", "X98abcY") -+ self.assertEqual(f"{x}X{y}Y", "98XabcY") - -- self.assertEqual(f'X{x}Y{y}Z', 'X98YabcZ') -+ self.assertEqual(f"X{x}Y{y}Z", "X98YabcZ") - - def test_closure(self): - def outer(x): - def inner(): -- return f'x:{x}' -+ return f"x:{x}" -+ - return inner - -- self.assertEqual(outer('987')(), 'x:987') -- self.assertEqual(outer(7)(), 'x:7') -+ self.assertEqual(outer("987")(), "x:987") -+ self.assertEqual(outer(7)(), "x:7") - - def test_arguments(self): - y = 2 -+ - def f(x, width): -- return f'x={x*y:{width}}' -+ return f"x={x*y:{width}}" - -- self.assertEqual(f('foo', 10), 'x=foofoo ') -- x = 'bar' -- self.assertEqual(f(10, 10), 'x= 20') -+ self.assertEqual(f("foo", 10), "x=foofoo ") -+ x = "bar" -+ self.assertEqual(f(10, 10), "x= 20") - - def test_locals(self): - value = 123 -- self.assertEqual(f'v:{value}', 'v:123') -+ self.assertEqual(f"v:{value}", "v:123") - - def test_missing_variable(self): - with self.assertRaises(NameError): -- f'v:{value}' -+ f"v:{value}" - - def test_missing_format_spec(self): - class O: - def __format__(self, spec): - if not spec: -- return '*' -+ return "*" - return spec - -- self.assertEqual(f'{O():x}', 'x') -- self.assertEqual(f'{O()}', '*') -- self.assertEqual(f'{O():}', '*') -+ self.assertEqual(f"{O():x}", "x") -+ self.assertEqual(f"{O()}", "*") -+ self.assertEqual(f"{O():}", "*") - -- self.assertEqual(f'{3:}', '3') -- self.assertEqual(f'{3!s:}', '3') -+ self.assertEqual(f"{3:}", "3") -+ self.assertEqual(f"{3!s:}", "3") - - def test_global(self): -- self.assertEqual(f'g:{a_global}', 'g:global variable') -- self.assertEqual(f'g:{a_global!r}', "g:'global variable'") -+ self.assertEqual(f"g:{a_global}", "g:global variable") -+ self.assertEqual(f"g:{a_global!r}", "g:'global variable'") - -- a_local = 'local variable' -- self.assertEqual(f'g:{a_global} l:{a_local}', -- 'g:global variable l:local variable') -- self.assertEqual(f'g:{a_global!r}', -- "g:'global variable'") -- self.assertEqual(f'g:{a_global} l:{a_local!r}', -- "g:global variable l:'local variable'") -+ a_local = "local variable" -+ self.assertEqual( -+ f"g:{a_global} l:{a_local}", "g:global variable l:local variable" -+ ) -+ self.assertEqual(f"g:{a_global!r}", "g:'global variable'") -+ self.assertEqual( -+ f"g:{a_global} l:{a_local!r}", "g:global variable l:'local variable'" -+ ) - -- self.assertIn("module 'unittest' from", f'{unittest}') -+ self.assertIn("module 'unittest' from", f"{unittest}") - - def test_shadowed_global(self): -- a_global = 'really a local' -- self.assertEqual(f'g:{a_global}', 'g:really a local') -- self.assertEqual(f'g:{a_global!r}', "g:'really a local'") -- -- a_local = 'local variable' -- self.assertEqual(f'g:{a_global} l:{a_local}', -- 'g:really a local l:local variable') -- self.assertEqual(f'g:{a_global!r}', -- "g:'really a local'") -- self.assertEqual(f'g:{a_global} l:{a_local!r}', -- "g:really a local l:'local variable'") -+ a_global = "really a local" -+ 
self.assertEqual(f"g:{a_global}", "g:really a local") -+ self.assertEqual(f"g:{a_global!r}", "g:'really a local'") -+ -+ a_local = "local variable" -+ self.assertEqual( -+ f"g:{a_global} l:{a_local}", "g:really a local l:local variable" -+ ) -+ self.assertEqual(f"g:{a_global!r}", "g:'really a local'") -+ self.assertEqual( -+ f"g:{a_global} l:{a_local!r}", "g:really a local l:'local variable'" -+ ) - - def test_call(self): - def foo(x): -- return 'x=' + str(x) -+ return "x=" + str(x) - -- self.assertEqual(f'{foo(10)}', 'x=10') -+ self.assertEqual(f"{foo(10)}", "x=10") - - def test_nested_fstrings(self): - y = 5 -- self.assertEqual(f'{f"{0}"*3}', '000') -- self.assertEqual(f'{f"{y}"*3}', '555') -+ self.assertEqual(f'{f"{0}"*3}', "000") -+ self.assertEqual(f'{f"{y}"*3}', "555") - - def test_invalid_string_prefixes(self): -- single_quote_cases = ["fu''", -- "uf''", -- "Fu''", -- "fU''", -- "Uf''", -- "uF''", -- "ufr''", -- "urf''", -- "fur''", -- "fru''", -- "rfu''", -- "ruf''", -- "FUR''", -- "Fur''", -- "fb''", -- "fB''", -- "Fb''", -- "FB''", -- "bf''", -- "bF''", -- "Bf''", -- "BF''",] -+ single_quote_cases = [ -+ "fu''", -+ "uf''", -+ "Fu''", -+ "fU''", -+ "Uf''", -+ "uF''", -+ "ufr''", -+ "urf''", -+ "fur''", -+ "fru''", -+ "rfu''", -+ "ruf''", -+ "FUR''", -+ "Fur''", -+ "fb''", -+ "fB''", -+ "Fb''", -+ "FB''", -+ "bf''", -+ "bF''", -+ "Bf''", -+ "BF''", -+ ] - double_quote_cases = [case.replace("'", '"') for case in single_quote_cases] -- self.assertAllRaise(SyntaxError, 'invalid syntax', -- single_quote_cases + double_quote_cases) -+ self.assertAllRaise( -+ SyntaxError, "invalid syntax", single_quote_cases + double_quote_cases -+ ) - - def test_leading_trailing_spaces(self): -- self.assertEqual(f'{ 3}', '3') -- self.assertEqual(f'{ 3}', '3') -- self.assertEqual(f'{3 }', '3') -- self.assertEqual(f'{3 }', '3') -+ self.assertEqual(f"{ 3}", "3") -+ self.assertEqual(f"{ 3}", "3") -+ self.assertEqual(f"{3 }", "3") -+ self.assertEqual(f"{3 }", "3") - -- self.assertEqual(f'expr={ {x: y for x, y in [(1, 2), ]}}', -- 'expr={1: 2}') -- self.assertEqual(f'expr={ {x: y for x, y in [(1, 2), ]} }', -- 'expr={1: 2}') -+ self.assertEqual(f"expr={ {x: y for x, y in [(1, 2), ]}}", "expr={1: 2}") -+ self.assertEqual(f"expr={ {x: y for x, y in [(1, 2), ]} }", "expr={1: 2}") - - def test_not_equal(self): - # There's a special test for this because there's a special - # case in the f-string parser to look for != as not ending an - # expression. Normally it would, while looking for !s or !r. - -- self.assertEqual(f'{3!=4}', 'True') -- self.assertEqual(f'{3!=4:}', 'True') -- self.assertEqual(f'{3!=4!s}', 'True') -- self.assertEqual(f'{3!=4!s:.3}', 'Tru') -+ self.assertEqual(f"{3!=4}", "True") -+ self.assertEqual(f"{3!=4:}", "True") -+ self.assertEqual(f"{3!=4!s}", "True") -+ self.assertEqual(f"{3!=4!s:.3}", "Tru") - - def test_equal_equal(self): - # Because an expression ending in = has special meaning, - # there's a special test for ==. Make sure it works. 
- -- self.assertEqual(f'{0==1}', 'False') -+ self.assertEqual(f"{0==1}", "False") - - def test_conversions(self): -- self.assertEqual(f'{3.14:10.10}', ' 3.14') -- self.assertEqual(f'{3.14!s:10.10}', '3.14 ') -- self.assertEqual(f'{3.14!r:10.10}', '3.14 ') -- self.assertEqual(f'{3.14!a:10.10}', '3.14 ') -+ self.assertEqual(f"{3.14:10.10}", " 3.14") -+ self.assertEqual(f"{3.14!s:10.10}", "3.14 ") -+ self.assertEqual(f"{3.14!r:10.10}", "3.14 ") -+ self.assertEqual(f"{3.14!a:10.10}", "3.14 ") - -- self.assertEqual(f'{"a"}', 'a') -+ self.assertEqual(f'{"a"}', "a") - self.assertEqual(f'{"a"!r}', "'a'") - self.assertEqual(f'{"a"!a}', "'a'") - - # Conversions can have trailing whitespace after them since it - # does not provide any significance - self.assertEqual(f"{3!s }", "3") -- self.assertEqual(f'{3.14!s :10.10}', '3.14 ') -+ self.assertEqual(f"{3.14!s :10.10}", "3.14 ") - - # Not a conversion. - self.assertEqual(f'{"a!r"}', "a!r") - - # Not a conversion, but show that ! is allowed in a format spec. -- self.assertEqual(f'{3.14:!<10.10}', '3.14!!!!!!') -- -- self.assertAllRaise(SyntaxError, "f-string: expecting '}'", -- ["f'{3!'", -- "f'{3!s'", -- "f'{3!g'", -- ]) -- -- self.assertAllRaise(SyntaxError, 'f-string: missing conversion character', -- ["f'{3!}'", -- "f'{3!:'", -- "f'{3!:}'", -- ]) -- -- for conv_identifier in 'g', 'A', 'G', 'ä', 'ɐ': -- self.assertAllRaise(SyntaxError, -- "f-string: invalid conversion character %r: " -- "expected 's', 'r', or 'a'" % conv_identifier, -- ["f'{3!" + conv_identifier + "}'"]) -- -- for conv_non_identifier in '3', '!': -- self.assertAllRaise(SyntaxError, -- "f-string: invalid conversion character", -- ["f'{3!" + conv_non_identifier + "}'"]) -- -- for conv in ' s', ' s ': -- self.assertAllRaise(SyntaxError, -- "f-string: conversion type must come right after the" -- " exclamanation mark", -- ["f'{3!" + conv + "}'"]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: invalid conversion character 'ss': " -- "expected 's', 'r', or 'a'", -- ["f'{3!ss}'", -- "f'{3!ss:}'", -- "f'{3!ss:s}'", -- ]) -+ self.assertEqual(f"{3.14:!<10.10}", "3.14!!!!!!") -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting '}'", -+ [ -+ "f'{3!'", -+ "f'{3!s'", -+ "f'{3!g'", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: missing conversion character", -+ [ -+ "f'{3!}'", -+ "f'{3!:'", -+ "f'{3!:}'", -+ ], -+ ) -+ -+ for conv_identifier in "g", "A", "G", "ä", "ɐ": -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: invalid conversion character %r: " -+ "expected 's', 'r', or 'a'" % conv_identifier, -+ ["f'{3!" + conv_identifier + "}'"], -+ ) -+ -+ for conv_non_identifier in "3", "!": -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: invalid conversion character", -+ ["f'{3!" + conv_non_identifier + "}'"], -+ ) -+ -+ for conv in " s", " s ": -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: conversion type must come right after the" -+ " exclamanation mark", -+ ["f'{3!" 
+ conv + "}'"], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: invalid conversion character 'ss': " "expected 's', 'r', or 'a'", -+ [ -+ "f'{3!ss}'", -+ "f'{3!ss:}'", -+ "f'{3!ss:s}'", -+ ], -+ ) - - def test_assignment(self): -- self.assertAllRaise(SyntaxError, r'invalid syntax', -- ["f'' = 3", -- "f'{0}' = x", -- "f'{x}' = x", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ r"invalid syntax", -+ [ -+ "f'' = 3", -+ "f'{0}' = x", -+ "f'{x}' = x", -+ ], -+ ) - - def test_del(self): -- self.assertAllRaise(SyntaxError, 'invalid syntax', -- ["del f''", -- "del '' f''", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "invalid syntax", -+ [ -+ "del f''", -+ "del '' f''", -+ ], -+ ) - - def test_mismatched_braces(self): -- self.assertAllRaise(SyntaxError, "f-string: single '}' is not allowed", -- ["f'{{}'", -- "f'{{}}}'", -- "f'}'", -- "f'x}'", -- "f'x}x'", -- r"f'\u007b}'", -- -- # Can't have { or } in a format spec. -- "f'{3:}>10}'", -- "f'{3:}}>10}'", -- ]) -- -- self.assertAllRaise(SyntaxError, "f-string: expecting '}'", -- ["f'{3'", -- "f'{3!'", -- "f'{3:'", -- "f'{3!s'", -- "f'{3!s:'", -- "f'{3!s:3'", -- "f'x{'", -- "f'x{x'", -- "f'{x'", -- "f'{3:s'", -- "f'{{{'", -- "f'{{}}{'", -- "f'{'", -- "f'{i='", # See gh-93418. -- ]) -- -- self.assertAllRaise(SyntaxError, -- "f-string: expecting a valid expression after '{'", -- ["f'{3:{{>10}'", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: single '}' is not allowed", -+ [ -+ "f'{{}'", -+ "f'{{}}}'", -+ "f'}'", -+ "f'x}'", -+ "f'x}x'", -+ r"f'\u007b}'", -+ # Can't have { or } in a format spec. -+ "f'{3:}>10}'", -+ "f'{3:}}>10}'", -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting '}'", -+ [ -+ "f'{3'", -+ "f'{3!'", -+ "f'{3:'", -+ "f'{3!s'", -+ "f'{3!s:'", -+ "f'{3!s:3'", -+ "f'x{'", -+ "f'x{x'", -+ "f'{x'", -+ "f'{3:s'", -+ "f'{{{'", -+ "f'{{}}{'", -+ "f'{'", -+ "f'{i='", # See gh-93418. -+ ], -+ ) -+ -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting a valid expression after '{'", -+ [ -+ "f'{3:{{>10}'", -+ ], -+ ) - - # But these are just normal strings. 
-- self.assertEqual(f'{"{"}', '{') -- self.assertEqual(f'{"}"}', '}') -- self.assertEqual(f'{3:{"}"}>10}', '}}}}}}}}}3') -- self.assertEqual(f'{2:{"{"}>10}', '{{{{{{{{{2') -+ self.assertEqual(f'{"{"}', "{") -+ self.assertEqual(f'{"}"}', "}") -+ self.assertEqual(f'{3:{"}"}>10}', "}}}}}}}}}3") -+ self.assertEqual(f'{2:{"{"}>10}', "{{{{{{{{{2") - - def test_if_conditional(self): - # There's special logic in compile.c to test if the -@@ -1392,7 +1540,7 @@ - - def test_fstring(x, expected): - flag = 0 -- if f'{x}': -+ if f"{x}": - flag = 1 - else: - flag = 2 -@@ -1400,7 +1548,7 @@ - - def test_concat_empty(x, expected): - flag = 0 -- if '' f'{x}': -+ if "" f"{x}": - flag = 1 - else: - flag = 2 -@@ -1408,137 +1556,149 @@ - - def test_concat_non_empty(x, expected): - flag = 0 -- if ' ' f'{x}': -+ if " " f"{x}": - flag = 1 - else: - flag = 2 - self.assertEqual(flag, expected) - -- test_fstring('', 2) -- test_fstring(' ', 1) -+ test_fstring("", 2) -+ test_fstring(" ", 1) - -- test_concat_empty('', 2) -- test_concat_empty(' ', 1) -+ test_concat_empty("", 2) -+ test_concat_empty(" ", 1) - -- test_concat_non_empty('', 1) -- test_concat_non_empty(' ', 1) -+ test_concat_non_empty("", 1) -+ test_concat_non_empty(" ", 1) - - def test_empty_format_specifier(self): -- x = 'test' -- self.assertEqual(f'{x}', 'test') -- self.assertEqual(f'{x:}', 'test') -- self.assertEqual(f'{x!s:}', 'test') -- self.assertEqual(f'{x!r:}', "'test'") -+ x = "test" -+ self.assertEqual(f"{x}", "test") -+ self.assertEqual(f"{x:}", "test") -+ self.assertEqual(f"{x!s:}", "test") -+ self.assertEqual(f"{x!r:}", "'test'") - - def test_str_format_differences(self): -- d = {'a': 'string', -- 0: 'integer', -- } -+ d = { -+ "a": "string", -+ 0: "integer", -+ } - a = 0 -- self.assertEqual(f'{d[0]}', 'integer') -- self.assertEqual(f'{d["a"]}', 'string') -- self.assertEqual(f'{d[a]}', 'integer') -- self.assertEqual('{d[a]}'.format(d=d), 'string') -- self.assertEqual('{d[0]}'.format(d=d), 'integer') -+ self.assertEqual(f"{d[0]}", "integer") -+ self.assertEqual(f'{d["a"]}', "string") -+ self.assertEqual(f"{d[a]}", "integer") -+ self.assertEqual("{d[a]}".format(d=d), "string") -+ self.assertEqual("{d[0]}".format(d=d), "integer") - - def test_errors(self): - # see issue 26287 -- self.assertAllRaise(TypeError, 'unsupported', -- [r"f'{(lambda: 0):x}'", -- r"f'{(0,):x}'", -- ]) -- self.assertAllRaise(ValueError, 'Unknown format code', -- [r"f'{1000:j}'", -- r"f'{1000:j}'", -- ]) -+ self.assertAllRaise( -+ TypeError, -+ "unsupported", -+ [ -+ r"f'{(lambda: 0):x}'", -+ r"f'{(0,):x}'", -+ ], -+ ) -+ self.assertAllRaise( -+ ValueError, -+ "Unknown format code", -+ [ -+ r"f'{1000:j}'", -+ r"f'{1000:j}'", -+ ], -+ ) - - def test_filename_in_syntaxerror(self): - # see issue 38964 - with temp_cwd() as cwd: -- file_path = os.path.join(cwd, 't.py') -- with open(file_path, 'w', encoding="utf-8") as f: -- f.write('f"{a b}"') # This generates a SyntaxError -- _, _, stderr = assert_python_failure(file_path, -- PYTHONIOENCODING='ascii') -- self.assertIn(file_path.encode('ascii', 'backslashreplace'), stderr) -+ file_path = os.path.join(cwd, "t.py") -+ with open(file_path, "w", encoding="utf-8") as f: -+ f.write('f"{a b}"') # This generates a SyntaxError -+ _, _, stderr = assert_python_failure(file_path, PYTHONIOENCODING="ascii") -+ self.assertIn(file_path.encode("ascii", "backslashreplace"), stderr) - - def test_loop(self): - for i in range(1000): -- self.assertEqual(f'i:{i}', 'i:' + str(i)) -+ self.assertEqual(f"i:{i}", "i:" + str(i)) - - def test_dict(self): -- d 
= {'"': 'dquote', -- "'": 'squote', -- 'foo': 'bar', -- } -- self.assertEqual(f'''{d["'"]}''', 'squote') -- self.assertEqual(f"""{d['"']}""", 'dquote') -+ d = { -+ '"': "dquote", -+ "'": "squote", -+ "foo": "bar", -+ } -+ self.assertEqual(f"""{d["'"]}""", "squote") -+ self.assertEqual(f"""{d['"']}""", "dquote") - -- self.assertEqual(f'{d["foo"]}', 'bar') -- self.assertEqual(f"{d['foo']}", 'bar') -+ self.assertEqual(f'{d["foo"]}', "bar") -+ self.assertEqual(f"{d['foo']}", "bar") - - def test_backslash_char(self): - # Check eval of a backslash followed by a control char. - # See bpo-30682: this used to raise an assert in pydebug mode. -- self.assertEqual(eval('f"\\\n"'), '') -- self.assertEqual(eval('f"\\\r"'), '') -+ self.assertEqual(eval('f"\\\n"'), "") -+ self.assertEqual(eval('f"\\\r"'), "") - - def test_debug_conversion(self): -- x = 'A string' -- self.assertEqual(f'{x=}', 'x=' + repr(x)) -- self.assertEqual(f'{x =}', 'x =' + repr(x)) -- self.assertEqual(f'{x=!s}', 'x=' + str(x)) -- self.assertEqual(f'{x=!r}', 'x=' + repr(x)) -- self.assertEqual(f'{x=!a}', 'x=' + ascii(x)) -+ x = "A string" -+ self.assertEqual(f"{x=}", "x=" + repr(x)) -+ self.assertEqual(f"{x =}", "x =" + repr(x)) -+ self.assertEqual(f"{x=!s}", "x=" + str(x)) -+ self.assertEqual(f"{x=!r}", "x=" + repr(x)) -+ self.assertEqual(f"{x=!a}", "x=" + ascii(x)) - - x = 2.71828 -- self.assertEqual(f'{x=:.2f}', 'x=' + format(x, '.2f')) -- self.assertEqual(f'{x=:}', 'x=' + format(x, '')) -- self.assertEqual(f'{x=!r:^20}', 'x=' + format(repr(x), '^20')) -- self.assertEqual(f'{x=!s:^20}', 'x=' + format(str(x), '^20')) -- self.assertEqual(f'{x=!a:^20}', 'x=' + format(ascii(x), '^20')) -+ self.assertEqual(f"{x=:.2f}", "x=" + format(x, ".2f")) -+ self.assertEqual(f"{x=:}", "x=" + format(x, "")) -+ self.assertEqual(f"{x=!r:^20}", "x=" + format(repr(x), "^20")) -+ self.assertEqual(f"{x=!s:^20}", "x=" + format(str(x), "^20")) -+ self.assertEqual(f"{x=!a:^20}", "x=" + format(ascii(x), "^20")) - - x = 9 -- self.assertEqual(f'{3*x+15=}', '3*x+15=42') -+ self.assertEqual(f"{3*x+15=}", "3*x+15=42") - - # There is code in ast.c that deals with non-ascii expression values. So, - # use a unicode identifier to trigger that. - tenπ = 31.4 -- self.assertEqual(f'{tenπ=:.2f}', 'tenπ=31.40') -+ self.assertEqual(f"{tenπ=:.2f}", "tenπ=31.40") - - # Also test with Unicode in non-identifiers. -- self.assertEqual(f'{"Σ"=}', '"Σ"=\'Σ\'') -+ self.assertEqual(f'{"Σ"=}', "\"Σ\"='Σ'") - - # Make sure nested fstrings still work. -- self.assertEqual(f'{f"{3.1415=:.1f}":*^20}', '*****3.1415=3.1*****') -+ self.assertEqual(f'{f"{3.1415=:.1f}":*^20}', "*****3.1415=3.1*****") - - # Make sure text before and after an expression with = works - # correctly. -- pi = 'π' -- self.assertEqual(f'alpha α {pi=} ω omega', "alpha α pi='π' ω omega") -+ pi = "π" -+ self.assertEqual(f"alpha α {pi=} ω omega", "alpha α pi='π' ω omega") - - # Check multi-line expressions. -- self.assertEqual(f'''{ -+ self.assertEqual( -+ f"""{ - 3 --=}''', '\n3\n=3') -+=}""", -+ "\n3\n=3", -+ ) - - # Since = is handled specially, make sure all existing uses of - # it still work. 
- -- self.assertEqual(f'{0==1}', 'False') -- self.assertEqual(f'{0!=1}', 'True') -- self.assertEqual(f'{0<=1}', 'True') -- self.assertEqual(f'{0>=1}', 'False') -- self.assertEqual(f'{(x:="5")}', '5') -- self.assertEqual(x, '5') -- self.assertEqual(f'{(x:=5)}', '5') -+ self.assertEqual(f"{0==1}", "False") -+ self.assertEqual(f"{0!=1}", "True") -+ self.assertEqual(f"{0<=1}", "True") -+ self.assertEqual(f"{0>=1}", "False") -+ self.assertEqual(f'{(x:="5")}', "5") -+ self.assertEqual(x, "5") -+ self.assertEqual(f"{(x:=5)}", "5") - self.assertEqual(x, 5) -- self.assertEqual(f'{"="}', '=') -+ self.assertEqual(f'{"="}', "=") - - x = 20 - # This isn't an assignment expression, it's 'x', with a format - # spec of '=10'. See test_walrus: you need to use parens. -- self.assertEqual(f'{x:=10}', ' 20') -+ self.assertEqual(f"{x:=10}", " 20") - - # Test named function parameters, to make sure '=' parsing works - # there. -@@ -1547,40 +1707,53 @@ - oldx = x - x = a - return oldx -+ - x = 0 -- self.assertEqual(f'{f(a="3=")}', '0') -- self.assertEqual(x, '3=') -- self.assertEqual(f'{f(a=4)}', '3=') -+ self.assertEqual(f'{f(a="3=")}', "0") -+ self.assertEqual(x, "3=") -+ self.assertEqual(f"{f(a=4)}", "3=") - self.assertEqual(x, 4) - -+ # Check debug expressions in format spec -+ y = 20 -+ self.assertEqual(f"{2:{y=}}", "yyyyyyyyyyyyyyyyyyy2") -+ self.assertEqual( -+ f"{datetime.datetime.now():h1{y=}h2{y=}h3{y=}}", "h1y=20h2y=20h3y=20" -+ ) -+ - # Make sure __format__ is being called. - class C: - def __format__(self, s): -- return f'FORMAT-{s}' -+ return f"FORMAT-{s}" -+ - def __repr__(self): -- return 'REPR' -+ return "REPR" - -- self.assertEqual(f'{C()=}', 'C()=REPR') -- self.assertEqual(f'{C()=!r}', 'C()=REPR') -- self.assertEqual(f'{C()=:}', 'C()=FORMAT-') -- self.assertEqual(f'{C()=: }', 'C()=FORMAT- ') -- self.assertEqual(f'{C()=:x}', 'C()=FORMAT-x') -- self.assertEqual(f'{C()=!r:*^20}', 'C()=********REPR********') -+ self.assertEqual(f"{C()=}", "C()=REPR") -+ self.assertEqual(f"{C()=!r}", "C()=REPR") -+ self.assertEqual(f"{C()=:}", "C()=FORMAT-") -+ self.assertEqual(f"{C()=: }", "C()=FORMAT- ") -+ self.assertEqual(f"{C()=:x}", "C()=FORMAT-x") -+ self.assertEqual(f"{C()=!r:*^20}", "C()=********REPR********") -+ self.assertEqual(f"{C():{20=}}", "FORMAT-20=20") - - self.assertRaises(SyntaxError, eval, "f'{C=]'") - - # Make sure leading and following text works. -- x = 'foo' -- self.assertEqual(f'X{x=}Y', 'Xx='+repr(x)+'Y') -+ x = "foo" -+ self.assertEqual(f"X{x=}Y", "Xx=" + repr(x) + "Y") - - # Make sure whitespace around the = works. -- self.assertEqual(f'X{x =}Y', 'Xx ='+repr(x)+'Y') -- self.assertEqual(f'X{x= }Y', 'Xx= '+repr(x)+'Y') -- self.assertEqual(f'X{x = }Y', 'Xx = '+repr(x)+'Y') -+ self.assertEqual(f"X{x =}Y", "Xx =" + repr(x) + "Y") -+ self.assertEqual(f"X{x= }Y", "Xx= " + repr(x) + "Y") -+ self.assertEqual(f"X{x = }Y", "Xx = " + repr(x) + "Y") - self.assertEqual(f"sadsd {1 + 1 = :{1 + 1:1d}f}", "sadsd 1 + 1 = 2.000000") - -- self.assertEqual(f"{1+2 = # my comment -- }", '1+2 = \n 3') -+ self.assertEqual( -+ f"{1+2 = # my comment -+ }", -+ "1+2 = \n 3", -+ ) - - # These next lines contains tabs. Backslash escapes don't - # work in f-strings. -@@ -1588,56 +1761,59 @@ - # this will be to dynamically created and exec the f-strings. But - # that's such a hassle I'll save it for another day. For now, convert - # the tabs to spaces just to shut up patchcheck. 
-- #self.assertEqual(f'X{x =}Y', 'Xx\t='+repr(x)+'Y') -- #self.assertEqual(f'X{x = }Y', 'Xx\t=\t'+repr(x)+'Y') -+ # self.assertEqual(f'X{x =}Y', 'Xx\t='+repr(x)+'Y') -+ # self.assertEqual(f'X{x = }Y', 'Xx\t=\t'+repr(x)+'Y') - - def test_walrus(self): - x = 20 - # This isn't an assignment expression, it's 'x', with a format - # spec of '=10'. -- self.assertEqual(f'{x:=10}', ' 20') -+ self.assertEqual(f"{x:=10}", " 20") - - # This is an assignment expression, which requires parens. -- self.assertEqual(f'{(x:=10)}', '10') -+ self.assertEqual(f"{(x:=10)}", "10") - self.assertEqual(x, 10) - - def test_invalid_syntax_error_message(self): -- with self.assertRaisesRegex(SyntaxError, -- "f-string: expecting '=', or '!', or ':', or '}'"): -+ with self.assertRaisesRegex( -+ SyntaxError, "f-string: expecting '=', or '!', or ':', or '}'" -+ ): - compile("f'{a $ b}'", "?", "exec") - - def test_with_two_commas_in_format_specifier(self): - error_msg = re.escape("Cannot specify ',' with ','.") - with self.assertRaisesRegex(ValueError, error_msg): -- f'{1:,,}' -+ f"{1:,,}" - - def test_with_two_underscore_in_format_specifier(self): - error_msg = re.escape("Cannot specify '_' with '_'.") - with self.assertRaisesRegex(ValueError, error_msg): -- f'{1:__}' -+ f"{1:__}" - - def test_with_a_commas_and_an_underscore_in_format_specifier(self): - error_msg = re.escape("Cannot specify both ',' and '_'.") - with self.assertRaisesRegex(ValueError, error_msg): -- f'{1:,_}' -+ f"{1:,_}" - - def test_with_an_underscore_and_a_comma_in_format_specifier(self): - error_msg = re.escape("Cannot specify both ',' and '_'.") - with self.assertRaisesRegex(ValueError, error_msg): -- f'{1:_,}' -+ f"{1:_,}" - - def test_syntax_error_for_starred_expressions(self): - with self.assertRaisesRegex(SyntaxError, "can't use starred expression here"): - compile("f'{*a}'", "?", "exec") - -- with self.assertRaisesRegex(SyntaxError, -- "f-string: expecting a valid expression after '{'"): -+ with self.assertRaisesRegex( -+ SyntaxError, "f-string: expecting a valid expression after '{'" -+ ): - compile("f'{**a}'", "?", "exec") - - def test_not_closing_quotes(self): - self.assertAllRaise(SyntaxError, "unterminated f-string literal", ['f"', "f'"]) -- self.assertAllRaise(SyntaxError, "unterminated triple-quoted f-string literal", -- ['f"""', "f'''"]) -+ self.assertAllRaise( -+ SyntaxError, "unterminated triple-quoted f-string literal", ['f"""', "f'''"] -+ ) - # Ensure that the errors are reported at the correct line number. 
- data = '''\ - x = 1 + 1 -@@ -1653,42 +1829,52 @@ - except SyntaxError as e: - self.assertEqual(e.text, 'z = f"""') - self.assertEqual(e.lineno, 3) -+ - def test_syntax_error_after_debug(self): -- self.assertAllRaise(SyntaxError, "f-string: expecting a valid expression after '{'", -- [ -- "f'{1=}{;'", -- "f'{1=}{+;'", -- "f'{1=}{2}{;'", -- "f'{1=}{3}{;'", -- ]) -- self.assertAllRaise(SyntaxError, "f-string: expecting '=', or '!', or ':', or '}'", -- [ -- "f'{1=}{1;'", -- "f'{1=}{1;}'", -- ]) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting a valid expression after '{'", -+ [ -+ "f'{1=}{;'", -+ "f'{1=}{+;'", -+ "f'{1=}{2}{;'", -+ "f'{1=}{3}{;'", -+ ], -+ ) -+ self.assertAllRaise( -+ SyntaxError, -+ "f-string: expecting '=', or '!', or ':', or '}'", -+ [ -+ "f'{1=}{1;'", -+ "f'{1=}{1;}'", -+ ], -+ ) - - def test_debug_in_file(self): - with temp_cwd(): -- script = 'script.py' -- with open('script.py', 'w') as f: -+ script = "script.py" -+ with open("script.py", "w") as f: - f.write(f"""\ - print(f'''{{ - 3 - =}}''')""") - - _, stdout, _ = assert_python_ok(script) -- self.assertEqual(stdout.decode('utf-8').strip().replace('\r\n', '\n').replace('\r', '\n'), -- "3\n=3") -+ self.assertEqual( -+ stdout.decode("utf-8").strip().replace("\r\n", "\n").replace("\r", "\n"), -+ "3\n=3", -+ ) - - def test_syntax_warning_infinite_recursion_in_file(self): - with temp_cwd(): -- script = 'script.py' -- with open(script, 'w') as f: -+ script = "script.py" -+ with open(script, "w") as f: - f.write(r"print(f'\{1}')") - - _, stdout, stderr = assert_python_ok(script) -- self.assertIn(rb'\1', stdout) -+ self.assertIn(rb"\1", stdout) - self.assertEqual(len(stderr.strip().splitlines()), 2) - --if __name__ == '__main__': -+ -+if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index 4c4a4498d6f..bed0e6d973b 100644 --- a/Lib/test/test_ftplib.py @@ -27492,25 +2384,6 @@ index 4c4a4498d6f..bed0e6d973b 100644 class TestTLS_FTPClass(TestCase): """Specific TLS_FTP class tests.""" -diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py -index 7f1b80a5e51..ff71fd53c08 100644 ---- a/Lib/test/test_functools.py -+++ b/Lib/test/test_functools.py -@@ -564,6 +564,14 @@ - method = functools.partialmethod(func=capture, a=1) - - def test_repr(self): -+ self.assertEqual(repr(vars(self.A)['nothing']), -+ 'functools.partialmethod({})'.format(capture)) -+ self.assertEqual(repr(vars(self.A)['positional']), -+ 'functools.partialmethod({}, 1)'.format(capture)) -+ self.assertEqual(repr(vars(self.A)['keywords']), -+ 'functools.partialmethod({}, a=2)'.format(capture)) -+ self.assertEqual(repr(vars(self.A)['spec_keywords']), -+ 'functools.partialmethod({}, self=1, func=2)'.format(capture)) - self.assertEqual(repr(vars(self.A)['both']), - 'functools.partialmethod({}, 3, b=4)'.format(capture)) - diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 81bb5bb288e..dddf5e8cd93 100644 --- a/Lib/test/test_gc.py @@ -27523,90 +2396,6 @@ index 81bb5bb288e..dddf5e8cd93 100644 @unittest.skipIf(BUILD_WITH_NDEBUG, 'built with -NDEBUG') def test_refcount_errors(self): -diff --git a/Lib/test/test_gdb/__init__.py b/Lib/test/test_gdb/__init__.py -index 99557739af6..0dd72178023 100644 ---- a/Lib/test/test_gdb/__init__.py -+++ b/Lib/test/test_gdb/__init__.py -@@ -24,6 +24,9 @@ - if support.check_cflags_pgo(): - raise unittest.SkipTest("test_gdb is not reliable on PGO builds") - -+if support.check_bolt_optimized(): -+ raise unittest.SkipTest("test_gdb is not reliable on BOLT 
optimized builds") -+ - - def load_tests(*args): - return support.load_package_tests(os.path.dirname(__file__), *args) -diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py -index e0da9152c33..ea6eada07c5 100644 ---- a/Lib/test/test_generators.py -+++ b/Lib/test/test_generators.py -@@ -6,6 +6,7 @@ - import unittest - import weakref - import inspect -+import types - - from test import support - -@@ -89,9 +90,12 @@ - self.assertEqual(gc.garbage, old_garbage) - - def test_lambda_generator(self): -- # Issue #23192: Test that a lambda returning a generator behaves -+ # bpo-23192, gh-119897: Test that a lambda returning a generator behaves - # like the equivalent function - f = lambda: (yield 1) -+ self.assertIsInstance(f(), types.GeneratorType) -+ self.assertEqual(next(f()), 1) -+ - def g(): return (yield 1) - - # test 'yield from' -@@ -450,26 +454,6 @@ - self.assertIsInstance(cm.exception.value, StopIteration) - self.assertEqual(cm.exception.value.value, 2) - -- def test_close_releases_frame_locals(self): -- # See gh-118272 -- -- class Foo: -- pass -- -- f = Foo() -- f_wr = weakref.ref(f) -- -- def genfn(): -- a = f -- yield -- -- g = genfn() -- next(g) -- del f -- g.close() -- support.gc_collect() -- self.assertIsNone(f_wr()) -- - - class GeneratorThrowTest(unittest.TestCase): - -@@ -2161,6 +2145,16 @@ - ... - SyntaxError: 'yield' outside function - -+>>> f=lambda: (yield from (1,2)), (yield from (3,4)) -+Traceback (most recent call last): -+ ... -+SyntaxError: 'yield from' outside function -+ -+>>> yield from [1,2] -+Traceback (most recent call last): -+ ... -+SyntaxError: 'yield from' outside function -+ - >>> def f(): x = yield = y - Traceback (most recent call last): - ... diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py index bdfc5bfe260..515486c76cd 100644 --- a/Lib/test/test_genericpath.py @@ -27647,62 +2436,6 @@ index bdfc5bfe260..515486c76cd 100644 name = os_helper.TESTFN_UNDECODABLE elif os_helper.TESTFN_NONASCII: name = os_helper.TESTFN_NONASCII -diff --git a/Lib/test/test_http_cookies.py b/Lib/test/test_http_cookies.py -index 925c8697f60..8879902a6e2 100644 ---- a/Lib/test/test_http_cookies.py -+++ b/Lib/test/test_http_cookies.py -@@ -5,6 +5,7 @@ - import doctest - from http import cookies - import pickle -+from test import support - - - class CookieTests(unittest.TestCase): -@@ -58,6 +59,43 @@ - for k, v in sorted(case['dict'].items()): - self.assertEqual(C[k].value, v) - -+ def test_unquote(self): -+ cases = [ -+ (r'a="b=\""', 'b="'), -+ (r'a="b=\\"', 'b=\\'), -+ (r'a="b=\="', 'b=='), -+ (r'a="b=\n"', 'b=n'), -+ (r'a="b=\042"', 'b="'), -+ (r'a="b=\134"', 'b=\\'), -+ (r'a="b=\377"', 'b=\xff'), -+ (r'a="b=\400"', 'b=400'), -+ (r'a="b=\42"', 'b=42'), -+ (r'a="b=\\042"', 'b=\\042'), -+ (r'a="b=\\134"', 'b=\\134'), -+ (r'a="b=\\\""', 'b=\\"'), -+ (r'a="b=\\\042"', 'b=\\"'), -+ (r'a="b=\134\""', 'b=\\"'), -+ (r'a="b=\134\042"', 'b=\\"'), -+ ] -+ for encoded, decoded in cases: -+ with self.subTest(encoded): -+ C = cookies.SimpleCookie() -+ C.load(encoded) -+ self.assertEqual(C['a'].value, decoded) -+ -+ @support.requires_resource('cpu') -+ def test_unquote_large(self): -+ n = 10**6 -+ for encoded in r'\\', r'\134': -+ with self.subTest(encoded): -+ data = 'a="b=' + encoded*n + ';"' -+ C = cookies.SimpleCookie() -+ C.load(data) -+ value = C['a'].value -+ self.assertEqual(value[:3], 'b=\\') -+ self.assertEqual(value[-2:], '\\;') -+ self.assertEqual(len(value), n + 3) -+ - def test_load(self): - C = cookies.SimpleCookie() - 
C.load('Customer="WILE_E_COYOTE"; Version=1; Path=/acme') diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index 88d06fe04fb..1dc38dba3d3 100644 --- a/Lib/test/test_httpservers.py @@ -28178,67 +2911,6 @@ index 12f9e43d123..7e8c9c8184a 100644 def test_functionality(self): # Test basic functionality of stuff defined in an extension module. -diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py -index 73e5da2ba92..9339d68f33e 100644 ---- a/Lib/test/test_importlib/fixtures.py -+++ b/Lib/test/test_importlib/fixtures.py -@@ -245,6 +245,44 @@ - build_files(EggInfoPkgPipInstalledNoToplevel.files, prefix=self.site_dir) - - -+class EggInfoPkgPipInstalledExternalDataFiles(OnSysPath, SiteDir): -+ files: FilesSpec = { -+ "egg_with_module_pkg.egg-info": { -+ "PKG-INFO": "Name: egg_with_module-pkg", -+ # SOURCES.txt is made from the source archive, and contains files -+ # (setup.py) that are not present after installation. -+ "SOURCES.txt": """ -+ egg_with_module.py -+ setup.py -+ egg_with_module.json -+ egg_with_module_pkg.egg-info/PKG-INFO -+ egg_with_module_pkg.egg-info/SOURCES.txt -+ egg_with_module_pkg.egg-info/top_level.txt -+ """, -+ # installed-files.txt is written by pip, and is a strictly more -+ # accurate source than SOURCES.txt as to the installed contents of -+ # the package. -+ "installed-files.txt": """ -+ ../../../etc/jupyter/jupyter_notebook_config.d/relative.json -+ /etc/jupyter/jupyter_notebook_config.d/absolute.json -+ ../egg_with_module.py -+ PKG-INFO -+ SOURCES.txt -+ top_level.txt -+ """, -+ # missing top_level.txt (to trigger fallback to installed-files.txt) -+ }, -+ "egg_with_module.py": """ -+ def main(): -+ print("hello world") -+ """, -+ } -+ -+ def setUp(self): -+ super().setUp() -+ build_files(EggInfoPkgPipInstalledExternalDataFiles.files, prefix=self.site_dir) -+ -+ - class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir): - files: FilesSpec = { - "egg_with_no_modules_pkg.egg-info": { -diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py -index 33c6e85ee94..29b261baba4 100644 ---- a/Lib/test/test_importlib/test_metadata_api.py -+++ b/Lib/test/test_importlib/test_metadata_api.py -@@ -29,6 +29,7 @@ - fixtures.EggInfoPkg, - fixtures.EggInfoPkgPipInstalledNoToplevel, - fixtures.EggInfoPkgPipInstalledNoModules, -+ fixtures.EggInfoPkgPipInstalledExternalDataFiles, - fixtures.EggInfoPkgSourcesFallback, - fixtures.DistInfoPkg, - fixtures.DistInfoPkgWithDot, diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index e018af2e16b..743e947e564 100644 --- a/Lib/test/test_importlib/test_util.py @@ -28290,299 +2962,6 @@ index a900cc1dddf..89272484009 100644 filename = EXTENSIONS.name + ext file_path = os.path.join(path, filename) if os.path.exists(file_path): ---- /dev/null -+++ b/Lib/test/test_inspect/inspect_stringized_annotations_pep695.py -@@ -0,0 +1,87 @@ -+from __future__ import annotations -+from typing import Callable, Unpack -+ -+ -+class A[T, *Ts, **P]: -+ x: T -+ y: tuple[*Ts] -+ z: Callable[P, str] -+ -+ -+class B[T, *Ts, **P]: -+ T = int -+ Ts = str -+ P = bytes -+ x: T -+ y: Ts -+ z: P -+ -+ -+Eggs = int -+Spam = str -+ -+ -+class C[Eggs, **Spam]: -+ x: Eggs -+ y: Spam -+ -+ -+def generic_function[T, *Ts, **P]( -+ x: T, *y: Unpack[Ts], z: P.args, zz: P.kwargs -+) -> None: ... 
-+ -+ -+def generic_function_2[Eggs, **Spam](x: Eggs, y: Spam): pass -+ -+ -+class D: -+ Foo = int -+ Bar = str -+ -+ def generic_method[Foo, **Bar]( -+ self, x: Foo, y: Bar -+ ) -> None: ... -+ -+ def generic_method_2[Eggs, **Spam](self, x: Eggs, y: Spam): pass -+ -+ -+# Eggs is `int` in globals, a TypeVar in type_params, and `str` in locals: -+class E[Eggs]: -+ Eggs = str -+ x: Eggs -+ -+ -+ -+def nested(): -+ from types import SimpleNamespace -+ from inspect import get_annotations -+ -+ Eggs = bytes -+ Spam = memoryview -+ -+ -+ class F[Eggs, **Spam]: -+ x: Eggs -+ y: Spam -+ -+ def generic_method[Eggs, **Spam](self, x: Eggs, y: Spam): pass -+ -+ -+ def generic_function[Eggs, **Spam](x: Eggs, y: Spam): pass -+ -+ -+ # Eggs is `int` in globals, `bytes` in the function scope, -+ # a TypeVar in the type_params, and `str` in locals: -+ class G[Eggs]: -+ Eggs = str -+ x: Eggs -+ -+ -+ return SimpleNamespace( -+ F=F, -+ F_annotations=get_annotations(F, eval_str=True), -+ F_meth_annotations=get_annotations(F.generic_method, eval_str=True), -+ G_annotations=get_annotations(G, eval_str=True), -+ generic_func=generic_function, -+ generic_func_annotations=get_annotations(generic_function, eval_str=True) -+ ) -diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py -index 4a6d2b3e366..36978e8217c 100644 ---- a/Lib/test/test_inspect/test_inspect.py -+++ b/Lib/test/test_inspect/test_inspect.py -@@ -17,6 +17,7 @@ - import sys - import types - import textwrap -+from typing import Unpack - import unicodedata - import unittest - import unittest.mock -@@ -40,6 +41,7 @@ - from test.test_inspect import inspect_stock_annotations - from test.test_inspect import inspect_stringized_annotations - from test.test_inspect import inspect_stringized_annotations_2 -+from test.test_inspect import inspect_stringized_annotations_pep695 - - - # Functions tested in this suite: -@@ -199,6 +201,7 @@ - inspect.iscoroutinefunction( - functools.partial(functools.partial( - gen_coroutine_function_example)))) -+ self.assertFalse(inspect.iscoroutinefunction(inspect)) - self.assertFalse(inspect.iscoroutine(gen_coro)) - - self.assertTrue( -@@ -1361,6 +1364,56 @@ - self.assertIn(('f', b.f), inspect.getmembers(b)) - self.assertIn(('f', b.f), inspect.getmembers(b, inspect.ismethod)) - -+ def test_getmembers_custom_dir(self): -+ class CorrectDir: -+ def __init__(self, attr): -+ self.attr = attr -+ def method(self): -+ return self.attr + 1 -+ def __dir__(self): -+ return ['attr', 'method'] -+ -+ cd = CorrectDir(5) -+ self.assertEqual(inspect.getmembers(cd), [ -+ ('attr', 5), -+ ('method', cd.method), -+ ]) -+ self.assertEqual(inspect.getmembers(cd, inspect.ismethod), [ -+ ('method', cd.method), -+ ]) -+ -+ def test_getmembers_custom_broken_dir(self): -+ # inspect.getmembers calls `dir()` on the passed object inside. -+ # if `__dir__` mentions some non-existent attribute, -+ # we still need to return others correctly. -+ class BrokenDir: -+ existing = 1 -+ def method(self): -+ return self.existing + 1 -+ def __dir__(self): -+ return ['method', 'missing', 'existing'] -+ -+ bd = BrokenDir() -+ self.assertEqual(inspect.getmembers(bd), [ -+ ('existing', 1), -+ ('method', bd.method), -+ ]) -+ self.assertEqual(inspect.getmembers(bd, inspect.ismethod), [ -+ ('method', bd.method), -+ ]) -+ -+ def test_getmembers_custom_duplicated_dir(self): -+ # Duplicates in `__dir__` must not fail and return just one result. 
-+ class DuplicatedDir: -+ attr = 1 -+ def __dir__(self): -+ return ['attr', 'attr'] -+ -+ dd = DuplicatedDir() -+ self.assertEqual(inspect.getmembers(dd), [ -+ ('attr', 1), -+ ]) -+ - def test_getmembers_VirtualAttribute(self): - class M(type): - def __getattr__(cls, name): -@@ -1504,6 +1557,117 @@ - self.assertEqual(inspect.get_annotations(isa.MyClassWithLocalAnnotations), {'x': 'mytype'}) - self.assertEqual(inspect.get_annotations(isa.MyClassWithLocalAnnotations, eval_str=True), {'x': int}) - -+ def test_pep695_generic_class_with_future_annotations(self): -+ ann_module695 = inspect_stringized_annotations_pep695 -+ A_annotations = inspect.get_annotations(ann_module695.A, eval_str=True) -+ A_type_params = ann_module695.A.__type_params__ -+ self.assertIs(A_annotations["x"], A_type_params[0]) -+ self.assertEqual(A_annotations["y"].__args__[0], Unpack[A_type_params[1]]) -+ self.assertIs(A_annotations["z"].__args__[0], A_type_params[2]) -+ -+ def test_pep695_generic_class_with_future_annotations_and_local_shadowing(self): -+ B_annotations = inspect.get_annotations( -+ inspect_stringized_annotations_pep695.B, eval_str=True -+ ) -+ self.assertEqual(B_annotations, {"x": int, "y": str, "z": bytes}) -+ -+ def test_pep695_generic_class_with_future_annotations_name_clash_with_global_vars(self): -+ ann_module695 = inspect_stringized_annotations_pep695 -+ C_annotations = inspect.get_annotations(ann_module695.C, eval_str=True) -+ self.assertEqual( -+ set(C_annotations.values()), -+ set(ann_module695.C.__type_params__) -+ ) -+ -+ def test_pep_695_generic_function_with_future_annotations(self): -+ ann_module695 = inspect_stringized_annotations_pep695 -+ generic_func_annotations = inspect.get_annotations( -+ ann_module695.generic_function, eval_str=True -+ ) -+ func_t_params = ann_module695.generic_function.__type_params__ -+ self.assertEqual( -+ generic_func_annotations.keys(), {"x", "y", "z", "zz", "return"} -+ ) -+ self.assertIs(generic_func_annotations["x"], func_t_params[0]) -+ self.assertEqual(generic_func_annotations["y"], Unpack[func_t_params[1]]) -+ self.assertIs(generic_func_annotations["z"].__origin__, func_t_params[2]) -+ self.assertIs(generic_func_annotations["zz"].__origin__, func_t_params[2]) -+ -+ def test_pep_695_generic_function_with_future_annotations_name_clash_with_global_vars(self): -+ self.assertEqual( -+ set( -+ inspect.get_annotations( -+ inspect_stringized_annotations_pep695.generic_function_2, -+ eval_str=True -+ ).values() -+ ), -+ set( -+ inspect_stringized_annotations_pep695.generic_function_2.__type_params__ -+ ) -+ ) -+ -+ def test_pep_695_generic_method_with_future_annotations(self): -+ ann_module695 = inspect_stringized_annotations_pep695 -+ generic_method_annotations = inspect.get_annotations( -+ ann_module695.D.generic_method, eval_str=True -+ ) -+ params = { -+ param.__name__: param -+ for param in ann_module695.D.generic_method.__type_params__ -+ } -+ self.assertEqual( -+ generic_method_annotations, -+ {"x": params["Foo"], "y": params["Bar"], "return": None} -+ ) -+ -+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_vars(self): -+ self.assertEqual( -+ set( -+ inspect.get_annotations( -+ inspect_stringized_annotations_pep695.D.generic_method_2, -+ eval_str=True -+ ).values() -+ ), -+ set( -+ inspect_stringized_annotations_pep695.D.generic_method_2.__type_params__ -+ ) -+ ) -+ -+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_and_local_vars(self): -+ self.assertEqual( -+ inspect.get_annotations( -+ 
inspect_stringized_annotations_pep695.E, eval_str=True -+ ), -+ {"x": str}, -+ ) -+ -+ def test_pep_695_generics_with_future_annotations_nested_in_function(self): -+ results = inspect_stringized_annotations_pep695.nested() -+ -+ self.assertEqual( -+ set(results.F_annotations.values()), -+ set(results.F.__type_params__) -+ ) -+ self.assertEqual( -+ set(results.F_meth_annotations.values()), -+ set(results.F.generic_method.__type_params__) -+ ) -+ self.assertNotEqual( -+ set(results.F_meth_annotations.values()), -+ set(results.F.__type_params__) -+ ) -+ self.assertEqual( -+ set(results.F_meth_annotations.values()).intersection(results.F.__type_params__), -+ set() -+ ) -+ -+ self.assertEqual(results.G_annotations, {"x": str}) -+ -+ self.assertEqual( -+ set(results.generic_func_annotations.values()), -+ set(results.generic_func.__type_params__) -+ ) -+ - - class TestFormatAnnotation(unittest.TestCase): - def test_typing_replacement(self): diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py index 0cd9e721b20..ffa58230425 100644 --- a/Lib/test/test_interpreters.py @@ -28596,7 +2975,7 @@ index 0cd9e721b20..ffa58230425 100644 import subprocess argv = [sys.executable, '-c', '''if True: diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py -index daa40a6ba36..2117c1f18de 100644 +index 8b68653779e..2117c1f18de 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -39,11 +39,9 @@ @@ -28629,137 +3008,6 @@ index daa40a6ba36..2117c1f18de 100644 support.requires( 'largefile', 'test requires %s bytes and a long time to run' % self.LARGE) -@@ -4066,6 +4064,28 @@ - t.write("x"*chunk_size) - self.assertEqual([b"abcdef", b"ghi", b"x"*chunk_size], buf._write_stack) - -+ def test_issue119506(self): -+ chunk_size = 8192 -+ -+ class MockIO(self.MockRawIO): -+ written = False -+ def write(self, data): -+ if not self.written: -+ self.written = True -+ t.write("middle") -+ return super().write(data) -+ -+ buf = MockIO() -+ t = self.TextIOWrapper(buf) -+ t.write("abc") -+ t.write("def") -+ # writing data which size >= chunk_size cause flushing buffer before write. 
-+ t.write("g" * chunk_size) -+ t.flush() -+ -+ self.assertEqual([b"abcdef", b"middle", b"g"*chunk_size], -+ buf._write_stack) -+ - - class PyTextIOWrapperTest(TextIOWrapperTest): - io = pyio -diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py -index 9606d5beab7..1b9f3cf7624 100644 ---- a/Lib/test/test_iter.py -+++ b/Lib/test/test_iter.py -@@ -5,11 +5,13 @@ - from test.support import cpython_only - from test.support.os_helper import TESTFN, unlink - from test.support import check_free_after_iterating, ALWAYS_EQ, NEVER_EQ -+from test.support import BrokenIter - import pickle - import collections.abc - import functools - import contextlib - import builtins -+import traceback - - # Test result of triple loop (too big to inline) - TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2), -@@ -1143,6 +1145,46 @@ - self.assertRaises(TypeError, iter, typ()) - self.assertRaises(ZeroDivisionError, iter, BadIterableClass()) - -+ def test_exception_locations(self): -+ # The location of an exception raised from __init__ or -+ # __next__ should should be the iterator expression -+ -+ def init_raises(): -+ try: -+ for x in BrokenIter(init_raises=True): -+ pass -+ except Exception as e: -+ return e -+ -+ def next_raises(): -+ try: -+ for x in BrokenIter(next_raises=True): -+ pass -+ except Exception as e: -+ return e -+ -+ def iter_raises(): -+ try: -+ for x in BrokenIter(iter_raises=True): -+ pass -+ except Exception as e: -+ return e -+ -+ for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), -+ (next_raises, "BrokenIter(next_raises=True)"), -+ (iter_raises, "BrokenIter(iter_raises=True)"), -+ ]: -+ with self.subTest(func): -+ exc = func() -+ f = traceback.extract_tb(exc.__traceback__)[0] -+ indent = 16 -+ co = func.__code__ -+ self.assertEqual(f.lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.end_lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], -+ expected) -+ -+ - - if __name__ == "__main__": - unittest.main() -diff --git a/Lib/test/test_largefile.py b/Lib/test/test_largefile.py -index 3b0930fe69e..282400c6221 100644 ---- a/Lib/test/test_largefile.py -+++ b/Lib/test/test_largefile.py -@@ -142,6 +142,9 @@ - f.truncate(1) - self.assertEqual(f.tell(), 0) # else pointer moved - f.seek(0) -+ # Verify readall on a truncated file is well behaved. read() -+ # without a size can be unbounded, this should get just the byte -+ # that remains. - self.assertEqual(len(f.read()), 1) # else wasn't truncated - - def test_seekable(self): -@@ -152,6 +155,22 @@ - f.seek(pos) - self.assertTrue(f.seekable()) - -+ @bigmemtest(size=size, memuse=2, dry_run=False) -+ def test_seek_readall(self, _size): -+ # Seek which doesn't change position should readall successfully. -+ with self.open(TESTFN, 'rb') as f: -+ self.assertEqual(f.seek(0, os.SEEK_CUR), 0) -+ self.assertEqual(len(f.read()), size + 1) -+ -+ # Seek which changes (or might change) position should readall -+ # successfully. 
-+ with self.open(TESTFN, 'rb') as f: -+ self.assertEqual(f.seek(20, os.SEEK_SET), 20) -+ self.assertEqual(len(f.read()), size - 19) -+ -+ with self.open(TESTFN, 'rb') as f: -+ self.assertEqual(f.seek(-3, os.SEEK_END), size - 2) -+ self.assertEqual(len(f.read()), 3) - - def skip_no_disk_space(path, required): - def decorator(fun): diff --git a/Lib/test/test_lib2to3/test_parser.py b/Lib/test/test_lib2to3/test_parser.py index 2c798b181fd..e12ed1e9389 100644 --- a/Lib/test/test_lib2to3/test_parser.py @@ -28775,440 +3023,6 @@ index 2c798b181fd..e12ed1e9389 100644 def test_load_grammar_from_subprocess(self): tmpdir = tempfile.mkdtemp() tmpsubdir = os.path.join(tmpdir, 'subdir') -diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py -index e42df3d9496..008f8c8fc17 100644 ---- a/Lib/test/test_linecache.py -+++ b/Lib/test/test_linecache.py -@@ -276,6 +276,37 @@ - self.assertEqual(linecache.getlines(filename, module_globals), - ['source for x.y.z\n']) - -+ def test_invalid_names(self): -+ for name, desc in [ -+ ('\x00', 'NUL bytes filename'), -+ (__file__ + '\x00', 'filename with embedded NUL bytes'), -+ # A filename with surrogate codes. A UnicodeEncodeError is raised -+ # by os.stat() upon querying, which is a subclass of ValueError. -+ ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), -+ # For POSIX platforms, an OSError will be raised but for Windows -+ # platforms, a ValueError is raised due to the path_t converter. -+ # See: https://github.com/python/cpython/issues/122170 -+ ('a' * 1_000_000, 'very long filename'), -+ ]: -+ with self.subTest(f'updatecache: {desc}'): -+ linecache.clearcache() -+ lines = linecache.updatecache(name) -+ self.assertListEqual(lines, []) -+ self.assertNotIn(name, linecache.cache) -+ -+ # hack into the cache (it shouldn't be allowed -+ # but we never know what people do...) 
-+ for key, fullname in [(name, 'ok'), ('key', name), (name, name)]: -+ with self.subTest(f'checkcache: {desc}', -+ key=key, fullname=fullname): -+ linecache.clearcache() -+ linecache.cache[key] = (0, 1234, [], fullname) -+ linecache.checkcache(key) -+ self.assertNotIn(key, linecache.cache) -+ -+ # just to be sure that we did not mess with cache -+ linecache.clearcache() -+ - - class LineCacheInvalidationTests(unittest.TestCase): - def setUp(self): -diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py -index 2969c6e2f98..77264ed7e80 100644 ---- a/Lib/test/test_list.py -+++ b/Lib/test/test_list.py -@@ -229,6 +229,31 @@ - list4 = [1] - self.assertFalse(list3 == list4) - -+ def test_lt_operator_modifying_operand(self): -+ # See gh-120298 -+ class evil: -+ def __lt__(self, other): -+ other.clear() -+ return NotImplemented -+ -+ a = [[evil()]] -+ with self.assertRaises(TypeError): -+ a[0] < a -+ -+ def test_list_index_modifing_operand(self): -+ # See gh-120384 -+ class evil: -+ def __init__(self, lst): -+ self.lst = lst -+ def __iter__(self): -+ yield from self.lst -+ self.lst.clear() -+ -+ lst = list(range(5)) -+ operand = evil(lst) -+ with self.assertRaises(ValueError): -+ lst[::-1] = operand -+ - @cpython_only - def test_preallocation(self): - iterable = [0] * 10 -diff --git a/Lib/test/test_listcomps.py b/Lib/test/test_listcomps.py -index df1debf3521..2065afd455d 100644 ---- a/Lib/test/test_listcomps.py -+++ b/Lib/test/test_listcomps.py -@@ -1,8 +1,11 @@ - import doctest - import textwrap -+import traceback - import types - import unittest - -+from test.support import BrokenIter -+ - - doctests = """ - ########### Tests borrowed from or inspired by test_genexps.py ############ -@@ -168,6 +171,31 @@ - """ - self._check_in_scopes(code, raises=NameError) - -+ def test_references___class___defined(self): -+ code = """ -+ __class__ = 2 -+ res = [__class__ for x in [1]] -+ """ -+ self._check_in_scopes( -+ code, outputs={"res": [2]}, scopes=["module", "function"]) -+ self._check_in_scopes(code, raises=NameError, scopes=["class"]) -+ -+ def test_references___class___enclosing(self): -+ code = """ -+ __class__ = 2 -+ class C: -+ res = [__class__ for x in [1]] -+ res = C.res -+ """ -+ self._check_in_scopes(code, raises=NameError) -+ -+ def test_super_and_class_cell_in_sibling_comps(self): -+ code = """ -+ [super for _ in [1]] -+ [__class__ for _ in [1]] -+ """ -+ self._check_in_scopes(code, raises=NameError) -+ - def test_inner_cell_shadows_outer(self): - code = """ - items = [(lambda: i) for i in range(5)] -@@ -681,6 +709,42 @@ - self._check_in_scopes(code, {"x": 2, "y": [3]}, ns={"x": 3}, scopes=["class"]) - self._check_in_scopes(code, {"x": 2, "y": [2]}, ns={"x": 3}, scopes=["function", "module"]) - -+ def test_exception_locations(self): -+ # The location of an exception raised from __init__ or -+ # __next__ should should be the iterator expression -+ -+ def init_raises(): -+ try: -+ [x for x in BrokenIter(init_raises=True)] -+ except Exception as e: -+ return e -+ -+ def next_raises(): -+ try: -+ [x for x in BrokenIter(next_raises=True)] -+ except Exception as e: -+ return e -+ -+ def iter_raises(): -+ try: -+ [x for x in BrokenIter(iter_raises=True)] -+ except Exception as e: -+ return e -+ -+ for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), -+ (next_raises, "BrokenIter(next_raises=True)"), -+ (iter_raises, "BrokenIter(iter_raises=True)"), -+ ]: -+ with self.subTest(func): -+ exc = func() -+ f = traceback.extract_tb(exc.__traceback__)[0] -+ indent = 16 -+ co = 
func.__code__ -+ self.assertEqual(f.lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.end_lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], -+ expected) -+ - __test__ = {'doctests' : doctests} - - def load_tests(loader, tests, pattern): -diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py -index 6dd1b6f8047..78bcd065ad5 100644 ---- a/Lib/test/test_logging.py -+++ b/Lib/test/test_logging.py -@@ -60,6 +60,7 @@ - import weakref - - from http.server import HTTPServer, BaseHTTPRequestHandler -+from unittest.mock import patch - from urllib.parse import urlparse, parse_qs - from socketserver import (ThreadingUDPServer, DatagramRequestHandler, - ThreadingTCPServer, StreamRequestHandler) -@@ -2381,6 +2382,26 @@ - class CustomQueue(queue.Queue): - pass - -+class CustomQueueProtocol: -+ def __init__(self, maxsize=0): -+ self.queue = queue.Queue(maxsize) -+ -+ def __getattr__(self, attribute): -+ queue = object.__getattribute__(self, 'queue') -+ return getattr(queue, attribute) -+ -+class CustomQueueFakeProtocol(CustomQueueProtocol): -+ # An object implementing the Queue API (incorrect signatures). -+ # The object will be considered a valid queue class since we -+ # do not check the signatures (only callability of methods) -+ # but will NOT be usable in production since a TypeError will -+ # be raised due to a missing argument. -+ def empty(self, x): -+ pass -+ -+class CustomQueueWrongProtocol(CustomQueueProtocol): -+ empty = None -+ - def queueMaker(): - return queue.Queue() - -@@ -3866,19 +3887,18 @@ - self.addCleanup(os.remove, fn) - - @threading_helper.requires_working_threading() -+ @support.requires_subprocess() - def test_config_queue_handler(self): -- q = CustomQueue() -- dq = { -- '()': __name__ + '.CustomQueue', -- 'maxsize': 10 -- } -+ qs = [CustomQueue(), CustomQueueProtocol()] -+ dqs = [{'()': f'{__name__}.{cls}', 'maxsize': 10} -+ for cls in ['CustomQueue', 'CustomQueueProtocol']] - dl = { - '()': __name__ + '.listenerMaker', - 'arg1': None, - 'arg2': None, - 'respect_handler_level': True - } -- qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', dq, q) -+ qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', *dqs, *qs) - lvalues = (None, __name__ + '.CustomListener', dl, CustomListener) - for qspec, lspec in itertools.product(qvalues, lvalues): - self.do_queuehandler_configuration(qspec, lspec) -@@ -3894,12 +3914,101 @@ - msg = str(ctx.exception) - self.assertEqual(msg, "Unable to configure handler 'ah'") - -+ @threading_helper.requires_working_threading() -+ @support.requires_subprocess() -+ @patch("multiprocessing.Manager") -+ def test_config_queue_handler_does_not_create_multiprocessing_manager(self, manager): -+ # gh-120868, gh-121723 -+ -+ from multiprocessing import Queue as MQ -+ -+ q1 = {"()": "queue.Queue", "maxsize": -1} -+ q2 = MQ() -+ q3 = queue.Queue() -+ # CustomQueueFakeProtocol passes the checks but will not be usable -+ # since the signatures are incompatible. Checking the Queue API -+ # without testing the type of the actual queue is a trade-off -+ # between usability and the work we need to do in order to safely -+ # check that the queue object correctly implements the API. 
-+ q4 = CustomQueueFakeProtocol() -+ -+ for qspec in (q1, q2, q3, q4): -+ self.apply_config( -+ { -+ "version": 1, -+ "handlers": { -+ "queue_listener": { -+ "class": "logging.handlers.QueueHandler", -+ "queue": qspec, -+ }, -+ }, -+ } -+ ) -+ manager.assert_not_called() -+ -+ @patch("multiprocessing.Manager") -+ def test_config_queue_handler_invalid_config_does_not_create_multiprocessing_manager(self, manager): -+ # gh-120868, gh-121723 -+ -+ for qspec in [object(), CustomQueueWrongProtocol()]: -+ with self.assertRaises(ValueError): -+ self.apply_config( -+ { -+ "version": 1, -+ "handlers": { -+ "queue_listener": { -+ "class": "logging.handlers.QueueHandler", -+ "queue": qspec, -+ }, -+ }, -+ } -+ ) -+ manager.assert_not_called() -+ -+ @skip_if_tsan_fork -+ @support.requires_subprocess() -+ @unittest.skipUnless(support.Py_DEBUG, "requires a debug build for testing" -+ "assertions in multiprocessing") -+ def test_config_queue_handler_multiprocessing_context(self): -+ # regression test for gh-121723 -+ if support.MS_WINDOWS: -+ start_methods = ['spawn'] -+ else: -+ start_methods = ['spawn', 'fork', 'forkserver'] -+ for start_method in start_methods: -+ with self.subTest(start_method=start_method): -+ ctx = multiprocessing.get_context(start_method) -+ with ctx.Manager() as manager: -+ q = manager.Queue() -+ records = [] -+ # use 1 process and 1 task per child to put 1 record -+ with ctx.Pool(1, initializer=self._mpinit_issue121723, -+ initargs=(q, "text"), maxtasksperchild=1): -+ records.append(q.get(timeout=60)) -+ self.assertTrue(q.empty()) -+ self.assertEqual(len(records), 1) -+ -+ @staticmethod -+ def _mpinit_issue121723(qspec, message_to_log): -+ # static method for pickling support -+ logging.config.dictConfig({ -+ 'version': 1, -+ 'disable_existing_loggers': True, -+ 'handlers': { -+ 'log_to_parent': { -+ 'class': 'logging.handlers.QueueHandler', -+ 'queue': qspec -+ } -+ }, -+ 'root': {'handlers': ['log_to_parent'], 'level': 'DEBUG'} -+ }) -+ # log a message (this creates a record put in the queue) -+ logging.getLogger().info(message_to_log) -+ -+ @support.requires_subprocess() - def test_multiprocessing_queues(self): - # See gh-119819 - -- # will skip test if it's not available -- import_helper.import_module('_multiprocessing') -- - cd = copy.deepcopy(self.config_queue_handler) - from multiprocessing import Queue as MQ, Manager as MM - q1 = MQ() # this can't be pickled -@@ -5956,13 +6065,28 @@ - self.assertEqual(fp.read().strip(), '1') - - class RotatingFileHandlerTest(BaseFileTest): -- @unittest.skipIf(support.is_wasi, "WASI does not have /dev/null.") - def test_should_not_rollover(self): -- # If maxbytes is zero rollover never occurs -+ # If file is empty rollover never occurs -+ rh = logging.handlers.RotatingFileHandler( -+ self.fn, encoding="utf-8", maxBytes=1) -+ self.assertFalse(rh.shouldRollover(None)) -+ rh.close() -+ -+ # If maxBytes is zero rollover never occurs -+ rh = logging.handlers.RotatingFileHandler( -+ self.fn, encoding="utf-8", maxBytes=0) -+ self.assertFalse(rh.shouldRollover(None)) -+ rh.close() -+ -+ with open(self.fn, 'wb') as f: -+ f.write(b'\n') - rh = logging.handlers.RotatingFileHandler( - self.fn, encoding="utf-8", maxBytes=0) - self.assertFalse(rh.shouldRollover(None)) - rh.close() -+ -+ @unittest.skipIf(support.is_wasi, "WASI does not have /dev/null.") -+ def test_should_not_rollover_non_file(self): - # bpo-45401 - test with special file - # We set maxBytes to 1 so that rollover would normally happen, except - # for the check for regular files -@@ 
-5972,18 +6096,47 @@ - rh.close() - - def test_should_rollover(self): -- rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8", maxBytes=1) -+ with open(self.fn, 'wb') as f: -+ f.write(b'\n') -+ rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8", maxBytes=2) - self.assertTrue(rh.shouldRollover(self.next_rec())) - rh.close() - - def test_file_created(self): - # checks that the file is created and assumes it was created - # by us -+ os.unlink(self.fn) - rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8") - rh.emit(self.next_rec()) - self.assertLogFile(self.fn) - rh.close() - -+ def test_max_bytes(self, delay=False): -+ kwargs = {'delay': delay} if delay else {} -+ os.unlink(self.fn) -+ rh = logging.handlers.RotatingFileHandler( -+ self.fn, encoding="utf-8", backupCount=2, maxBytes=100, **kwargs) -+ self.assertIs(os.path.exists(self.fn), not delay) -+ small = logging.makeLogRecord({'msg': 'a'}) -+ large = logging.makeLogRecord({'msg': 'b'*100}) -+ self.assertFalse(rh.shouldRollover(small)) -+ self.assertFalse(rh.shouldRollover(large)) -+ rh.emit(small) -+ self.assertLogFile(self.fn) -+ self.assertFalse(os.path.exists(self.fn + ".1")) -+ self.assertFalse(rh.shouldRollover(small)) -+ self.assertTrue(rh.shouldRollover(large)) -+ rh.emit(large) -+ self.assertTrue(os.path.exists(self.fn)) -+ self.assertLogFile(self.fn + ".1") -+ self.assertFalse(os.path.exists(self.fn + ".2")) -+ self.assertTrue(rh.shouldRollover(small)) -+ self.assertTrue(rh.shouldRollover(large)) -+ rh.close() -+ -+ def test_max_bytes_delay(self): -+ self.test_max_bytes(delay=True) -+ - def test_rollover_filenames(self): - def namer(name): - return name + ".test" -@@ -5992,11 +6145,15 @@ - rh.namer = namer - rh.emit(self.next_rec()) - self.assertLogFile(self.fn) -+ self.assertFalse(os.path.exists(namer(self.fn + ".1"))) - rh.emit(self.next_rec()) - self.assertLogFile(namer(self.fn + ".1")) -+ self.assertFalse(os.path.exists(namer(self.fn + ".2"))) - rh.emit(self.next_rec()) - self.assertLogFile(namer(self.fn + ".2")) - self.assertFalse(os.path.exists(namer(self.fn + ".3"))) -+ rh.emit(self.next_rec()) -+ self.assertFalse(os.path.exists(namer(self.fn + ".3"))) - rh.close() - - def test_namer_rotator_inheritance(self): diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index 3d9d6d5d0ac..9c759170450 100644 --- a/Lib/test/test_marshal.py @@ -29250,32 +3064,10 @@ index 1867e8c957f..f75e40940e4 100644 'test requires %s bytes and a long time to run' % str(0x180000000)) f = open(TESTFN, 'w+b') diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py -index 88037059e96..ee18f99c476 100644 +index 7a04e5ad500..ee18f99c476 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py -@@ -824,7 +824,7 @@ - return (ns * 1e-9) + 0.5e-9 - - def test_utime_by_indexed(self): -- # pass times as floating point seconds as the second indexed parameter -+ # pass times as floating-point seconds as the second indexed parameter - def set_time(filename, ns): - atime_ns, mtime_ns = ns - atime = self.ns_to_sec(atime_ns) -@@ -1823,9 +1823,10 @@ - os.mkdir(path, mode=0o700) - out = subprocess.check_output(["cacls.exe", path, "/s"], encoding="oem") - os.rmdir(path) -+ out = out.strip().rsplit(" ", 1)[1] - self.assertEqual( -- out.strip(), -- f'{path} "D:P(A;OICI;FA;;;SY)(A;OICI;FA;;;BA)(A;OICI;FA;;;OW)"', -+ out, -+ '"D:P(A;OICI;FA;;;SY)(A;OICI;FA;;;BA)(A;OICI;FA;;;OW)"', - ) - - def tearDown(self): -@@ -2371,6 +2372,7 @@ +@@ -2372,6 +2372,7 @@ support.is_emscripten or support.is_wasi, "musl libc issue 
on Emscripten/WASI, bpo-46390" ) @@ -29283,7 +3075,7 @@ index 88037059e96..ee18f99c476 100644 def test_fpathconf(self): self.check(os.pathconf, "PC_NAME_MAX") self.check(os.fpathconf, "PC_NAME_MAX") -@@ -3944,6 +3946,7 @@ +@@ -3945,6 +3946,7 @@ self.assertGreaterEqual(size.columns, 0) self.assertGreaterEqual(size.lines, 0) @@ -29291,510 +3083,6 @@ index 88037059e96..ee18f99c476 100644 def test_stty_match(self): """Check if stty returns the same results -diff --git a/Lib/test/test_patma.py b/Lib/test/test_patma.py -index 3dbd19dfffd..6fe5360b5f2 100644 ---- a/Lib/test/test_patma.py -+++ b/Lib/test/test_patma.py -@@ -1,6 +1,7 @@ - import array - import collections - import dataclasses -+import dis - import enum - import inspect - import sys -@@ -3083,6 +3084,24 @@ - self.assertIs(y, None) - self.assertIs(z, None) - -+class TestSourceLocations(unittest.TestCase): -+ def test_jump_threading(self): -+ # See gh-123048 -+ def f(): -+ x = 0 -+ v = 1 -+ match v: -+ case 1: -+ if x < 0: -+ x = 1 -+ case 2: -+ if x < 0: -+ x = 1 -+ x += 1 -+ -+ for inst in dis.get_instructions(f): -+ if inst.opcode in dis.hasjrel or inst.opcode in dis.hasjabs: -+ self.assertIsNotNone(inst.positions.lineno, "jump without location") - - class TestTracing(unittest.TestCase): - -diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py -index 24324a37804..8a7e41b2811 100644 ---- a/Lib/test/test_pdb.py -+++ b/Lib/test/test_pdb.py -@@ -446,6 +446,38 @@ - (Pdb) continue - """ - -+def test_pdb_empty_line(): -+ """Test that empty line repeats the last command. -+ -+ >>> def test_function(): -+ ... x = 1 -+ ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() -+ ... pass -+ ... y = 2 -+ -+ >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE -+ ... 'p x', -+ ... '', # Should repeat p x -+ ... 'n ;; p 0 ;; p x', # Fill cmdqueue with multiple commands -+ ... '', # Should still repeat p x -+ ... 'continue', -+ ... ]): -+ ... 
test_function() -+ > (4)test_function() -+ -> pass -+ (Pdb) p x -+ 1 -+ (Pdb) -+ 1 -+ (Pdb) n ;; p 0 ;; p x -+ 0 -+ 1 -+ > (5)test_function() -+ -> y = 2 -+ (Pdb) -+ 1 -+ (Pdb) continue -+ """ - - def do_nothing(): - pass -diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py -index 1a55da39bdc..32fb3590ec6 100644 ---- a/Lib/test/test_pickle.py -+++ b/Lib/test/test_pickle.py -@@ -16,6 +16,7 @@ - - from test.pickletester import AbstractHookTests - from test.pickletester import AbstractUnpickleTests -+from test.pickletester import AbstractPicklingErrorTests - from test.pickletester import AbstractPickleTests - from test.pickletester import AbstractPickleModuleTests - from test.pickletester import AbstractPersistentPicklerTests -@@ -55,6 +56,18 @@ - return u.load() - - -+class PyPicklingErrorTests(AbstractPicklingErrorTests, unittest.TestCase): -+ -+ pickler = pickle._Pickler -+ -+ def dumps(self, arg, proto=None, **kwargs): -+ f = io.BytesIO() -+ p = self.pickler(f, proto, **kwargs) -+ p.dump(arg) -+ f.seek(0) -+ return bytes(f.read()) -+ -+ - class PyPicklerTests(AbstractPickleTests, unittest.TestCase): - - pickler = pickle._Pickler -@@ -88,6 +101,8 @@ - return pickle.loads(buf, **kwds) - - test_framed_write_sizes_with_delayed_writer = None -+ test_find_class = None -+ test_custom_find_class = None - - - class PersistentPicklerUnpicklerMixin(object): -@@ -245,6 +260,9 @@ - bad_stack_errors = (pickle.UnpicklingError,) - truncated_errors = (pickle.UnpicklingError,) - -+ class CPicklingErrorTests(PyPicklingErrorTests): -+ pickler = _pickle.Pickler -+ - class CPicklerTests(PyPicklerTests): - pickler = _pickle.Pickler - unpickler = _pickle.Unpickler -diff --git a/Lib/test/test_pickletools.py b/Lib/test/test_pickletools.py -index d37af79e878..8cb1f6dffcc 100644 ---- a/Lib/test/test_pickletools.py -+++ b/Lib/test/test_pickletools.py -@@ -1,3 +1,4 @@ -+import io - import pickle - import pickletools - from test import support -@@ -62,6 +63,315 @@ - self.assertNotIn(pickle.BINPUT, pickled2) - - -+class SimpleReader: -+ def __init__(self, data): -+ self.data = data -+ self.pos = 0 -+ -+ def read(self, n): -+ data = self.data[self.pos: self.pos + n] -+ self.pos += n -+ return data -+ -+ def readline(self): -+ nl = self.data.find(b'\n', self.pos) + 1 -+ if not nl: -+ nl = len(self.data) -+ data = self.data[self.pos: nl] -+ self.pos = nl -+ return data -+ -+ -+class GenopsTests(unittest.TestCase): -+ def test_genops(self): -+ it = pickletools.genops(b'(I123\nK\x12J\x12\x34\x56\x78t.') -+ self.assertEqual([(item[0].name,) + item[1:] for item in it], [ -+ ('MARK', None, 0), -+ ('INT', 123, 1), -+ ('BININT1', 0x12, 6), -+ ('BININT', 0x78563412, 8), -+ ('TUPLE', None, 13), -+ ('STOP', None, 14), -+ ]) -+ -+ def test_from_file(self): -+ f = io.BytesIO(b'prefix(I123\nK\x12J\x12\x34\x56\x78t.suffix') -+ self.assertEqual(f.read(6), b'prefix') -+ it = pickletools.genops(f) -+ self.assertEqual([(item[0].name,) + item[1:] for item in it], [ -+ ('MARK', None, 6), -+ ('INT', 123, 7), -+ ('BININT1', 0x12, 12), -+ ('BININT', 0x78563412, 14), -+ ('TUPLE', None, 19), -+ ('STOP', None, 20), -+ ]) -+ self.assertEqual(f.read(), b'suffix') -+ -+ def test_without_pos(self): -+ f = SimpleReader(b'(I123\nK\x12J\x12\x34\x56\x78t.') -+ it = pickletools.genops(f) -+ self.assertEqual([(item[0].name,) + item[1:] for item in it], [ -+ ('MARK', None, None), -+ ('INT', 123, None), -+ ('BININT1', 0x12, None), -+ ('BININT', 0x78563412, None), -+ ('TUPLE', None, None), -+ ('STOP', None, None), -+ ]) -+ -+ def test_no_stop(self): 
-+ it = pickletools.genops(b'N') -+ item = next(it) -+ self.assertEqual(item[0].name, 'NONE') -+ with self.assertRaisesRegex(ValueError, -+ 'pickle exhausted before seeing STOP'): -+ next(it) -+ -+ def test_truncated_data(self): -+ it = pickletools.genops(b'I123') -+ with self.assertRaisesRegex(ValueError, -+ 'no newline found when trying to read stringnl'): -+ next(it) -+ it = pickletools.genops(b'J\x12\x34') -+ with self.assertRaisesRegex(ValueError, -+ 'not enough data in stream to read int4'): -+ next(it) -+ -+ def test_unknown_opcode(self): -+ it = pickletools.genops(b'N\xff') -+ item = next(it) -+ self.assertEqual(item[0].name, 'NONE') -+ with self.assertRaisesRegex(ValueError, -+ r"at position 1, opcode b'\\xff' unknown"): -+ next(it) -+ -+ def test_unknown_opcode_without_pos(self): -+ f = SimpleReader(b'N\xff') -+ it = pickletools.genops(f) -+ item = next(it) -+ self.assertEqual(item[0].name, 'NONE') -+ with self.assertRaisesRegex(ValueError, -+ r"at position , opcode b'\\xff' unknown"): -+ next(it) -+ -+ -+class DisTests(unittest.TestCase): -+ maxDiff = None -+ -+ def check_dis(self, data, expected, **kwargs): -+ out = io.StringIO() -+ pickletools.dis(data, out=out, **kwargs) -+ self.assertEqual(out.getvalue(), expected) -+ -+ def check_dis_error(self, data, expected, expected_error, **kwargs): -+ out = io.StringIO() -+ with self.assertRaisesRegex(ValueError, expected_error): -+ pickletools.dis(data, out=out, **kwargs) -+ self.assertEqual(out.getvalue(), expected) -+ -+ def test_mark(self): -+ self.check_dis(b'(N(tl.', '''\ -+ 0: ( MARK -+ 1: N NONE -+ 2: ( MARK -+ 3: t TUPLE (MARK at 2) -+ 4: l LIST (MARK at 0) -+ 5: . STOP -+highest protocol among opcodes = 0 -+''') -+ -+ def test_indentlevel(self): -+ self.check_dis(b'(N(tl.', '''\ -+ 0: ( MARK -+ 1: N NONE -+ 2: ( MARK -+ 3: t TUPLE (MARK at 2) -+ 4: l LIST (MARK at 0) -+ 5: . STOP -+highest protocol among opcodes = 0 -+''', indentlevel=2) -+ -+ def test_mark_without_pos(self): -+ self.check_dis(SimpleReader(b'(N(tl.'), '''\ -+( MARK -+N NONE -+( MARK -+t TUPLE (MARK at unknown opcode offset) -+l LIST (MARK at unknown opcode offset) -+. STOP -+highest protocol among opcodes = 0 -+''') -+ -+ def test_no_mark(self): -+ self.check_dis_error(b'Nt.', '''\ -+ 0: N NONE -+ 1: t TUPLE no MARK exists on stack -+''', 'no MARK exists on stack') -+ -+ def test_put(self): -+ self.check_dis(b'Np0\nq\x01r\x02\x00\x00\x00\x94.', '''\ -+ 0: N NONE -+ 1: p PUT 0 -+ 4: q BINPUT 1 -+ 6: r LONG_BINPUT 2 -+ 11: \\x94 MEMOIZE (as 3) -+ 12: . 
STOP -+highest protocol among opcodes = 4 -+''') -+ -+ def test_put_redefined(self): -+ self.check_dis_error(b'Np1\np1\n.', '''\ -+ 0: N NONE -+ 1: p PUT 1 -+ 4: p PUT 1 -+''', 'memo key 1 already defined') -+ self.check_dis_error(b'Np1\nq\x01.', '''\ -+ 0: N NONE -+ 1: p PUT 1 -+ 4: q BINPUT 1 -+''', 'memo key 1 already defined') -+ self.check_dis_error(b'Np1\nr\x01\x00\x00\x00.', '''\ -+ 0: N NONE -+ 1: p PUT 1 -+ 4: r LONG_BINPUT 1 -+''', 'memo key 1 already defined') -+ self.check_dis_error(b'Np1\n\x94.', '''\ -+ 0: N NONE -+ 1: p PUT 1 -+ 4: \\x94 MEMOIZE (as 1) -+''', 'memo key None already defined') -+ -+ def test_put_empty_stack(self): -+ self.check_dis_error(b'p0\n', '''\ -+ 0: p PUT 0 -+''', "stack is empty -- can't store into memo") -+ -+ def test_put_markobject(self): -+ self.check_dis_error(b'(p0\n', '''\ -+ 0: ( MARK -+ 1: p PUT 0 -+''', "can't store markobject in the memo") -+ -+ def test_get(self): -+ self.check_dis(b'(Np1\ng1\nh\x01j\x01\x00\x00\x00t.', '''\ -+ 0: ( MARK -+ 1: N NONE -+ 2: p PUT 1 -+ 5: g GET 1 -+ 8: h BINGET 1 -+ 10: j LONG_BINGET 1 -+ 15: t TUPLE (MARK at 0) -+ 16: . STOP -+highest protocol among opcodes = 1 -+''') -+ -+ def test_get_without_put(self): -+ self.check_dis_error(b'g1\n.', '''\ -+ 0: g GET 1 -+''', 'memo key 1 has never been stored into') -+ self.check_dis_error(b'h\x01.', '''\ -+ 0: h BINGET 1 -+''', 'memo key 1 has never been stored into') -+ self.check_dis_error(b'j\x01\x00\x00\x00.', '''\ -+ 0: j LONG_BINGET 1 -+''', 'memo key 1 has never been stored into') -+ -+ def test_memo(self): -+ memo = {} -+ self.check_dis(b'Np1\n.', '''\ -+ 0: N NONE -+ 1: p PUT 1 -+ 4: . STOP -+highest protocol among opcodes = 0 -+''', memo=memo) -+ self.check_dis(b'g1\n.', '''\ -+ 0: g GET 1 -+ 3: . STOP -+highest protocol among opcodes = 0 -+''', memo=memo) -+ -+ def test_mark_pop(self): -+ self.check_dis(b'(N00N.', '''\ -+ 0: ( MARK -+ 1: N NONE -+ 2: 0 POP -+ 3: 0 POP (MARK at 0) -+ 4: N NONE -+ 5: . STOP -+highest protocol among opcodes = 0 -+''') -+ -+ def test_too_small_stack(self): -+ self.check_dis_error(b'a', '''\ -+ 0: a APPEND -+''', 'tries to pop 2 items from stack with only 0 items') -+ self.check_dis_error(b']a', '''\ -+ 0: ] EMPTY_LIST -+ 1: a APPEND -+''', 'tries to pop 2 items from stack with only 1 items') -+ -+ def test_no_stop(self): -+ self.check_dis_error(b'N', '''\ -+ 0: N NONE -+''', 'pickle exhausted before seeing STOP') -+ -+ def test_truncated_data(self): -+ self.check_dis_error(b'NI123', '''\ -+ 0: N NONE -+''', 'no newline found when trying to read stringnl') -+ self.check_dis_error(b'NJ\x12\x34', '''\ -+ 0: N NONE -+''', 'not enough data in stream to read int4') -+ -+ def test_unknown_opcode(self): -+ self.check_dis_error(b'N\xff', '''\ -+ 0: N NONE -+''', r"at position 1, opcode b'\\xff' unknown") -+ -+ def test_stop_not_empty_stack(self): -+ self.check_dis_error(b']N.', '''\ -+ 0: ] EMPTY_LIST -+ 1: N NONE -+ 2: . STOP -+highest protocol among opcodes = 1 -+''', r'stack not empty after STOP: \[list\]') -+ -+ def test_annotate(self): -+ self.check_dis(b'(Nt.', '''\ -+ 0: ( MARK Push markobject onto the stack. -+ 1: N NONE Push None on the stack. -+ 2: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. -+ 3: . STOP Stop the unpickling machine. -+highest protocol among opcodes = 0 -+''', annotate=1) -+ self.check_dis(b'(Nt.', '''\ -+ 0: ( MARK Push markobject onto the stack. -+ 1: N NONE Push None on the stack. -+ 2: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. 
-+ 3: . STOP Stop the unpickling machine. -+highest protocol among opcodes = 0 -+''', annotate=20) -+ self.check_dis(b'(((((((ttttttt.', '''\ -+ 0: ( MARK Push markobject onto the stack. -+ 1: ( MARK Push markobject onto the stack. -+ 2: ( MARK Push markobject onto the stack. -+ 3: ( MARK Push markobject onto the stack. -+ 4: ( MARK Push markobject onto the stack. -+ 5: ( MARK Push markobject onto the stack. -+ 6: ( MARK Push markobject onto the stack. -+ 7: t TUPLE (MARK at 6) Build a tuple out of the topmost stack slice, after markobject. -+ 8: t TUPLE (MARK at 5) Build a tuple out of the topmost stack slice, after markobject. -+ 9: t TUPLE (MARK at 4) Build a tuple out of the topmost stack slice, after markobject. -+ 10: t TUPLE (MARK at 3) Build a tuple out of the topmost stack slice, after markobject. -+ 11: t TUPLE (MARK at 2) Build a tuple out of the topmost stack slice, after markobject. -+ 12: t TUPLE (MARK at 1) Build a tuple out of the topmost stack slice, after markobject. -+ 13: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. -+ 14: . STOP Stop the unpickling machine. -+highest protocol among opcodes = 0 -+''', annotate=20) -+ -+ - class MiscTestCase(unittest.TestCase): - def test__all__(self): - not_exported = { -diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py -index d095f440a99..20fba87e4ec 100644 ---- a/Lib/test/test_pkgutil.py -+++ b/Lib/test/test_pkgutil.py -@@ -522,7 +522,43 @@ - del sys.modules['foo.bar'] - del sys.modules['foo.baz'] - -- # XXX: test .pkg files -+ -+ def test_extend_path_argument_types(self): -+ pkgname = 'foo' -+ dirname_0 = self.create_init(pkgname) -+ -+ # If the input path is not a list it is returned unchanged -+ self.assertEqual('notalist', pkgutil.extend_path('notalist', 'foo')) -+ self.assertEqual(('not', 'a', 'list'), pkgutil.extend_path(('not', 'a', 'list'), 'foo')) -+ self.assertEqual(123, pkgutil.extend_path(123, 'foo')) -+ self.assertEqual(None, pkgutil.extend_path(None, 'foo')) -+ -+ # Cleanup -+ shutil.rmtree(dirname_0) -+ del sys.path[0] -+ -+ -+ def test_extend_path_pkg_files(self): -+ pkgname = 'foo' -+ dirname_0 = self.create_init(pkgname) -+ -+ with open(os.path.join(dirname_0, 'bar.pkg'), 'w') as pkg_file: -+ pkg_file.write('\n'.join([ -+ 'baz', -+ '/foo/bar/baz', -+ '', -+ '#comment' -+ ])) -+ -+ extended_paths = pkgutil.extend_path(sys.path, 'bar') -+ -+ self.assertEqual(extended_paths[:-2], sys.path) -+ self.assertEqual(extended_paths[-2], 'baz') -+ self.assertEqual(extended_paths[-1], '/foo/bar/baz') -+ -+ # Cleanup -+ shutil.rmtree(dirname_0) -+ del sys.path[0] - - - class NestedNamespacePackageTest(unittest.TestCase): diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index b62a9e38977..39b9f7b178b 100644 --- a/Lib/test/test_platform.py @@ -29972,81 +3260,6 @@ index 7ed45acf28c..9bb1d056eb7 100644 class TestPosixSpawnP(unittest.TestCase, _PosixSpawnMixin): spawn_func = getattr(posix, 'posix_spawnp', None) -diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py -index 932d8a35d31..cc4fd2f4c95 100644 ---- a/Lib/test/test_posixpath.py -+++ b/Lib/test/test_posixpath.py -@@ -347,13 +347,19 @@ - "no home directory on VxWorks") - def test_expanduser_pwd2(self): - pwd = import_helper.import_module('pwd') -- for e in pwd.getpwall(): -- name = e.pw_name -- home = e.pw_dir -+ for all_entry in pwd.getpwall(): -+ name = all_entry.pw_name -+ -+ # gh-121200: pw_dir can be different between getpwall() and -+ # getpwnam(), so use getpwnam() pw_dir as 
expanduser() does. -+ entry = pwd.getpwnam(name) -+ home = entry.pw_dir - home = home.rstrip('/') or '/' -- self.assertEqual(posixpath.expanduser('~' + name), home) -- self.assertEqual(posixpath.expanduser(os.fsencode('~' + name)), -- os.fsencode(home)) -+ -+ with self.subTest(all_entry=all_entry, entry=entry): -+ self.assertEqual(posixpath.expanduser('~' + name), home) -+ self.assertEqual(posixpath.expanduser(os.fsencode('~' + name)), -+ os.fsencode(home)) - - NORMPATH_CASES = [ - ("", "."), -diff --git a/Lib/test/test_property.py b/Lib/test/test_property.py -index 4de2bb3781f..157a4835379 100644 ---- a/Lib/test/test_property.py -+++ b/Lib/test/test_property.py -@@ -431,6 +431,40 @@ - self.assertEqual(p.__doc__, "user") - self.assertEqual(p2.__doc__, "user") - -+ @unittest.skipIf(sys.flags.optimize >= 2, -+ "Docstrings are omitted with -O2 and above") -+ def test_prefer_explicit_doc(self): -+ # Issue 25757: subclasses of property lose docstring -+ self.assertEqual(property(doc="explicit doc").__doc__, "explicit doc") -+ self.assertEqual(PropertySub(doc="explicit doc").__doc__, "explicit doc") -+ -+ class Foo: -+ spam = PropertySub(doc="spam explicit doc") -+ -+ @spam.getter -+ def spam(self): -+ """ignored as doc already set""" -+ return 1 -+ -+ def _stuff_getter(self): -+ """ignored as doc set directly""" -+ stuff = PropertySub(doc="stuff doc argument", fget=_stuff_getter) -+ -+ #self.assertEqual(Foo.spam.__doc__, "spam explicit doc") -+ self.assertEqual(Foo.stuff.__doc__, "stuff doc argument") -+ -+ def test_property_no_doc_on_getter(self): -+ # If a property's getter has no __doc__ then the property's doc should -+ # be None; test that this is consistent with subclasses as well; see -+ # GH-2487 -+ class NoDoc: -+ @property -+ def __doc__(self): -+ raise AttributeError -+ -+ self.assertEqual(property(NoDoc()).__doc__, None) -+ self.assertEqual(PropertySub(NoDoc()).__doc__, None) -+ - @unittest.skipIf(sys.flags.optimize >= 2, - "Docstrings are omitted with -O2 and above") - def test_property_setter_copies_getter_docstring(self): diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py index 51e3a46d0df..3f2bac0155f 100644 --- a/Lib/test/test_pty.py @@ -30083,594 +3296,6 @@ index 51e3a46d0df..3f2bac0155f 100644 import warnings TEST_STRING_1 = b"I wish to buy a fish license.\n" -diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py -index 23453e34015..5415fa08330 100644 ---- a/Lib/test/test_pyclbr.py -+++ b/Lib/test/test_pyclbr.py -@@ -78,7 +78,8 @@ - - objname = obj.__name__ - if objname.startswith("__") and not objname.endswith("__"): -- objname = "_%s%s" % (oclass.__name__, objname) -+ if stripped_typename := oclass.__name__.lstrip('_'): -+ objname = f"_{stripped_typename}{objname}" - return objname == name - - # Make sure the toplevel functions and classes are the same. 
-@@ -111,12 +112,16 @@ - for m in py_item.__dict__.keys(): - if ismethod(py_item, getattr(py_item, m), m): - actualMethods.append(m) -- foundMethods = [] -- for m in value.methods.keys(): -- if m[:2] == '__' and m[-2:] != '__': -- foundMethods.append('_'+name+m) -- else: -- foundMethods.append(m) -+ -+ if stripped_typename := name.lstrip('_'): -+ foundMethods = [] -+ for m in value.methods.keys(): -+ if m.startswith('__') and not m.endswith('__'): -+ foundMethods.append(f"_{stripped_typename}{m}") -+ else: -+ foundMethods.append(m) -+ else: -+ foundMethods = list(value.methods.keys()) - - try: - self.assertListEq(foundMethods, actualMethods, ignore) -@@ -150,8 +155,9 @@ - "DocTestCase", '_DocTestSuite')) - self.checkModule('difflib', ignore=("Match",)) - -- def test_decorators(self): -- self.checkModule('test.pyclbr_input', ignore=['om']) -+ def test_cases(self): -+ # see test.pyclbr_input for the rationale behind the ignored symbols -+ self.checkModule('test.pyclbr_input', ignore=['om', 'f']) - - def test_nested(self): - mb = pyclbr -diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py -index a35257c8ffc..6e80b54335b 100644 ---- a/Lib/test/test_pydoc/test_pydoc.py -+++ b/Lib/test/test_pydoc/test_pydoc.py -@@ -15,6 +15,7 @@ - import types - import typing - import unittest -+import unittest.mock - import urllib.parse - import xml.etree - import xml.etree.ElementTree -@@ -377,6 +378,11 @@ - - - class PydocBaseTest(unittest.TestCase): -+ def tearDown(self): -+ # Self-testing. Mocking only works if sys.modules['pydoc'] and pydoc -+ # are the same. But some pydoc functions reload the module and change -+ # sys.modules, so check that it was restored. -+ self.assertIs(sys.modules['pydoc'], pydoc) - - def _restricted_walk_packages(self, walk_packages, path=None): - """ -@@ -408,6 +414,8 @@ - - class PydocDocTest(unittest.TestCase): - maxDiff = None -+ def tearDown(self): -+ self.assertIs(sys.modules['pydoc'], pydoc) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') -@@ -656,16 +664,13 @@ - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') -+ @unittest.mock.patch('pydoc.pager') - @requires_docstrings -- def test_help_output_redirect(self): -+ def test_help_output_redirect(self, pager_mock): - # issue 940286, if output is set in Helper, then all output from - # Helper.help should be redirected -- getpager_old = pydoc.getpager -- getpager_new = lambda: (lambda x: x) - self.maxDiff = None - -- buf = StringIO() -- helper = pydoc.Helper(output=buf) - unused, doc_loc = get_pydoc_text(pydoc_mod) - module = "test.test_pydoc.pydoc_mod" - help_header = """ -@@ -675,21 +680,112 @@ - help_header = textwrap.dedent(help_header) - expected_help_pattern = help_header + expected_text_pattern - -- pydoc.getpager = getpager_new -- try: -+ with captured_output('stdout') as output, \ -+ captured_output('stderr') as err, \ -+ StringIO() as buf: -+ helper = pydoc.Helper(output=buf) -+ helper.help(module) -+ result = buf.getvalue().strip() -+ expected_text = expected_help_pattern % ( -+ (doc_loc,) + -+ expected_text_data_docstrings + -+ (inspect.getabsfile(pydoc_mod),)) -+ self.assertEqual('', output.getvalue()) -+ self.assertEqual('', err.getvalue()) -+ self.assertEqual(expected_text, result) -+ -+ pager_mock.assert_not_called() -+ -+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), -+ 'trace function introduces __locals__ unexpectedly') -+ 
@requires_docstrings -+ @unittest.mock.patch('pydoc.pager') -+ def test_help_output_redirect_various_requests(self, pager_mock): -+ # issue 940286, if output is set in Helper, then all output from -+ # Helper.help should be redirected -+ -+ def run_pydoc_for_request(request, expected_text_part): -+ """Helper function to run pydoc with its output redirected""" - with captured_output('stdout') as output, \ -- captured_output('stderr') as err: -- helper.help(module) -+ captured_output('stderr') as err, \ -+ StringIO() as buf: -+ helper = pydoc.Helper(output=buf) -+ helper.help(request) - result = buf.getvalue().strip() -- expected_text = expected_help_pattern % ( -- (doc_loc,) + -- expected_text_data_docstrings + -- (inspect.getabsfile(pydoc_mod),)) -- self.assertEqual('', output.getvalue()) -- self.assertEqual('', err.getvalue()) -- self.assertEqual(expected_text, result) -- finally: -- pydoc.getpager = getpager_old -+ self.assertEqual('', output.getvalue(), msg=f'failed on request "{request}"') -+ self.assertEqual('', err.getvalue(), msg=f'failed on request "{request}"') -+ self.assertIn(expected_text_part, result, msg=f'failed on request "{request}"') -+ pager_mock.assert_not_called() -+ -+ self.maxDiff = None -+ -+ # test for "keywords" -+ run_pydoc_for_request('keywords', 'Here is a list of the Python keywords.') -+ # test for "symbols" -+ run_pydoc_for_request('symbols', 'Here is a list of the punctuation symbols') -+ # test for "topics" -+ run_pydoc_for_request('topics', 'Here is a list of available topics.') -+ # test for "modules" skipped, see test_modules() -+ # test for symbol "%" -+ run_pydoc_for_request('%', 'The power operator') -+ # test for special True, False, None keywords -+ run_pydoc_for_request('True', 'class bool(int)') -+ run_pydoc_for_request('False', 'class bool(int)') -+ run_pydoc_for_request('None', 'class NoneType(object)') -+ # test for keyword "assert" -+ run_pydoc_for_request('assert', 'The "assert" statement') -+ # test for topic "TYPES" -+ run_pydoc_for_request('TYPES', 'The standard type hierarchy') -+ # test for "pydoc.Helper.help" -+ run_pydoc_for_request('pydoc.Helper.help', 'Help on function help in pydoc.Helper:') -+ # test for pydoc.Helper.help -+ run_pydoc_for_request(pydoc.Helper.help, 'Help on function help in module pydoc:') -+ # test for pydoc.Helper() instance skipped because it is always meant to be interactive -+ -+ def test_showtopic(self): -+ with captured_stdout() as showtopic_io: -+ helper = pydoc.Helper() -+ helper.showtopic('with') -+ helptext = showtopic_io.getvalue() -+ self.assertIn('The "with" statement', helptext) -+ -+ def test_fail_showtopic(self): -+ with captured_stdout() as showtopic_io: -+ helper = pydoc.Helper() -+ helper.showtopic('abd') -+ expected = "no documentation found for 'abd'" -+ self.assertEqual(expected, showtopic_io.getvalue().strip()) -+ -+ @unittest.mock.patch('pydoc.pager') -+ def test_fail_showtopic_output_redirect(self, pager_mock): -+ with StringIO() as buf: -+ helper = pydoc.Helper(output=buf) -+ helper.showtopic("abd") -+ expected = "no documentation found for 'abd'" -+ self.assertEqual(expected, buf.getvalue().strip()) -+ -+ pager_mock.assert_not_called() -+ -+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), -+ 'trace function introduces __locals__ unexpectedly') -+ @requires_docstrings -+ @unittest.mock.patch('pydoc.pager') -+ def test_showtopic_output_redirect(self, pager_mock): -+ # issue 940286, if output is set in Helper, then all output from -+ # Helper.showtopic should be redirected -+ 
self.maxDiff = None -+ -+ with captured_output('stdout') as output, \ -+ captured_output('stderr') as err, \ -+ StringIO() as buf: -+ helper = pydoc.Helper(output=buf) -+ helper.showtopic('with') -+ result = buf.getvalue().strip() -+ self.assertEqual('', output.getvalue()) -+ self.assertEqual('', err.getvalue()) -+ self.assertIn('The "with" statement', result) -+ -+ pager_mock.assert_not_called() - - def test_lambda_with_return_annotation(self): - func = lambda a, b, c: 1 -@@ -1062,15 +1158,20 @@ - self.assertTrue(result.startswith(expected)) - - def test_importfile(self): -- loaded_pydoc = pydoc.importfile(pydoc.__file__) -+ try: -+ loaded_pydoc = pydoc.importfile(pydoc.__file__) - -- self.assertIsNot(loaded_pydoc, pydoc) -- self.assertEqual(loaded_pydoc.__name__, 'pydoc') -- self.assertEqual(loaded_pydoc.__file__, pydoc.__file__) -- self.assertEqual(loaded_pydoc.__spec__, pydoc.__spec__) -+ self.assertIsNot(loaded_pydoc, pydoc) -+ self.assertEqual(loaded_pydoc.__name__, 'pydoc') -+ self.assertEqual(loaded_pydoc.__file__, pydoc.__file__) -+ self.assertEqual(loaded_pydoc.__spec__, pydoc.__spec__) -+ finally: -+ sys.modules['pydoc'] = pydoc - - - class TestDescriptions(unittest.TestCase): -+ def tearDown(self): -+ self.assertIs(sys.modules['pydoc'], pydoc) - - def test_module(self): - # Check that pydocfodder module can be described -@@ -1416,6 +1517,8 @@ - - - class PydocFodderTest(unittest.TestCase): -+ def tearDown(self): -+ self.assertIs(sys.modules['pydoc'], pydoc) - - def getsection(self, text, beginline, endline): - lines = text.splitlines() -@@ -1543,6 +1646,8 @@ - ) - class PydocServerTest(unittest.TestCase): - """Tests for pydoc._start_server""" -+ def tearDown(self): -+ self.assertIs(sys.modules['pydoc'], pydoc) - - def test_server(self): - # Minimal test that starts the server, checks that it works, then stops -@@ -1605,9 +1710,14 @@ - ("foobar", "Pydoc: Error - foobar"), - ] - -- with self.restrict_walk_packages(): -- for url, title in requests: -- self.call_url_handler(url, title) -+ self.assertIs(sys.modules['pydoc'], pydoc) -+ try: -+ with self.restrict_walk_packages(): -+ for url, title in requests: -+ self.call_url_handler(url, title) -+ finally: -+ # Some requests reload the module and change sys.modules. 
-+ sys.modules['pydoc'] = pydoc - - - class TestHelper(unittest.TestCase): -@@ -1617,6 +1727,9 @@ - - - class PydocWithMetaClasses(unittest.TestCase): -+ def tearDown(self): -+ self.assertIs(sys.modules['pydoc'], pydoc) -+ - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings -diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py -index e279412efbc..cb3e7463985 100644 ---- a/Lib/test/test_re.py -+++ b/Lib/test/test_re.py -@@ -1052,47 +1052,76 @@ - - def test_possible_set_operations(self): - s = bytes(range(128)).decode() -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: - p = re.compile(r'[0-9--1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('-./0123456789')) -+ with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: -+ self.assertEqual(re.findall(r'[0-9--2]', s), list('-./0123456789')) -+ self.assertEqual(w.filename, __file__) -+ - self.assertEqual(re.findall(r'[--1]', s), list('-./01')) -- with self.assertWarns(FutureWarning): -+ -+ with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: - p = re.compile(r'[%--1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list("%&'()*+,-1")) -- with self.assertWarns(FutureWarning): -+ -+ with self.assertWarnsRegex(FutureWarning, 'Possible set difference ') as w: - p = re.compile(r'[%--]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list("%&'()*+,-")) - -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: - p = re.compile(r'[0-9&&1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('&0123456789')) -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: -+ self.assertEqual(re.findall(r'[0-8&&1]', s), list('&012345678')) -+ self.assertEqual(w.filename, __file__) -+ -+ with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: - p = re.compile(r'[\d&&1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('&0123456789')) -+ - self.assertEqual(re.findall(r'[&&1]', s), list('&1')) - -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible set union ') as w: - p = re.compile(r'[0-9||a]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('0123456789a|')) -- with self.assertWarns(FutureWarning): -+ -+ with self.assertWarnsRegex(FutureWarning, 'Possible set union ') as w: - p = re.compile(r'[\d||a]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('0123456789a|')) -+ - self.assertEqual(re.findall(r'[||1]', s), list('1|')) - -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible set symmetric difference ') as w: - p = re.compile(r'[0-9~~1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('0123456789~')) -- with self.assertWarns(FutureWarning): -+ -+ with self.assertWarnsRegex(FutureWarning, 'Possible set symmetric difference ') as w: - p = re.compile(r'[\d~~1]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('0123456789~')) -+ - self.assertEqual(re.findall(r'[~~1]', s), list('1~')) - -- with self.assertWarns(FutureWarning): -+ with 
self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: - p = re.compile(r'[[0-9]|]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list('0123456789[]')) -+ with self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: -+ self.assertEqual(re.findall(r'[[0-8]|]', s), list('012345678[]')) -+ self.assertEqual(w.filename, __file__) - -- with self.assertWarns(FutureWarning): -+ with self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: - p = re.compile(r'[[:digit:]|]') -+ self.assertEqual(w.filename, __file__) - self.assertEqual(p.findall(s), list(':[]dgit')) - - def test_search_coverage(self): -diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py -index 6c2726d3209..fab124ae4ad 100644 ---- a/Lib/test/test_readline.py -+++ b/Lib/test/test_readline.py -@@ -12,6 +12,7 @@ - from test.support.os_helper import unlink, temp_dir, TESTFN - from test.support.pty_helper import run_pty - from test.support.script_helper import assert_python_ok -+from test.support.threading_helper import requires_working_threading - - # Skip tests if there is no readline module - readline = import_module('readline') -@@ -132,6 +133,32 @@ - self.assertEqual(readline.get_history_item(1), "entrée 1") - self.assertEqual(readline.get_history_item(2), "entrée 22") - -+ def test_write_read_limited_history(self): -+ previous_length = readline.get_history_length() -+ self.addCleanup(readline.set_history_length, previous_length) -+ -+ readline.clear_history() -+ readline.add_history("first line") -+ readline.add_history("second line") -+ readline.add_history("third line") -+ -+ readline.set_history_length(2) -+ self.assertEqual(readline.get_history_length(), 2) -+ readline.write_history_file(TESTFN) -+ self.addCleanup(os.remove, TESTFN) -+ -+ readline.clear_history() -+ self.assertEqual(readline.get_current_history_length(), 0) -+ self.assertEqual(readline.get_history_length(), 2) -+ -+ readline.read_history_file(TESTFN) -+ self.assertEqual(readline.get_history_item(1), "second line") -+ self.assertEqual(readline.get_history_item(2), "third line") -+ self.assertEqual(readline.get_history_item(3), None) -+ -+ # Readline seems to report an additional history element. 
-+ self.assertIn(readline.get_current_history_length(), (2, 3)) -+ - - class TestReadline(unittest.TestCase): - -@@ -320,6 +347,50 @@ - self.assertEqual(len(lines), history_size) - self.assertEqual(lines[-1].strip(), b"last input") - -+ @requires_working_threading() -+ def test_gh123321_threadsafe(self): -+ """gh-123321: readline should be thread-safe and not crash""" -+ script = textwrap.dedent(r""" -+ import threading -+ from test.support.threading_helper import join_thread -+ -+ def func(): -+ input() -+ -+ thread1 = threading.Thread(target=func) -+ thread2 = threading.Thread(target=func) -+ thread1.start() -+ thread2.start() -+ join_thread(thread1) -+ join_thread(thread2) -+ print("done") -+ """) -+ -+ output = run_pty(script, input=b"input1\rinput2\r") -+ -+ self.assertIn(b"done", output) -+ -+ -+ def test_write_read_limited_history(self): -+ previous_length = readline.get_history_length() -+ self.addCleanup(readline.set_history_length, previous_length) -+ -+ readline.add_history("first line") -+ readline.add_history("second line") -+ readline.add_history("third line") -+ -+ readline.set_history_length(2) -+ self.assertEqual(readline.get_history_length(), 2) -+ readline.write_history_file(TESTFN) -+ self.addCleanup(os.remove, TESTFN) -+ -+ readline.read_history_file(TESTFN) -+ # Without clear_history() there's no good way to test if -+ # the correct entries are present (we're combining history limiting and -+ # possible deduplication with arbitrary previous content). -+ # So, we've only tested that the read did not fail. -+ # See TestHistoryManipulation for the full test. -+ - - if __name__ == "__main__": - unittest.main() -diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py -index 8135a3fdad1..75196ac040a 100644 ---- a/Lib/test/test_regrtest.py -+++ b/Lib/test/test_regrtest.py -@@ -21,6 +21,8 @@ - import tempfile - import textwrap - import unittest -+from xml.etree import ElementTree -+ - from test import support - from test.support import os_helper - from test.libregrtest import cmdline -@@ -2221,6 +2223,44 @@ - self.check_executed_tests(output, testname, stats=1, parallel=True) - self.assertNotIn('SPAM SPAM SPAM', output) - -+ def test_xml(self): -+ code = textwrap.dedent(r""" -+ import unittest -+ from test import support -+ -+ class VerboseTests(unittest.TestCase): -+ def test_failed(self): -+ print("abc \x1b def") -+ self.fail() -+ """) -+ testname = self.create_test(code=code) -+ -+ # Run sequentially -+ filename = os_helper.TESTFN -+ self.addCleanup(os_helper.unlink, filename) -+ -+ output = self.run_tests(testname, "--junit-xml", filename, -+ exitcode=EXITCODE_BAD_TEST) -+ self.check_executed_tests(output, testname, -+ failed=testname, -+ stats=TestStats(1, 1, 0)) -+ -+ # Test generated XML -+ with open(filename, encoding="utf8") as fp: -+ content = fp.read() -+ -+ testsuite = ElementTree.fromstring(content) -+ self.assertEqual(int(testsuite.get('tests')), 1) -+ self.assertEqual(int(testsuite.get('errors')), 0) -+ self.assertEqual(int(testsuite.get('failures')), 1) -+ -+ testcase = testsuite[0][0] -+ self.assertEqual(testcase.get('status'), 'run') -+ self.assertEqual(testcase.get('result'), 'completed') -+ self.assertGreater(float(testcase.get('time')), 0) -+ for out in testcase.iter('system-out'): -+ self.assertEqual(out.text, r"abc \x1b def") -+ - - class TestUtils(unittest.TestCase): - def test_format_duration(self): -@@ -2403,6 +2443,25 @@ - self.assertTrue(match_test(test_chdir)) - self.assertFalse(match_test(test_copy)) - -+ def test_sanitize_xml(self): 
-+ sanitize_xml = utils.sanitize_xml -+ -+ # escape invalid XML characters -+ self.assertEqual(sanitize_xml('abc \x1b\x1f def'), -+ r'abc \x1b\x1f def') -+ self.assertEqual(sanitize_xml('nul:\x00, bell:\x07'), -+ r'nul:\x00, bell:\x07') -+ self.assertEqual(sanitize_xml('surrogate:\uDC80'), -+ r'surrogate:\udc80') -+ self.assertEqual(sanitize_xml('illegal \uFFFE and \uFFFF'), -+ r'illegal \ufffe and \uffff') -+ -+ # no escape for valid XML characters -+ self.assertEqual(sanitize_xml('a\n\tb'), -+ 'a\n\tb') -+ self.assertEqual(sanitize_xml('valid t\xe9xt \u20ac'), -+ 'valid t\xe9xt \u20ac') -+ - - if __name__ == '__main__': - unittest.main() -diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py -index 9c849410583..3f68962a589 100644 ---- a/Lib/test/test_runpy.py -+++ b/Lib/test/test_runpy.py -@@ -660,8 +660,10 @@ - with temp_dir() as script_dir: - mod_name = 'script' - script_name = FakePath(self._make_test_script(script_dir, mod_name)) -- self._check_script(script_name, "", script_name, -- script_name, expect_spec=False) -+ self._check_script(script_name, "", -+ os.fsdecode(script_name), -+ os.fsdecode(script_name), -+ expect_spec=False) - - def test_basic_script_no_suffix(self): - with temp_dir() as script_dir: diff --git a/Lib/test/test_selectors.py b/Lib/test/test_selectors.py index 31757205ca3..6b88b121580 100644 --- a/Lib/test/test_selectors.py @@ -30694,63 +3319,6 @@ index 31757205ca3..6b88b121580 100644 # unexplainable errors on macOS don't need to fail the test self.skipTest("Invalid argument error calling poll()") raise -diff --git a/Lib/test/test_setcomps.py b/Lib/test/test_setcomps.py -index 976fa885bd8..0bb02ef11f6 100644 ---- a/Lib/test/test_setcomps.py -+++ b/Lib/test/test_setcomps.py -@@ -1,6 +1,9 @@ - import doctest -+import traceback - import unittest - -+from test.support import BrokenIter -+ - - doctests = """ - ########### Tests mostly copied from test_listcomps.py ############ -@@ -148,6 +151,42 @@ - - """ - -+class SetComprehensionTest(unittest.TestCase): -+ def test_exception_locations(self): -+ # The location of an exception raised from __init__ or -+ # __next__ should should be the iterator expression -+ -+ def init_raises(): -+ try: -+ {x for x in BrokenIter(init_raises=True)} -+ except Exception as e: -+ return e -+ -+ def next_raises(): -+ try: -+ {x for x in BrokenIter(next_raises=True)} -+ except Exception as e: -+ return e -+ -+ def iter_raises(): -+ try: -+ {x for x in BrokenIter(iter_raises=True)} -+ except Exception as e: -+ return e -+ -+ for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), -+ (next_raises, "BrokenIter(next_raises=True)"), -+ (iter_raises, "BrokenIter(iter_raises=True)"), -+ ]: -+ with self.subTest(func): -+ exc = func() -+ f = traceback.extract_tb(exc.__traceback__)[0] -+ indent = 16 -+ co = func.__code__ -+ self.assertEqual(f.lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.end_lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], -+ expected) - - __test__ = {'doctests' : doctests} - diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 7bc5d12e09c..1680706a4d5 100644 --- a/Lib/test/test_shutil.py @@ -30807,7 +3375,7 @@ index c7b9549dd3a..591cd4177d9 100644 "test needs SIGUSR1") @threading_helper.requires_working_threading() diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py -index cda956499ed..63418d702c8 100644 +index 545564094e1..63418d702c8 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -3,6 +3,7 @@ @@ 
-30818,38 +3386,7 @@ index cda956499ed..63418d702c8 100644 import _thread as thread import array -@@ -558,19 +559,27 @@ - def __init__(self, methodName='runTest'): - unittest.TestCase.__init__(self, methodName=methodName) - ThreadableTest.__init__(self) -+ self.cli = None -+ self.serv = None -+ -+ def socketpair(self): -+ # To be overridden by some child classes. -+ return socket.socketpair() - - def setUp(self): -- self.serv, self.cli = socket.socketpair() -+ self.serv, self.cli = self.socketpair() - - def tearDown(self): -- self.serv.close() -+ if self.serv: -+ self.serv.close() - self.serv = None - - def clientSetUp(self): - pass - - def clientTearDown(self): -- self.cli.close() -+ if self.cli: -+ self.cli.close() - self.cli = None - ThreadableTest.clientTearDown(self) - -@@ -1171,8 +1180,11 @@ +@@ -1179,8 +1180,11 @@ # Find one service that exists, then check all the related interfaces. # I've ordered this by protocols that have both a tcp and udp # protocol, at least for modern Linuxes. @@ -30863,7 +3400,7 @@ index cda956499ed..63418d702c8 100644 # avoid the 'echo' service on this platform, as there is an # assumption breaking non-standard port/protocol entry services = ('daytime', 'qotd', 'domain') -@@ -3683,7 +3695,7 @@ +@@ -3691,7 +3695,7 @@ def _testFDPassCMSG_LEN(self): self.createAndSendFDs(1) @@ -30872,7 +3409,7 @@ index cda956499ed..63418d702c8 100644 @unittest.skipIf(AIX, "skipping, see issue #22397") @requireAttrs(socket, "CMSG_SPACE") def testFDPassSeparate(self): -@@ -3694,7 +3706,7 @@ +@@ -3702,7 +3706,7 @@ maxcmsgs=2) @testFDPassSeparate.client_skip @@ -30881,7 +3418,7 @@ index cda956499ed..63418d702c8 100644 @unittest.skipIf(AIX, "skipping, see issue #22397") def _testFDPassSeparate(self): fd0, fd1 = self.newFDs(2) -@@ -3707,7 +3719,7 @@ +@@ -3715,7 +3719,7 @@ array.array("i", [fd1]))]), len(MSG)) @@ -30890,7 +3427,7 @@ index cda956499ed..63418d702c8 100644 @unittest.skipIf(AIX, "skipping, see issue #22397") @requireAttrs(socket, "CMSG_SPACE") def testFDPassSeparateMinSpace(self): -@@ -3721,7 +3733,7 @@ +@@ -3729,7 +3733,7 @@ maxcmsgs=2, ignoreflags=socket.MSG_CTRUNC) @testFDPassSeparateMinSpace.client_skip @@ -30899,7 +3436,7 @@ index cda956499ed..63418d702c8 100644 @unittest.skipIf(AIX, "skipping, see issue #22397") def _testFDPassSeparateMinSpace(self): fd0, fd1 = self.newFDs(2) -@@ -3745,7 +3757,7 @@ +@@ -3753,7 +3757,7 @@ nbytes = self.sendmsgToServer([msg]) self.assertEqual(nbytes, len(msg)) @@ -30908,119 +3445,6 @@ index cda956499ed..63418d702c8 100644 def testFDPassEmpty(self): # Try to pass an empty FD array. Can receive either no array # or an empty array. -@@ -4786,6 +4798,112 @@ - self.assertEqual(msg, MSG) - - -+class PurePythonSocketPairTest(SocketPairTest): -+ # Explicitly use socketpair AF_INET or AF_INET6 to ensure that is the -+ # code path we're using regardless platform is the pure python one where -+ # `_socket.socketpair` does not exist. (AF_INET does not work with -+ # _socket.socketpair on many platforms). -+ def socketpair(self): -+ # called by super().setUp(). -+ try: -+ return socket.socketpair(socket.AF_INET6) -+ except OSError: -+ return socket.socketpair(socket.AF_INET) -+ -+ # Local imports in this class make for easy security fix backporting. -+ -+ def setUp(self): -+ if hasattr(_socket, "socketpair"): -+ self._orig_sp = socket.socketpair -+ # This forces the version using the non-OS provided socketpair -+ # emulation via an AF_INET socket in Lib/socket.py. 
-+ socket.socketpair = socket._fallback_socketpair -+ else: -+ # This platform already uses the non-OS provided version. -+ self._orig_sp = None -+ super().setUp() -+ -+ def tearDown(self): -+ super().tearDown() -+ if self._orig_sp is not None: -+ # Restore the default socket.socketpair definition. -+ socket.socketpair = self._orig_sp -+ -+ def test_recv(self): -+ msg = self.serv.recv(1024) -+ self.assertEqual(msg, MSG) -+ -+ def _test_recv(self): -+ self.cli.send(MSG) -+ -+ def test_send(self): -+ self.serv.send(MSG) -+ -+ def _test_send(self): -+ msg = self.cli.recv(1024) -+ self.assertEqual(msg, MSG) -+ -+ def test_ipv4(self): -+ cli, srv = socket.socketpair(socket.AF_INET) -+ cli.close() -+ srv.close() -+ -+ def _test_ipv4(self): -+ pass -+ -+ @unittest.skipIf(not hasattr(_socket, 'IPPROTO_IPV6') or -+ not hasattr(_socket, 'IPV6_V6ONLY'), -+ "IPV6_V6ONLY option not supported") -+ @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 required for this test') -+ def test_ipv6(self): -+ cli, srv = socket.socketpair(socket.AF_INET6) -+ cli.close() -+ srv.close() -+ -+ def _test_ipv6(self): -+ pass -+ -+ def test_injected_authentication_failure(self): -+ orig_getsockname = socket.socket.getsockname -+ inject_sock = None -+ -+ def inject_getsocketname(self): -+ nonlocal inject_sock -+ sockname = orig_getsockname(self) -+ # Connect to the listening socket ahead of the -+ # client socket. -+ if inject_sock is None: -+ inject_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) -+ inject_sock.setblocking(False) -+ try: -+ inject_sock.connect(sockname[:2]) -+ except (BlockingIOError, InterruptedError): -+ pass -+ inject_sock.setblocking(True) -+ return sockname -+ -+ sock1 = sock2 = None -+ try: -+ socket.socket.getsockname = inject_getsocketname -+ with self.assertRaises(OSError): -+ sock1, sock2 = socket.socketpair() -+ finally: -+ socket.socket.getsockname = orig_getsockname -+ if inject_sock: -+ inject_sock.close() -+ if sock1: # This cleanup isn't needed on a successful test. -+ sock1.close() -+ if sock2: -+ sock2.close() -+ -+ def _test_injected_authentication_failure(self): -+ # No-op. Exists for base class threading infrastructure to call. -+ # We could refactor this test into its own lesser class along with the -+ # setUp and tearDown code to construct an ideal; it is simpler to keep -+ # it here and live with extra overhead one this _one_ failure test. -+ pass -+ -+ - class NonBlockingTCPTests(ThreadedTCPSocketTest): - - def __init__(self, methodName='runTest'): diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py index f0b99b13f68..f10b5209eb7 100644 --- a/Lib/test/test_sqlite3/test_dbapi.py @@ -31066,386 +3490,8 @@ index c77fec3d39d..ca55d429aec 100644 from test.support.import_helper import import_fresh_module from test.support.os_helper import TESTFN -diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py -index 0080d996040..d0e13bfbe92 100644 ---- a/Lib/test/test_statistics.py -+++ b/Lib/test/test_statistics.py -@@ -1074,7 +1074,7 @@ - def test_order_doesnt_matter(self): - # Test that the order of data points doesn't change the result. - -- # CAUTION: due to floating point rounding errors, the result actually -+ # CAUTION: due to floating-point rounding errors, the result actually - # may depend on the order. Consider this test representing an ideal. - # To avoid this test failing, only test with exact values such as ints - # or Fractions. 
-diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py -index c6c0afaf077..9db35ab835c 100644 ---- a/Lib/test/test_structseq.py -+++ b/Lib/test/test_structseq.py -@@ -1,8 +1,10 @@ - import copy - import os - import pickle -+import textwrap - import time - import unittest -+from test.support import script_helper - - - class StructSeqTest(unittest.TestCase): -@@ -204,6 +206,17 @@ - self.assertEqual(os.stat_result.n_unnamed_fields, 3) - self.assertEqual(os.stat_result.__match_args__, expected_args) - -+ def test_reference_cycle(self): -+ # gh-122527: Check that a structseq that's part of a reference cycle -+ # with its own type doesn't crash. Previously, if the type's dictionary -+ # was cleared first, the structseq instance would crash in the -+ # destructor. -+ script_helper.assert_python_ok("-c", textwrap.dedent(r""" -+ import time -+ t = time.gmtime() -+ type(t).refcyle = t -+ """)) -+ - - if __name__ == "__main__": - unittest.main() -diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py -index f77c6ecc050..4e8e85a5902 100644 ---- a/Lib/test/test_subprocess.py -+++ b/Lib/test/test_subprocess.py -@@ -1407,7 +1407,7 @@ - t = threading.Thread(target=open_fds) - t.start() - try: -- with self.assertRaises(EnvironmentError): -+ with self.assertRaises(OSError): - subprocess.Popen(NONEXISTING_CMD, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, -diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py -index 3ea01413c8e..af350ab446d 100644 ---- a/Lib/test/test_super.py -+++ b/Lib/test/test_super.py -@@ -1,9 +1,10 @@ - """Unit tests for zero-argument super() & related machinery.""" - - import textwrap -+import threading - import unittest - from unittest.mock import patch --from test.support import import_helper -+from test.support import import_helper, threading_helper - - - ADAPTIVE_WARMUP_DELAY = 2 -@@ -478,6 +479,38 @@ - for _ in range(ADAPTIVE_WARMUP_DELAY): - C.some(C) - -+ @threading_helper.requires_working_threading() -+ def test___class___modification_multithreaded(self): -+ """ Note: this test isn't actually testing anything on its own. -+ It requires a sys audithook to be set to crash on older Python. -+ This should be the case anyways as our test suite sets -+ an audit hook. -+ """ -+ class Foo: -+ pass -+ -+ class Bar: -+ pass -+ -+ thing = Foo() -+ def work(): -+ foo = thing -+ for _ in range(5000): -+ foo.__class__ = Bar -+ type(foo) -+ foo.__class__ = Foo -+ type(foo) -+ -+ -+ threads = [] -+ for _ in range(6): -+ thread = threading.Thread(target=work) -+ thread.start() -+ threads.append(thread) -+ -+ for thread in threads: -+ thread.join() -+ - - if __name__ == "__main__": - unittest.main() -diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py -index 82c1d7c856a..fe023fa61ac 100644 ---- a/Lib/test/test_symtable.py -+++ b/Lib/test/test_symtable.py -@@ -1,6 +1,8 @@ - """ - Test the API of the symtable module. - """ -+ -+import textwrap - import symtable - import unittest - -@@ -11,7 +13,7 @@ - - glob = 42 - some_var = 12 --some_non_assigned_global_var = 11 -+some_non_assigned_global_var: int - some_assigned_global_var = 11 - - class Mine: -@@ -51,6 +53,120 @@ - pass - """ - -+TEST_COMPLEX_CLASS_CODE = """ -+# The following symbols are defined in ComplexClass -+# without being introduced by a 'global' statement. 
-+glob_unassigned_meth: Any -+glob_unassigned_meth_pep_695: Any -+ -+glob_unassigned_async_meth: Any -+glob_unassigned_async_meth_pep_695: Any -+ -+def glob_assigned_meth(): pass -+def glob_assigned_meth_pep_695[T](): pass -+ -+async def glob_assigned_async_meth(): pass -+async def glob_assigned_async_meth_pep_695[T](): pass -+ -+# The following symbols are defined in ComplexClass after -+# being introduced by a 'global' statement (and therefore -+# are not considered as local symbols of ComplexClass). -+glob_unassigned_meth_ignore: Any -+glob_unassigned_meth_pep_695_ignore: Any -+ -+glob_unassigned_async_meth_ignore: Any -+glob_unassigned_async_meth_pep_695_ignore: Any -+ -+def glob_assigned_meth_ignore(): pass -+def glob_assigned_meth_pep_695_ignore[T](): pass -+ -+async def glob_assigned_async_meth_ignore(): pass -+async def glob_assigned_async_meth_pep_695_ignore[T](): pass -+ -+class ComplexClass: -+ a_var = 1234 -+ a_genexpr = (x for x in []) -+ a_lambda = lambda x: x -+ -+ type a_type_alias = int -+ type a_type_alias_pep_695[T] = list[T] -+ -+ class a_class: pass -+ class a_class_pep_695[T]: pass -+ -+ def a_method(self): pass -+ def a_method_pep_695[T](self): pass -+ -+ async def an_async_method(self): pass -+ async def an_async_method_pep_695[T](self): pass -+ -+ @classmethod -+ def a_classmethod(cls): pass -+ @classmethod -+ def a_classmethod_pep_695[T](self): pass -+ -+ @classmethod -+ async def an_async_classmethod(cls): pass -+ @classmethod -+ async def an_async_classmethod_pep_695[T](self): pass -+ -+ @staticmethod -+ def a_staticmethod(): pass -+ @staticmethod -+ def a_staticmethod_pep_695[T](self): pass -+ -+ @staticmethod -+ async def an_async_staticmethod(): pass -+ @staticmethod -+ async def an_async_staticmethod_pep_695[T](self): pass -+ -+ # These ones will be considered as methods because of the 'def' although -+ # they are *not* valid methods at runtime since they are not decorated -+ # with @staticmethod. -+ def a_fakemethod(): pass -+ def a_fakemethod_pep_695[T](): pass -+ -+ async def an_async_fakemethod(): pass -+ async def an_async_fakemethod_pep_695[T](): pass -+ -+ # Check that those are still considered as methods -+ # since they are not using the 'global' keyword. -+ def glob_unassigned_meth(): pass -+ def glob_unassigned_meth_pep_695[T](): pass -+ -+ async def glob_unassigned_async_meth(): pass -+ async def glob_unassigned_async_meth_pep_695[T](): pass -+ -+ def glob_assigned_meth(): pass -+ def glob_assigned_meth_pep_695[T](): pass -+ -+ async def glob_assigned_async_meth(): pass -+ async def glob_assigned_async_meth_pep_695[T](): pass -+ -+ # The following are not picked as local symbols because they are not -+ # visible by the class at runtime (this is equivalent to having the -+ # definitions outside of the class). 
-+ global glob_unassigned_meth_ignore -+ def glob_unassigned_meth_ignore(): pass -+ global glob_unassigned_meth_pep_695_ignore -+ def glob_unassigned_meth_pep_695_ignore[T](): pass -+ -+ global glob_unassigned_async_meth_ignore -+ async def glob_unassigned_async_meth_ignore(): pass -+ global glob_unassigned_async_meth_pep_695_ignore -+ async def glob_unassigned_async_meth_pep_695_ignore[T](): pass -+ -+ global glob_assigned_meth_ignore -+ def glob_assigned_meth_ignore(): pass -+ global glob_assigned_meth_pep_695_ignore -+ def glob_assigned_meth_pep_695_ignore[T](): pass -+ -+ global glob_assigned_async_meth_ignore -+ async def glob_assigned_async_meth_ignore(): pass -+ global glob_assigned_async_meth_pep_695_ignore -+ async def glob_assigned_async_meth_pep_695_ignore[T](): pass -+""" -+ - - def find_block(block, name): - for ch in block.get_children(): -@@ -63,6 +179,7 @@ - top = symtable.symtable(TEST_CODE, "?", "exec") - # These correspond to scopes in TEST_CODE - Mine = find_block(top, "Mine") -+ - a_method = find_block(Mine, "a_method") - spam = find_block(top, "spam") - internal = find_block(spam, "internal") -@@ -235,9 +352,75 @@ - self.assertEqual(self.spam.lookup("x").get_name(), "x") - self.assertEqual(self.Mine.get_name(), "Mine") - -- def test_class_info(self): -+ def test_class_get_methods(self): - self.assertEqual(self.Mine.get_methods(), ('a_method',)) - -+ top = symtable.symtable(TEST_COMPLEX_CLASS_CODE, "?", "exec") -+ this = find_block(top, "ComplexClass") -+ -+ self.assertEqual(this.get_methods(), ( -+ 'a_method', 'a_method_pep_695', -+ 'an_async_method', 'an_async_method_pep_695', -+ 'a_classmethod', 'a_classmethod_pep_695', -+ 'an_async_classmethod', 'an_async_classmethod_pep_695', -+ 'a_staticmethod', 'a_staticmethod_pep_695', -+ 'an_async_staticmethod', 'an_async_staticmethod_pep_695', -+ 'a_fakemethod', 'a_fakemethod_pep_695', -+ 'an_async_fakemethod', 'an_async_fakemethod_pep_695', -+ 'glob_unassigned_meth', 'glob_unassigned_meth_pep_695', -+ 'glob_unassigned_async_meth', 'glob_unassigned_async_meth_pep_695', -+ 'glob_assigned_meth', 'glob_assigned_meth_pep_695', -+ 'glob_assigned_async_meth', 'glob_assigned_async_meth_pep_695', -+ )) -+ -+ # Test generator expressions that are of type TYPE_FUNCTION -+ # but will not be reported by get_methods() since they are -+ # not functions per se. -+ # -+ # Other kind of comprehensions such as list, set or dict -+ # expressions do not have the TYPE_FUNCTION type. 
-+ -+ def check_body(body, expected_methods): -+ indented = textwrap.indent(body, ' ' * 4) -+ top = symtable.symtable(f"class A:\n{indented}", "?", "exec") -+ this = find_block(top, "A") -+ self.assertEqual(this.get_methods(), expected_methods) -+ -+ # statements with 'genexpr' inside it -+ GENEXPRS = ( -+ 'x = (x for x in [])', -+ 'x = (x async for x in [])', -+ 'genexpr = (x for x in [])', -+ 'genexpr = (x async for x in [])', -+ ) -+ -+ for gen in GENEXPRS: -+ # test generator expression -+ with self.subTest(gen=gen): -+ check_body(gen, ()) -+ -+ # test generator expression + variable named 'genexpr' -+ with self.subTest(gen=gen, isvar=True): -+ check_body('\n'.join((gen, 'genexpr = 1')), ()) -+ check_body('\n'.join(('genexpr = 1', gen)), ()) -+ -+ for paramlist in ('()', '(x)', '(x, y)', '(z: T)'): -+ for func in ( -+ f'def genexpr{paramlist}:pass', -+ f'async def genexpr{paramlist}:pass', -+ f'def genexpr[T]{paramlist}:pass', -+ f'async def genexpr[T]{paramlist}:pass', -+ ): -+ with self.subTest(func=func): -+ # test function named 'genexpr' -+ check_body(func, ('genexpr',)) -+ -+ for gen in GENEXPRS: -+ with self.subTest(gen=gen, func=func): -+ # test generator expression + function named 'genexpr' -+ check_body('\n'.join((gen, func)), ('genexpr',)) -+ check_body('\n'.join((func, gen)), ('genexpr',)) -+ - def test_filename_correct(self): - ### Bug tickler: SyntaxError file name correct whether error raised - ### while parsing or building symbol table. -diff --git a/Lib/test/test_sys_setprofile.py b/Lib/test/test_sys_setprofile.py -index bb8adc8b555..f77036962cf 100644 ---- a/Lib/test/test_sys_setprofile.py -+++ b/Lib/test/test_sys_setprofile.py -@@ -265,6 +265,10 @@ - f_ident = ident(f) - g_ident = ident(g) - self.check_events(g, [(1, 'call', g_ident), -+ (2, 'call', f_ident), -+ (2, 'return', f_ident), -+ # once more; the generator is being garbage collected -+ # and it will do a PY_THROW - (2, 'call', f_ident), - (2, 'return', f_ident), - (1, 'return', g_ident), -@@ -474,6 +478,20 @@ - sys.setprofile(lambda *args: None) - f() - -+ def test_method_with_c_function(self): -+ # gh-122029 -+ # When we have a PyMethodObject whose im_func is a C function, we -+ # should record both the call and the return. f = classmethod(repr) -+ # is just a way to create a PyMethodObject with a C function. -+ class A: -+ f = classmethod(repr) -+ events = [] -+ sys.setprofile(lambda frame, event, args: events.append(event)) -+ A().f() -+ sys.setprofile(None) -+ # The last c_call is the call to sys.setprofile -+ self.assertEqual(events, ['c_call', 'c_return', 'c_call']) -+ - - if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py -index 196fd60d197..fb4bc7fce9a 100644 +index 35985b34a42..fb4bc7fce9a 100644 --- a/Lib/test/test_sys_settrace.py +++ b/Lib/test/test_sys_settrace.py @@ -7,7 +7,7 @@ @@ -31457,25 +3503,6 @@ index 196fd60d197..fb4bc7fce9a 100644 import contextlib import warnings -@@ -1634,15 +1634,15 @@ - EXPECTED_EVENTS = [ - (0, 'call'), - (2, 'line'), -- (1, 'line'), - (-3, 'call'), - (-2, 'line'), - (-2, 'return'), -- (4, 'line'), - (1, 'line'), -+ (4, 'line'), -+ (2, 'line'), - (-2, 'call'), - (-2, 'return'), -- (1, 'return'), -+ (2, 'return'), - ] - - # C level events should be the same as expected and the same as Python level. 
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 1137c2032b9..89d046c865c 100644 --- a/Lib/test/test_sysconfig.py @@ -31520,1746 +3547,6 @@ index 1137c2032b9..89d046c865c 100644 def test_get_makefile_filename(self): makefile = sysconfig.get_makefile_filename() self.assertTrue(os.path.isfile(makefile), makefile) -diff --git a/Lib/test/test_tabnanny.py b/Lib/test/test_tabnanny.py -index cc122cafc79..30dcb3e3c4f 100644 ---- a/Lib/test/test_tabnanny.py -+++ b/Lib/test/test_tabnanny.py -@@ -315,7 +315,7 @@ - def test_with_errored_file(self): - """Should displays error when errored python file is given.""" - with TemporaryPyFile(SOURCE_CODES["wrong_indented"]) as file_path: -- stderr = f"{file_path!r}: Token Error: " -+ stderr = f"{file_path!r}: Indentation Error: " - stderr += ('unindent does not match any outer indentation level' - ' (, line 3)') - self.validate_cmd(file_path, stderr=stderr, expect_failure=True) -diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py -index 3fbd25e742b..e28d0311826 100644 ---- a/Lib/test/test_tarfile.py -+++ b/Lib/test/test_tarfile.py -@@ -1237,6 +1237,48 @@ - finally: - tar.close() - -+ def test_pax_header_bad_formats(self): -+ # The fields from the pax header have priority over the -+ # TarInfo. -+ pax_header_replacements = ( -+ b" foo=bar\n", -+ b"0 \n", -+ b"1 \n", -+ b"2 \n", -+ b"3 =\n", -+ b"4 =a\n", -+ b"1000000 foo=bar\n", -+ b"0 foo=bar\n", -+ b"-12 foo=bar\n", -+ b"000000000000000000000000036 foo=bar\n", -+ ) -+ pax_headers = {"foo": "bar"} -+ -+ for replacement in pax_header_replacements: -+ with self.subTest(header=replacement): -+ tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT, -+ encoding="iso8859-1") -+ try: -+ t = tarfile.TarInfo() -+ t.name = "pax" # non-ASCII -+ t.uid = 1 -+ t.pax_headers = pax_headers -+ tar.addfile(t) -+ finally: -+ tar.close() -+ -+ with open(tmpname, "rb") as f: -+ data = f.read() -+ self.assertIn(b"11 foo=bar\n", data) -+ data = data.replace(b"11 foo=bar\n", replacement) -+ -+ with open(tmpname, "wb") as f: -+ f.truncate() -+ f.write(data) -+ -+ with self.assertRaisesRegex(tarfile.ReadError, r"method tar: ReadError\('invalid header'\)"): -+ tarfile.open(tmpname, encoding="iso8859-1") -+ - - class WriteTestBase(TarTest): - # Put all write tests in here that are supposed to be tested -diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py -index ebdb58f91d3..743ff85dd19 100644 ---- a/Lib/test/test_tcl.py -+++ b/Lib/test/test_tcl.py -@@ -51,7 +51,7 @@ - - def test_eval_surrogates_in_result(self): - tcl = self.interp -- self.assertIn(tcl.eval(r'set a "<\ud83d\udcbb>"'), '<\U0001f4bb>') -+ self.assertEqual(tcl.eval(r'set a "<\ud83d\udcbb>"'), '<\U0001f4bb>') - - def testEvalException(self): - tcl = self.interp -@@ -61,11 +61,30 @@ - tcl = self.interp - self.assertRaises(TclError,tcl.eval,'this is wrong') - -+ def test_eval_returns_tcl_obj(self): -+ tcl = self.interp.tk -+ tcl.eval(r'set a "\u20ac \ud83d\udcbb \0 \udcab"; regexp -about $a') -+ a = tcl.eval('set a') -+ expected = '\u20ac \U0001f4bb \0 \udced\udcb2\udcab' -+ self.assertEqual(a, expected) -+ - def testCall(self): - tcl = self.interp - tcl.call('set','a','1') - self.assertEqual(tcl.call('set','a'),'1') - -+ def test_call_passing_null(self): -+ tcl = self.interp -+ tcl.call('set', 'a', 'a\0b') # ASCII-only -+ self.assertEqual(tcl.getvar('a'), 'a\x00b') -+ self.assertEqual(tcl.call('set', 'a'), 'a\x00b') -+ self.assertEqual(tcl.eval('set a'), 'a\x00b') -+ -+ tcl.call('set', 'a', '\u20ac\0') # non-ASCII -+ 
self.assertEqual(tcl.getvar('a'), '\u20ac\x00') -+ self.assertEqual(tcl.call('set', 'a'), '\u20ac\x00') -+ self.assertEqual(tcl.eval('set a'), '\u20ac\x00') -+ - def testCallException(self): - tcl = self.interp - self.assertRaises(TclError,tcl.call,'set','a') -@@ -74,11 +93,35 @@ - tcl = self.interp - self.assertRaises(TclError,tcl.call,'this','is','wrong') - -+ def test_call_returns_tcl_obj(self): -+ tcl = self.interp.tk -+ tcl.eval(r'set a "\u20ac \ud83d\udcbb \0 \udcab"; regexp -about $a') -+ a = tcl.call('set', 'a') -+ expected = '\u20ac \U0001f4bb \0 \udced\udcb2\udcab' -+ if self.wantobjects: -+ self.assertEqual(str(a), expected) -+ self.assertEqual(a.string, expected) -+ self.assertEqual(a.typename, 'regexp') -+ else: -+ self.assertEqual(a, expected) -+ - def testSetVar(self): - tcl = self.interp - tcl.setvar('a','1') - self.assertEqual(tcl.eval('set a'),'1') - -+ def test_setvar_passing_null(self): -+ tcl = self.interp -+ tcl.setvar('a', 'a\0b') # ASCII-only -+ self.assertEqual(tcl.getvar('a'), 'a\x00b') -+ self.assertEqual(tcl.call('set', 'a'), 'a\x00b') -+ self.assertEqual(tcl.eval('set a'), 'a\x00b') -+ -+ tcl.setvar('a', '\u20ac\0') # non-ASCII -+ self.assertEqual(tcl.getvar('a'), '\u20ac\x00') -+ self.assertEqual(tcl.call('set', 'a'), '\u20ac\x00') -+ self.assertEqual(tcl.eval('set a'), '\u20ac\x00') -+ - def testSetVarArray(self): - tcl = self.interp - tcl.setvar('a(1)','1') -@@ -102,6 +145,18 @@ - tcl = self.interp - self.assertRaises(TclError,tcl.getvar,'a(1)') - -+ def test_getvar_returns_tcl_obj(self): -+ tcl = self.interp.tk -+ tcl.eval(r'set a "\u20ac \ud83d\udcbb \0 \udcab"; regexp -about $a') -+ a = tcl.getvar('a') -+ expected = '\u20ac \U0001f4bb \0 \udced\udcb2\udcab' -+ if self.wantobjects: -+ self.assertEqual(str(a), expected) -+ self.assertEqual(a.string, expected) -+ self.assertEqual(a.typename, 'regexp') -+ else: -+ self.assertEqual(a, expected) -+ - def testUnsetVar(self): - tcl = self.interp - tcl.setvar('a',1) -@@ -219,10 +274,18 @@ - with open(filename, 'wb') as f: - f.write(b""" - set a "<\xed\xa0\xbd\xed\xb2\xbb>" -+ """) -+ if tcl_version >= (9, 0): -+ self.assertRaises(TclError, tcl.evalfile, filename) -+ else: -+ tcl.evalfile(filename) -+ self.assertEqual(tcl.eval('set a'), '<\U0001f4bb>') -+ -+ with open(filename, 'wb') as f: -+ f.write(b""" - set b "<\\ud83d\\udcbb>" - """) - tcl.evalfile(filename) -- self.assertEqual(tcl.eval('set a'), '<\U0001f4bb>') - self.assertEqual(tcl.eval('set b'), '<\U0001f4bb>') - - def testEvalFileException(self): -@@ -530,6 +593,24 @@ - check((1, (2,), (3, 4), '5 6', ()), '1 2 {3 4} {5 6} {}') - check([1, [2,], [3, 4], '5 6', []], '1 2 {3 4} {5 6} {}') - -+ def test_passing_tcl_obj(self): -+ tcl = self.interp.tk -+ a = None -+ def testfunc(arg): -+ nonlocal a -+ a = arg -+ self.interp.createcommand('testfunc', testfunc) -+ self.addCleanup(self.interp.tk.deletecommand, 'testfunc') -+ tcl.eval(r'set a "\u20ac \ud83d\udcbb \0 \udcab"; regexp -about $a') -+ tcl.eval(r'testfunc $a') -+ expected = '\u20ac \U0001f4bb \0 \udced\udcb2\udcab' -+ if self.wantobjects >= 2: -+ self.assertEqual(str(a), expected) -+ self.assertEqual(a.string, expected) -+ self.assertEqual(a.typename, 'regexp') -+ else: -+ self.assertEqual(a, expected) -+ - def test_splitlist(self): - splitlist = self.interp.tk.splitlist - call = self.interp.tk.call -@@ -654,6 +735,7 @@ - support.check_disallow_instantiation(self, _tkinter.TkttType) - support.check_disallow_instantiation(self, _tkinter.TkappType) - -+ - class BigmemTclTest(unittest.TestCase): - - def 
setUp(self): -diff --git a/Lib/test/test_tkinter/test_geometry_managers.py b/Lib/test/test_tkinter/test_geometry_managers.py -index 59fe592b492..1be474b3019 100644 ---- a/Lib/test/test_tkinter/test_geometry_managers.py -+++ b/Lib/test/test_tkinter/test_geometry_managers.py -@@ -10,6 +10,11 @@ - requires('gui') - - -+EXPECTED_FLOAT_ERRMSG = 'expected floating-point number but got "{}"' -+EXPECTED_FLOAT_OR_EMPTY_ERRMSG = 'expected floating-point number (or "" )?but got "{}"' -+EXPECTED_SCREEN_DISTANCE_ERRMSG = '(bad|expected) screen distance (but got )?"{}"' -+EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG = '(bad|expected) screen distance (or "" but got )?"{}"' -+ - class PackTest(AbstractWidgetTest, unittest.TestCase): - - test_keys = None -@@ -317,7 +322,8 @@ - self.assertEqual(f2.place_info()['x'], '-10') - self.root.update() - self.assertEqual(f2.winfo_x(), 190) -- with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('spam')): - f2.place_configure(in_=f, x='spam') - - def test_place_configure_y(self): -@@ -334,7 +340,8 @@ - self.assertEqual(f2.place_info()['y'], '-10') - self.root.update() - self.assertEqual(f2.winfo_y(), 110) -- with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('spam')): - f2.place_configure(in_=f, y='spam') - - def test_place_configure_relx(self): -@@ -351,8 +358,7 @@ - self.assertEqual(f2.place_info()['relx'], '1') - self.root.update() - self.assertEqual(f2.winfo_x(), 200) -- with self.assertRaisesRegex(TclError, 'expected floating-point number ' -- 'but got "spam"'): -+ with self.assertRaisesRegex(TclError, EXPECTED_FLOAT_ERRMSG.format('spam')): - f2.place_configure(in_=f, relx='spam') - - def test_place_configure_rely(self): -@@ -369,8 +375,7 @@ - self.assertEqual(f2.place_info()['rely'], '1') - self.root.update() - self.assertEqual(f2.winfo_y(), 120) -- with self.assertRaisesRegex(TclError, 'expected floating-point number ' -- 'but got "spam"'): -+ with self.assertRaisesRegex(TclError, EXPECTED_FLOAT_ERRMSG.format('spam')): - f2.place_configure(in_=f, rely='spam') - - def test_place_configure_anchor(self): -@@ -391,7 +396,8 @@ - f2.place_configure(width='') - self.root.update() - self.assertEqual(f2.winfo_width(), 30) -- with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG.format('abcd')): - f2.place_configure(width='abcd') - - def test_place_configure_height(self): -@@ -402,7 +408,8 @@ - f2.place_configure(height='') - self.root.update() - self.assertEqual(f2.winfo_height(), 60) -- with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG.format('abcd')): - f2.place_configure(height='abcd') - - def test_place_configure_relwidth(self): -@@ -413,8 +420,7 @@ - f2.place_configure(relwidth='') - self.root.update() - self.assertEqual(f2.winfo_width(), 30) -- with self.assertRaisesRegex(TclError, 'expected floating-point number ' -- 'but got "abcd"'): -+ with self.assertRaisesRegex(TclError, EXPECTED_FLOAT_OR_EMPTY_ERRMSG.format('abcd')): - f2.place_configure(relwidth='abcd') - - def test_place_configure_relheight(self): -@@ -425,8 +431,7 @@ - f2.place_configure(relheight='') - self.root.update() - self.assertEqual(f2.winfo_height(), 60) -- with self.assertRaisesRegex(TclError, 
'expected floating-point number ' -- 'but got "abcd"'): -+ with self.assertRaisesRegex(TclError, EXPECTED_FLOAT_OR_EMPTY_ERRMSG.format('abcd')): - f2.place_configure(relheight='abcd') - - def test_place_configure_bordermode(self): -@@ -629,7 +634,8 @@ - self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 4) - - def test_grid_columnconfigure_minsize(self): -- with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('foo')): - self.root.grid_columnconfigure(0, minsize='foo') - self.root.grid_columnconfigure(0, minsize=10) - self.assertEqual(self.root.grid_columnconfigure(0, 'minsize'), 10) -@@ -646,7 +652,8 @@ - self.assertEqual(self.root.grid_columnconfigure(0)['weight'], 3) - - def test_grid_columnconfigure_pad(self): -- with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('foo')): - self.root.grid_columnconfigure(0, pad='foo') - with self.assertRaisesRegex(TclError, 'invalid arg "-pad": ' - 'should be non-negative'): -@@ -683,7 +690,8 @@ - self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 4) - - def test_grid_rowconfigure_minsize(self): -- with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('foo')): - self.root.grid_rowconfigure(0, minsize='foo') - self.root.grid_rowconfigure(0, minsize=10) - self.assertEqual(self.root.grid_rowconfigure(0, 'minsize'), 10) -@@ -700,7 +708,8 @@ - self.assertEqual(self.root.grid_rowconfigure(0)['weight'], 3) - - def test_grid_rowconfigure_pad(self): -- with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('foo')): - self.root.grid_rowconfigure(0, pad='foo') - with self.assertRaisesRegex(TclError, 'invalid arg "-pad": ' - 'should be non-negative'): -@@ -818,9 +827,11 @@ - self.root.grid_location(0) - with self.assertRaises(TypeError): - self.root.grid_location(0, 0, 0) -- with self.assertRaisesRegex(TclError, 'bad screen distance "x"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('x')): - self.root.grid_location('x', 'y') -- with self.assertRaisesRegex(TclError, 'bad screen distance "y"'): -+ with self.assertRaisesRegex(TclError, -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('y')): - self.root.grid_location('1c', 'y') - t = self.root - # de-maximize -diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py -index fc12860010e..81866993435 100644 ---- a/Lib/test/test_tkinter/test_misc.py -+++ b/Lib/test/test_tkinter/test_misc.py -@@ -382,6 +382,15 @@ - self.assertEqual(vi.micro, 0) - self.assertTrue(str(vi).startswith(f'{vi.major}.{vi.minor}')) - -+ def test_embedded_null(self): -+ widget = tkinter.Entry(self.root) -+ widget.insert(0, 'abc\0def') # ASCII-only -+ widget.selection_range(0, 'end') -+ self.assertEqual(widget.selection_get(), 'abc\x00def') -+ widget.insert(0, '\u20ac\0') # non-ASCII -+ widget.selection_range(0, 'end') -+ self.assertEqual(widget.selection_get(), '\u20ac\0abc\x00def') -+ - - class EventTest(AbstractTkTest, unittest.TestCase): - -diff --git a/Lib/test/test_tkinter/test_variables.py b/Lib/test/test_tkinter/test_variables.py -index c1d232e2feb..def7aec077e 100644 ---- a/Lib/test/test_tkinter/test_variables.py -+++ b/Lib/test/test_tkinter/test_variables.py -@@ -6,7 +6,7 @@ - 
from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl, - TclError) - from test.support import ALWAYS_EQ --from test.test_tkinter.support import AbstractDefaultRootTest -+from test.test_tkinter.support import AbstractDefaultRootTest, tcl_version - - - class Var(Variable): -@@ -112,6 +112,8 @@ - self.assertTrue(v.side_effect) - - def test_trace_old(self): -+ if tcl_version >= (9, 0): -+ self.skipTest('requires Tcl version < 9.0') - # Old interface - v = Variable(self.root) - vname = str(v) -diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py -index 24604b27298..b020e1be6a2 100644 ---- a/Lib/test/test_tkinter/test_widgets.py -+++ b/Lib/test/test_tkinter/test_widgets.py -@@ -4,7 +4,7 @@ - import os - from test.support import requires - --from test.test_tkinter.support import (requires_tk, -+from test.test_tkinter.support import (requires_tk, tk_version, - get_tk_patchlevel, widget_eq, - AbstractDefaultRootTest) - from test.test_tkinter.widget_tests import ( -@@ -14,6 +14,9 @@ - requires('gui') - - -+EXPECTED_SCREEN_DISTANCE_ERRMSG = '(bad|expected) screen distance (but got )?"{}"' -+EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG = '(bad|expected) screen distance (or "" but got )?"{}"' -+ - def float_round(x): - return float(round(x)) - -@@ -58,11 +61,11 @@ - @add_standard_options(StandardOptionsTests) - class ToplevelTest(AbstractToplevelTest, unittest.TestCase): - OPTIONS = ( -- 'background', 'borderwidth', -+ 'background', 'backgroundimage', 'borderwidth', - 'class', 'colormap', 'container', 'cursor', 'height', - 'highlightbackground', 'highlightcolor', 'highlightthickness', - 'menu', 'padx', 'pady', 'relief', 'screen', -- 'takefocus', 'use', 'visual', 'width', -+ 'takefocus', 'tile', 'use', 'visual', 'width', - ) - - def create(self, **kwargs): -@@ -101,10 +104,10 @@ - @add_standard_options(StandardOptionsTests) - class FrameTest(AbstractToplevelTest, unittest.TestCase): - OPTIONS = ( -- 'background', 'borderwidth', -+ 'background', 'backgroundimage', 'borderwidth', - 'class', 'colormap', 'container', 'cursor', 'height', - 'highlightbackground', 'highlightcolor', 'highlightthickness', -- 'padx', 'pady', 'relief', 'takefocus', 'visual', 'width', -+ 'padx', 'pady', 'relief', 'takefocus', 'tile', 'visual', 'width', - ) - - def create(self, **kwargs): -@@ -141,11 +144,9 @@ - - class AbstractLabelTest(AbstractWidgetTest, IntegerSizeTests): - _conv_pixels = False -- -- def test_configure_highlightthickness(self): -- widget = self.create() -- self.checkPixelsParam(widget, 'highlightthickness', -- 0, 1.3, 2.6, 6, -2, '10p') -+ _clip_highlightthickness = tk_version >= (8, 7) -+ _clip_pad = tk_version >= (8, 7) -+ _clip_borderwidth = tk_version >= (8, 7) - - - @add_standard_options(StandardOptionsTests) -@@ -277,6 +278,9 @@ - 'underline', 'width', 'wraplength', - ) - _conv_pixels = round -+ _clip_highlightthickness = True -+ _clip_pad = True -+ _clip_borderwidth = False - - def create(self, **kwargs): - return tkinter.Menubutton(self.root, **kwargs) -@@ -290,9 +294,6 @@ - widget = self.create() - self.checkIntegerParam(widget, 'height', 100, -100, 0, conv=str) - -- test_configure_highlightthickness = \ -- StandardOptionsTests.test_configure_highlightthickness -- - def test_configure_image(self): - widget = self.create() - image = tkinter.PhotoImage(master=self.root, name='image1') -@@ -313,16 +314,6 @@ - self.checkParam(widget, 'menu', menu, eq=widget_eq) - menu.destroy() - -- def test_configure_padx(self): -- widget = self.create() -- 
self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m') -- self.checkParam(widget, 'padx', -2, expected=0) -- -- def test_configure_pady(self): -- widget = self.create() -- self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m') -- self.checkParam(widget, 'pady', -2, expected=0) -- - def test_configure_width(self): - widget = self.create() - self.checkIntegerParam(widget, 'width', 402, -402, 0, conv=str) -@@ -347,7 +338,8 @@ - 'highlightbackground', 'highlightcolor', 'highlightthickness', - 'insertbackground', 'insertborderwidth', - 'insertofftime', 'insertontime', 'insertwidth', -- 'invalidcommand', 'justify', 'readonlybackground', 'relief', -+ 'invalidcommand', 'justify', 'placeholder', 'placeholderforeground', -+ 'readonlybackground', 'relief', - 'selectbackground', 'selectborderwidth', 'selectforeground', - 'show', 'state', 'takefocus', 'textvariable', - 'validate', 'validatecommand', 'width', 'xscrollcommand', -@@ -441,8 +433,8 @@ - 'increment', - 'insertbackground', 'insertborderwidth', - 'insertofftime', 'insertontime', 'insertwidth', -- 'invalidcommand', 'justify', 'relief', 'readonlybackground', -- 'repeatdelay', 'repeatinterval', -+ 'invalidcommand', 'justify', 'placeholder', 'placeholderforeground', -+ 'relief', 'readonlybackground', 'repeatdelay', 'repeatinterval', - 'selectbackground', 'selectborderwidth', 'selectforeground', - 'state', 'takefocus', 'textvariable', 'to', - 'validate', 'validatecommand', 'values', -@@ -489,8 +481,12 @@ - widget = self.create() - self.checkParam(widget, 'to', 100.0) - self.checkFloatParam(widget, 'from', -10, 10.2, 11.7) -- self.checkInvalidParam(widget, 'from', 200, -- errmsg='-to value must be greater than -from value') -+ if tk_version >= (8, 7): -+ self.checkFloatParam(widget, 'from', 200, expected=100) -+ else: -+ self.checkInvalidParam( -+ widget, 'from', 200, -+ errmsg='-to value must be greater than -from value') - - def test_configure_increment(self): - widget = self.create() -@@ -500,8 +496,12 @@ - widget = self.create() - self.checkParam(widget, 'from', -100.0) - self.checkFloatParam(widget, 'to', -10, 10.2, 11.7) -- self.checkInvalidParam(widget, 'to', -200, -- errmsg='-to value must be greater than -from value') -+ if tk_version >= (8, 7): -+ self.checkFloatParam(widget, 'to', -200, expected=-100) -+ else: -+ self.checkInvalidParam( -+ widget, 'to', -200, -+ errmsg='-to value must be greater than -from value') - - def test_configure_values(self): - # XXX -@@ -666,7 +666,7 @@ - self.checkParam(widget, 'tabs', '2c left 4c 6c center', - expected=('2c', 'left', '4c', '6c', 'center')) - self.checkInvalidParam(widget, 'tabs', 'spam', -- errmsg='bad screen distance "spam"') -+ errmsg=EXPECTED_SCREEN_DISTANCE_ERRMSG.format('spam')) - - def test_configure_tabstyle(self): - widget = self.create() -@@ -860,24 +860,27 @@ - - def test_create_polygon(self): - c = self.create() -- i1 = c.create_polygon(20, 30, 40, 50, 60, 10) -+ tk87 = tk_version >= (8, 7) -+ # In Tk < 8.7 polygons are filled, but has no outline by default. -+ # This affects its size, so always explicitly specify outline. 
-+ i1 = c.create_polygon(20, 30, 40, 50, 60, 10, outline='red') - self.assertEqual(c.coords(i1), [20.0, 30.0, 40.0, 50.0, 60.0, 10.0]) -- self.assertEqual(c.bbox(i1), (19, 9, 61, 51)) -+ self.assertEqual(c.bbox(i1), (18, 8, 62, 52)) - self.assertEqual(c.itemcget(i1, 'joinstyle'), 'round') - self.assertEqual(c.itemcget(i1, 'smooth'), '0') - self.assertEqual(c.itemcget(i1, 'splinestep'), '12') - -- i2 = c.create_polygon([21, 31, 41, 51, 61, 11]) -+ i2 = c.create_polygon([21, 31, 41, 51, 61, 11], outline='red') - self.assertEqual(c.coords(i2), [21.0, 31.0, 41.0, 51.0, 61.0, 11.0]) -- self.assertEqual(c.bbox(i2), (20, 10, 62, 52)) -+ self.assertEqual(c.bbox(i2), (19, 9, 63, 53)) - -- i3 = c.create_polygon((22, 32), (42, 52), (62, 12)) -+ i3 = c.create_polygon((22, 32), (42, 52), (62, 12), outline='red') - self.assertEqual(c.coords(i3), [22.0, 32.0, 42.0, 52.0, 62.0, 12.0]) -- self.assertEqual(c.bbox(i3), (21, 11, 63, 53)) -+ self.assertEqual(c.bbox(i3), (20, 10, 64, 54)) - -- i4 = c.create_polygon([(23, 33), (43, 53), (63, 13)]) -+ i4 = c.create_polygon([(23, 33), (43, 53), (63, 13)], outline='red') - self.assertEqual(c.coords(i4), [23.0, 33.0, 43.0, 53.0, 63.0, 13.0]) -- self.assertEqual(c.bbox(i4), (22, 12, 64, 54)) -+ self.assertEqual(c.bbox(i4), (21, 11, 65, 55)) - - self.assertRaises(TclError, c.create_polygon, 20, 30, 60) - self.assertRaises(TclError, c.create_polygon, [20, 30, 60]) -@@ -1174,18 +1177,16 @@ - def create(self, **kwargs): - return tkinter.Scrollbar(self.root, **kwargs) - -- def test_configure_activerelief(self): -- widget = self.create() -- self.checkReliefParam(widget, 'activerelief') -- - def test_configure_elementborderwidth(self): - widget = self.create() -- self.checkPixelsParam(widget, 'elementborderwidth', 4.3, 5.6, -2, '1m') -+ self.checkPixelsParam(widget, 'elementborderwidth', 4.3, 5.6, '1m') -+ expected = self._default_pixels if tk_version >= (8, 7) else -2 -+ self.checkParam(widget, 'elementborderwidth', -2, expected=expected) - - def test_configure_orient(self): - widget = self.create() - self.checkEnumParam(widget, 'orient', 'vertical', 'horizontal', -- errmsg='bad orientation "{}": must be vertical or horizontal') -+ fullname='orientation', allow_empty=True) - - def test_activate(self): - sb = self.create() -@@ -1256,7 +1257,8 @@ - @requires_tk(8, 6, 5) - def test_configure_proxyrelief(self): - widget = self.create() -- self.checkReliefParam(widget, 'proxyrelief') -+ self.checkReliefParam(widget, 'proxyrelief', -+ allow_empty=(tk_version >= (8, 7))) - - def test_configure_sashcursor(self): - widget = self.create() -@@ -1329,7 +1331,7 @@ - p, b, c = self.create2() - self.check_paneconfigure(p, b, 'height', 10, 10) - self.check_paneconfigure_bad(p, b, 'height', -- 'bad screen distance "badValue"') -+ EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG.format('badValue')) - - def test_paneconfigure_hide(self): - p, b, c = self.create2() -@@ -1341,19 +1343,19 @@ - p, b, c = self.create2() - self.check_paneconfigure(p, b, 'minsize', 10, 10) - self.check_paneconfigure_bad(p, b, 'minsize', -- 'bad screen distance "badValue"') -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('badValue')) - - def test_paneconfigure_padx(self): - p, b, c = self.create2() - self.check_paneconfigure(p, b, 'padx', 1.3, 1) - self.check_paneconfigure_bad(p, b, 'padx', -- 'bad screen distance "badValue"') -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('badValue')) - - def test_paneconfigure_pady(self): - p, b, c = self.create2() - self.check_paneconfigure(p, b, 'pady', 1.3, 1) - self.check_paneconfigure_bad(p, b, 
'pady', -- 'bad screen distance "badValue"') -+ EXPECTED_SCREEN_DISTANCE_ERRMSG.format('badValue')) - - def test_paneconfigure_sticky(self): - p, b, c = self.create2() -@@ -1374,13 +1376,14 @@ - p, b, c = self.create2() - self.check_paneconfigure(p, b, 'width', 10, 10) - self.check_paneconfigure_bad(p, b, 'width', -- 'bad screen distance "badValue"') -+ EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG.format('badValue')) - - - @add_standard_options(StandardOptionsTests) - class MenuTest(AbstractWidgetTest, unittest.TestCase): - OPTIONS = ( - 'activebackground', 'activeborderwidth', 'activeforeground', -+ 'activerelief', - 'background', 'borderwidth', 'cursor', - 'disabledforeground', 'font', 'foreground', - 'postcommand', 'relief', 'selectcolor', 'takefocus', -@@ -1396,6 +1399,8 @@ - i = widget.index('none') - self.assertIsNone(i) - -+ test_configure_activerelief = requires_tk(8, 7)(StandardOptionsTests.test_configure_activerelief) -+ - def test_configure_postcommand(self): - widget = self.create() - self.checkCommandParam(widget, 'postcommand') -@@ -1414,14 +1419,10 @@ - - def test_configure_type(self): - widget = self.create() -- opts = ('normal, tearoff, or menubar' -- if widget.info_patchlevel() < (8, 7) else -- 'menubar, normal, or tearoff') -- self.checkEnumParam( -- widget, 'type', -- 'normal', 'tearoff', 'menubar', -- errmsg='bad type "{}": must be ' + opts, -- ) -+ values = ('normal', 'tearoff', 'menubar') -+ self.checkEnumParam(widget, 'type', *values, -+ allow_empty=tk_version < (8, 7), -+ sort=tk_version >= (8, 7)) - - def test_entryconfigure(self): - m1 = self.create() -@@ -1467,6 +1468,10 @@ - 'takefocus', 'text', 'textvariable', 'width', - ) - _conv_pad_pixels = False -+ if tk_version >= (8, 7): -+ _conv_pixels = False -+ _clip_pad = tk_version >= (8, 7) -+ _clip_borderwidth = tk_version >= (8, 7) - - def create(self, **kwargs): - return tkinter.Message(self.root, **kwargs) -@@ -1475,6 +1480,26 @@ - widget = self.create() - self.checkIntegerParam(widget, 'aspect', 250, 0, -300) - -+ def test_configure_padx(self): -+ widget = self.create() -+ self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m', -+ conv=self._conv_pad_pixels) -+ expected = self._default_pixels if self._clip_pad else -2 -+ self.checkParam(widget, 'padx', -2, expected=expected) -+ -+ def test_configure_pady(self): -+ widget = self.create() -+ self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m', -+ conv=self._conv_pad_pixels) -+ expected = self._default_pixels if self._clip_pad else -2 -+ self.checkParam(widget, 'pady', -2, expected=expected) -+ -+ def test_configure_width(self): -+ widget = self.create() -+ self.checkPixelsParam(widget, 'width', 402, 403.4, 404.6, 0, '5i') -+ expected = 0 if tk_version >= (8, 7) else -402 -+ self.checkParam(widget, 'width', -402, expected=expected) -+ - - class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): - -diff --git a/Lib/test/test_tkinter/widget_tests.py b/Lib/test/test_tkinter/widget_tests.py -index 514b42bea76..3b75dc7b2c1 100644 ---- a/Lib/test/test_tkinter/widget_tests.py -+++ b/Lib/test/test_tkinter/widget_tests.py -@@ -1,7 +1,8 @@ - # Common tests for test_tkinter/test_widgets.py and test_ttk/test_widgets.py - -+import re - import tkinter --from test.test_tkinter.support import (AbstractTkTest, tk_version, -+from test.test_tkinter.support import (AbstractTkTest, requires_tk, tk_version, - pixels_conv, tcl_obj_eq) - import test.support - -@@ -9,9 +10,14 @@ - _sentinel = object() - - class AbstractWidgetTest(AbstractTkTest): -+ _default_pixels = '' if 
tk_version >= (9, 0) else -1 if tk_version >= (8, 7) else '' - _conv_pixels = round - _conv_pad_pixels = None - _stringify = False -+ _clip_highlightthickness = True -+ _clip_pad = False -+ _clip_borderwidth = False -+ _allow_empty_justify = False - - @property - def scaling(self): -@@ -56,16 +62,13 @@ - def checkInvalidParam(self, widget, name, value, errmsg=None): - orig = widget[name] - if errmsg is not None: -- errmsg = errmsg.format(value) -- with self.assertRaises(tkinter.TclError) as cm: -+ errmsg = errmsg.format(re.escape(str(value))) -+ errmsg = fr'\A{errmsg}\Z' -+ with self.assertRaisesRegex(tkinter.TclError, errmsg or ''): - widget[name] = value -- if errmsg is not None: -- self.assertEqual(str(cm.exception), errmsg) - self.assertEqual(widget[name], orig) -- with self.assertRaises(tkinter.TclError) as cm: -+ with self.assertRaisesRegex(tkinter.TclError, errmsg or ''): - widget.configure({name: value}) -- if errmsg is not None: -- self.assertEqual(str(cm.exception), errmsg) - self.assertEqual(widget[name], orig) - - def checkParams(self, widget, name, *values, **kwargs): -@@ -74,30 +77,26 @@ - - def checkIntegerParam(self, widget, name, *values, **kwargs): - self.checkParams(widget, name, *values, **kwargs) -- self.checkInvalidParam(widget, name, '', -- errmsg='expected integer but got ""') -- self.checkInvalidParam(widget, name, '10p', -- errmsg='expected integer but got "10p"') -- self.checkInvalidParam(widget, name, 3.2, -- errmsg='expected integer but got "3.2"') -+ errmsg = 'expected integer but got "{}"' -+ self.checkInvalidParam(widget, name, '', errmsg=errmsg) -+ self.checkInvalidParam(widget, name, '10p', errmsg=errmsg) -+ self.checkInvalidParam(widget, name, 3.2, errmsg=errmsg) - - def checkFloatParam(self, widget, name, *values, conv=float, **kwargs): - for value in values: - self.checkParam(widget, name, value, conv=conv, **kwargs) -- self.checkInvalidParam(widget, name, '', -- errmsg='expected floating-point number but got ""') -- self.checkInvalidParam(widget, name, 'spam', -- errmsg='expected floating-point number but got "spam"') -+ errmsg = 'expected floating-point number but got "{}"' -+ self.checkInvalidParam(widget, name, '', errmsg=errmsg) -+ self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - - def checkBooleanParam(self, widget, name): - for value in (False, 0, 'false', 'no', 'off'): - self.checkParam(widget, name, value, expected=0) - for value in (True, 1, 'true', 'yes', 'on'): - self.checkParam(widget, name, value, expected=1) -- self.checkInvalidParam(widget, name, '', -- errmsg='expected boolean value but got ""') -- self.checkInvalidParam(widget, name, 'spam', -- errmsg='expected boolean value but got "spam"') -+ errmsg = 'expected boolean value but got "{}"' -+ self.checkInvalidParam(widget, name, '', errmsg=errmsg) -+ self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - - def checkColorParam(self, widget, name, *, allow_empty=None, **kwargs): - self.checkParams(widget, name, -@@ -120,16 +119,24 @@ - self.assertTrue(widget[name]) - self.checkParams(widget, name, '') - -- def checkEnumParam(self, widget, name, *values, errmsg=None, **kwargs): -+ def checkEnumParam(self, widget, name, *values, -+ errmsg=None, allow_empty=False, fullname=None, -+ sort=False, **kwargs): - self.checkParams(widget, name, *values, **kwargs) - if errmsg is None: -+ if sort: -+ if values[-1]: -+ values = tuple(sorted(values)) -+ else: -+ values = tuple(sorted(values[:-1])) + ('',) - errmsg2 = ' %s "{}": must be %s%s or %s' % ( -- name, -+ fullname or name, 
- ', '.join(values[:-1]), - ',' if len(values) > 2 else '', -- values[-1]) -- self.checkInvalidParam(widget, name, '', -- errmsg='ambiguous' + errmsg2) -+ values[-1] or '""') -+ if '' not in values and not allow_empty: -+ self.checkInvalidParam(widget, name, '', -+ errmsg='ambiguous' + errmsg2) - errmsg = 'bad' + errmsg2 - self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - -@@ -146,20 +153,21 @@ - conv1 = round - self.checkParam(widget, name, value, expected=expected, - conv=conv1, **kwargs) -- self.checkInvalidParam(widget, name, '6x', -- errmsg='bad screen distance "6x"') -- self.checkInvalidParam(widget, name, 'spam', -- errmsg='bad screen distance "spam"') -+ errmsg = '(bad|expected) screen distance ((or "" )?but got )?"{}"' -+ self.checkInvalidParam(widget, name, '6x', errmsg=errmsg) -+ self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - -- def checkReliefParam(self, widget, name): -- self.checkParams(widget, name, -- 'flat', 'groove', 'raised', 'ridge', 'solid', 'sunken') -- errmsg='bad relief "spam": must be '\ -- 'flat, groove, raised, ridge, solid, or sunken' -+ def checkReliefParam(self, widget, name, *, allow_empty=False): -+ values = ('flat', 'groove', 'raised', 'ridge', 'solid', 'sunken') -+ if allow_empty: -+ values += ('',) -+ self.checkParams(widget, name, *values) -+ errmsg = 'bad relief "{}": must be %s, or %s' % ( -+ ', '.join(values[:-1]), -+ values[-1] or '""') - if tk_version < (8, 6): - errmsg = None -- self.checkInvalidParam(widget, name, 'spam', -- errmsg=errmsg) -+ self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - - def checkImageParam(self, widget, name): - image = tkinter.PhotoImage(master=self.root, name='image1') -@@ -193,6 +201,7 @@ - aliases = { - 'bd': 'borderwidth', - 'bg': 'background', -+ 'bgimg': 'backgroundimage', - 'fg': 'foreground', - 'invcmd': 'invalidcommand', - 'vcmd': 'validatecommand', -@@ -235,6 +244,10 @@ - widget = self.create() - self.checkColorParam(widget, 'activeforeground') - -+ def test_configure_activerelief(self): -+ widget = self.create() -+ self.checkReliefParam(widget, 'activerelief') -+ - def test_configure_anchor(self): - widget = self.create() - self.checkEnumParam(widget, 'anchor', -@@ -246,6 +259,11 @@ - if 'bg' in self.OPTIONS: - self.checkColorParam(widget, 'bg') - -+ @requires_tk(8, 7) -+ def test_configure_backgroundimage(self): -+ widget = self.create() -+ self.checkImageParam(widget, 'backgroundimage') -+ - def test_configure_bitmap(self): - widget = self.create() - self.checkParam(widget, 'bitmap', 'questhead') -@@ -262,9 +280,14 @@ - def test_configure_borderwidth(self): - widget = self.create() - self.checkPixelsParam(widget, 'borderwidth', -- 0, 1.3, 2.6, 6, -2, '10p') -+ 0, 1.3, 2.6, 6, '10p') -+ expected = 0 if self._clip_borderwidth else -2 -+ self.checkParam(widget, 'borderwidth', -2, expected=expected, -+ conv=self._conv_pixels) - if 'bd' in self.OPTIONS: -- self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, -2, '10p') -+ self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, '10p') -+ self.checkParam(widget, 'bd', -2, expected=expected, -+ conv=self._conv_pixels) - - def test_configure_compound(self): - widget = self.create() -@@ -287,8 +310,10 @@ - widget = self.create() - self.checkParam(widget, 'font', - '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') -- self.checkInvalidParam(widget, 'font', '', -- errmsg='font "" doesn\'t exist') -+ is_ttk = widget.__class__.__module__ == 'tkinter.ttk' -+ if not is_ttk: -+ self.checkInvalidParam(widget, 'font', '', -+ errmsg='font "" 
doesn\'t exist') - - def test_configure_foreground(self): - widget = self.create() -@@ -308,7 +333,8 @@ - widget = self.create() - self.checkPixelsParam(widget, 'highlightthickness', - 0, 1.3, 2.6, 6, '10p') -- self.checkParam(widget, 'highlightthickness', -2, expected=0, -+ expected = 0 if self._clip_highlightthickness else -2 -+ self.checkParam(widget, 'highlightthickness', -2, expected=expected, - conv=self._conv_pixels) - - def test_configure_image(self): -@@ -342,12 +368,11 @@ - - def test_configure_justify(self): - widget = self.create() -- self.checkEnumParam(widget, 'justify', 'left', 'right', 'center', -- errmsg='bad justification "{}": must be ' -- 'left, right, or center') -- self.checkInvalidParam(widget, 'justify', '', -- errmsg='ambiguous justification "": must be ' -- 'left, right, or center') -+ values = ('left', 'right', 'center') -+ if self._allow_empty_justify: -+ values += ('',) -+ self.checkEnumParam(widget, 'justify', *values, -+ fullname='justification') - - def test_configure_orient(self): - widget = self.create() -@@ -356,13 +381,29 @@ - - def test_configure_padx(self): - widget = self.create() -- self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, -2, '12m', -+ self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) -+ expected = 0 if self._clip_pad else -2 -+ self.checkParam(widget, 'padx', -2, expected=expected, -+ conv=self._conv_pad_pixels) - - def test_configure_pady(self): - widget = self.create() -- self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, -2, '12m', -+ self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) -+ expected = 0 if self._clip_pad else -2 -+ self.checkParam(widget, 'pady', -2, expected=expected, -+ conv=self._conv_pad_pixels) -+ -+ @requires_tk(8, 7) -+ def test_configure_placeholder(self): -+ widget = self.create() -+ self.checkParam(widget, 'placeholder', 'xxx') -+ -+ @requires_tk(8, 7) -+ def test_configure_placeholderforeground(self): -+ widget = self.create() -+ self.checkColorParam(widget, 'placeholderforeground') - - def test_configure_relief(self): - widget = self.create() -@@ -409,13 +450,35 @@ - var = tkinter.StringVar(self.root) - self.checkVariableParam(widget, 'textvariable', var) - -+ @requires_tk(8, 7) -+ def test_configure_tile(self): -+ widget = self.create() -+ self.checkBooleanParam(widget, 'tile') -+ - def test_configure_troughcolor(self): - widget = self.create() - self.checkColorParam(widget, 'troughcolor') - - def test_configure_underline(self): - widget = self.create() -- self.checkIntegerParam(widget, 'underline', 0, 1, 10) -+ self.checkParams(widget, 'underline', 0, 1, 10) -+ if tk_version >= (8, 7): -+ is_ttk = widget.__class__.__module__ == 'tkinter.ttk' -+ self.checkParam(widget, 'underline', '', -+ expected='' if is_ttk else self._default_pixels) -+ self.checkParam(widget, 'underline', '5+2', -+ expected='5+2' if is_ttk else 7) -+ self.checkParam(widget, 'underline', '5-2', -+ expected='5-2' if is_ttk else 3) -+ self.checkParam(widget, 'underline', 'end', expected='end') -+ self.checkParam(widget, 'underline', 'end-2', expected='end-2') -+ errmsg = (r'bad index "{}": must be integer\?\[\+-\]integer\?, ' -+ r'end\?\[\+-\]integer\?, or ""') -+ else: -+ errmsg = 'expected integer but got "{}"' -+ self.checkInvalidParam(widget, 'underline', '', errmsg=errmsg) -+ self.checkInvalidParam(widget, 'underline', '10p', errmsg=errmsg) -+ self.checkInvalidParam(widget, 'underline', 3.2, errmsg=errmsg) - - def test_configure_wraplength(self): - widget = 
self.create() -@@ -445,7 +508,8 @@ - - def test_configure_overrelief(self): - widget = self.create() -- self.checkReliefParam(widget, 'overrelief') -+ self.checkReliefParam(widget, 'overrelief', -+ allow_empty=(tk_version >= (8, 7))) - - def test_configure_selectcolor(self): - widget = self.create() -diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py -index c52b58b4ffd..84741e308bf 100644 ---- a/Lib/test/test_tokenize.py -+++ b/Lib/test/test_tokenize.py -@@ -233,7 +233,7 @@ - """) - - def test_float(self): -- # Floating point numbers -+ # Floating-point numbers - self.check_tokenize("x = 3.14159", """\ - NAME 'x' (1, 0) (1, 1) - OP '=' (1, 2) (1, 3) -@@ -1204,6 +1204,31 @@ - NAME 'x' (1, 3) (1, 4) - """) - -+ def test_multiline_non_ascii_fstring(self): -+ self.check_tokenize("""\ -+a = f''' -+ Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli'''""", """\ -+ NAME 'a' (1, 0) (1, 1) -+ OP '=' (1, 2) (1, 3) -+ FSTRING_START "f\'\'\'" (1, 4) (1, 8) -+ FSTRING_MIDDLE '\\n Autorzy, którzy tą jednostkę mają wpisani jako AKTUALNA -- czyli' (1, 8) (2, 68) -+ FSTRING_END "\'\'\'" (2, 68) (2, 71) -+ """) -+ -+ def test_multiline_non_ascii_fstring_with_expr(self): -+ self.check_tokenize("""\ -+f''' -+ 🔗 This is a test {test_arg1}🔗 -+🔗'''""", """\ -+ FSTRING_START "f\'\'\'" (1, 0) (1, 4) -+ FSTRING_MIDDLE '\\n 🔗 This is a test ' (1, 4) (2, 21) -+ OP '{' (2, 21) (2, 22) -+ NAME 'test_arg1' (2, 22) (2, 31) -+ OP '}' (2, 31) (2, 32) -+ FSTRING_MIDDLE '🔗\\n🔗' (2, 32) (3, 1) -+ FSTRING_END "\'\'\'" (3, 1) (3, 4) -+ """) -+ - class GenerateTokensTest(TokenizeTest): - def check_tokenize(self, s, expected): - # Format the tokens in s in a table format. -diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py -index 308bbba1ff6..eee39162054 100644 ---- a/Lib/test/test_ttk/test_widgets.py -+++ b/Lib/test/test_ttk/test_widgets.py -@@ -5,8 +5,9 @@ - import sys - - from test.test_ttk_textonly import MockTclObj --from test.test_tkinter.support import (AbstractTkTest, tk_version, get_tk_patchlevel, -- simulate_mouse_click, AbstractDefaultRootTest) -+from test.test_tkinter.support import ( -+ AbstractTkTest, requires_tk, tk_version, get_tk_patchlevel, -+ simulate_mouse_click, AbstractDefaultRootTest) - from test.test_tkinter.widget_tests import (add_standard_options, - AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests) - -@@ -44,6 +45,10 @@ - self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p')) - self.checkParam(widget, 'padding', (), expected='') - -+ def test_configure_state(self): -+ widget = self.create() -+ self.checkParams(widget, 'state', 'active', 'disabled', 'readonly') -+ - def test_configure_style(self): - widget = self.create() - self.assertEqual(widget['style'], '') -@@ -57,6 +62,11 @@ - self.assertEqual(widget2['class'], 'Foo') - # XXX - -+ def test_configure_relief(self): -+ widget = self.create() -+ self.checkReliefParam(widget, 'relief', -+ allow_empty=(tk_version >= (8, 7))) -+ - - class WidgetTest(AbstractTkTest, unittest.TestCase): - """Tests methods available in every ttk widget.""" -@@ -157,6 +167,7 @@ - - - class AbstractLabelTest(AbstractWidgetTest): -+ _allow_empty_justify = True - - def checkImageParam(self, widget, name): - image = tkinter.PhotoImage(master=self.root, name='image1') -@@ -172,17 +183,13 @@ - errmsg='image "spam" doesn\'t exist') - - def test_configure_compound(self): -- options = 'none text image center top bottom left right'.split() -- errmsg = ( -- 'bad compound "{}": must be' -- f' 
{", ".join(options[:-1])}, or {options[-1]}' -- ) -+ values = ('none', 'text', 'image', 'center', 'top', 'bottom', 'left', 'right') -+ if tk_version >= (8, 7): -+ values += ('',) - widget = self.create() -- self.checkEnumParam(widget, 'compound', *options, errmsg=errmsg) -+ self.checkEnumParam(widget, 'compound', *values, allow_empty=True) - -- def test_configure_state(self): -- widget = self.create() -- self.checkParams(widget, 'state', 'active', 'disabled', 'normal') -+ test_configure_justify = requires_tk(8, 7)(StandardOptionsTests.test_configure_justify) - - def test_configure_width(self): - widget = self.create() -@@ -199,21 +206,19 @@ - 'underline', 'width', 'wraplength', - ) - _conv_pixels = False -+ _allow_empty_justify = tk_version >= (8, 7) - - def create(self, **kwargs): - return ttk.Label(self.root, **kwargs) - -- def test_configure_font(self): -- widget = self.create() -- self.checkParam(widget, 'font', -- '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') -+ test_configure_justify = StandardOptionsTests.test_configure_justify - - - @add_standard_options(StandardTtkOptionsTests) - class ButtonTest(AbstractLabelTest, unittest.TestCase): - OPTIONS = ( - 'class', 'command', 'compound', 'cursor', 'default', -- 'image', 'padding', 'state', 'style', -+ 'image', 'justify', 'padding', 'state', 'style', - 'takefocus', 'text', 'textvariable', - 'underline', 'width', - ) -@@ -223,7 +228,9 @@ - - def test_configure_default(self): - widget = self.create() -- self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled') -+ values = ('normal', 'active', 'disabled') -+ self.checkEnumParam(widget, 'default', *values, -+ sort=tk_version >= (8, 7)) - - def test_invoke(self): - success = [] -@@ -236,7 +243,7 @@ - class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): - OPTIONS = ( - 'class', 'command', 'compound', 'cursor', -- 'image', -+ 'image', 'justify', - 'offvalue', 'onvalue', - 'padding', 'state', 'style', - 'takefocus', 'text', 'textvariable', -@@ -275,7 +282,10 @@ - - cbtn['command'] = '' - res = cbtn.invoke() -- self.assertFalse(str(res)) -+ if tk_version >= (8, 7) and self.wantobjects: -+ self.assertEqual(res, ()) -+ else: -+ self.assertEqual(str(res), '') - self.assertLessEqual(len(success), 1) - self.assertEqual(cbtn['offvalue'], - cbtn.tk.globalgetvar(cbtn['variable'])) -@@ -322,6 +332,7 @@ - 'background', 'class', 'cursor', - 'exportselection', 'font', 'foreground', - 'invalidcommand', 'justify', -+ 'placeholder', 'placeholderforeground', - 'show', 'state', 'style', 'takefocus', 'textvariable', - 'validate', 'validatecommand', 'width', 'xscrollcommand', - ) -@@ -344,11 +355,6 @@ - self.checkParam(widget, 'show', '') - self.checkParam(widget, 'show', ' ') - -- def test_configure_state(self): -- widget = self.create() -- self.checkParams(widget, 'state', -- 'disabled', 'normal', 'readonly') -- - def test_configure_validate(self): - widget = self.create() - self.checkEnumParam(widget, 'validate', -@@ -449,7 +455,8 @@ - OPTIONS = ( - 'background', 'class', 'cursor', 'exportselection', - 'font', 'foreground', 'height', 'invalidcommand', -- 'justify', 'postcommand', 'show', 'state', 'style', -+ 'justify', 'placeholder', 'placeholderforeground', 'postcommand', -+ 'show', 'state', 'style', - 'takefocus', 'textvariable', - 'validate', 'validatecommand', 'values', - 'width', 'xscrollcommand', -@@ -513,7 +520,7 @@ - self.assertEqual(self.combo.get(), getval) - self.assertEqual(self.combo.current(), currval) - -- self.assertEqual(self.combo['values'], '') -+ 
self.assertIn(self.combo['values'], ((), '')) - check_get_current('', -1) - - self.checkParam(self.combo, 'values', 'mon tue wed thur', -@@ -638,8 +645,14 @@ - child2 = ttk.Label(self.root) - child3 = ttk.Label(self.root) - -- self.assertRaises(tkinter.TclError, self.paned.insert, 0, child) -+ if tk_version >= (8, 7): -+ self.paned.insert(0, child) -+ self.assertEqual(self.paned.panes(), (str(child),)) -+ self.paned.forget(0) -+ else: -+ self.assertRaises(tkinter.TclError, self.paned.insert, 0, child) - -+ self.assertEqual(self.paned.panes(), ()) - self.paned.insert('end', child2) - self.paned.insert(0, child) - self.assertEqual(self.paned.panes(), (str(child), str(child2))) -@@ -703,7 +716,7 @@ - class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): - OPTIONS = ( - 'class', 'command', 'compound', 'cursor', -- 'image', -+ 'image', 'justify', - 'padding', 'state', 'style', - 'takefocus', 'text', 'textvariable', - 'underline', 'value', 'variable', 'width', -@@ -742,7 +755,10 @@ - - cbtn2['command'] = '' - res = cbtn2.invoke() -- self.assertEqual(str(res), '') -+ if tk_version >= (8, 7) and self.wantobjects: -+ self.assertEqual(res, ()) -+ else: -+ self.assertEqual(str(res), '') - self.assertLessEqual(len(success), 1) - self.assertEqual(conv(cbtn2['value']), myvar.get()) - self.assertEqual(myvar.get(), -@@ -754,7 +770,7 @@ - class MenubuttonTest(AbstractLabelTest, unittest.TestCase): - OPTIONS = ( - 'class', 'compound', 'cursor', 'direction', -- 'image', 'menu', 'padding', 'state', 'style', -+ 'image', 'justify', 'menu', 'padding', 'state', 'style', - 'takefocus', 'text', 'textvariable', - 'underline', 'width', - ) -@@ -762,10 +778,11 @@ - def create(self, **kwargs): - return ttk.Menubutton(self.root, **kwargs) - -- def test_direction(self): -+ def test_configure_direction(self): - widget = self.create() -- self.checkEnumParam(widget, 'direction', -- 'above', 'below', 'left', 'right', 'flush') -+ values = ('above', 'below', 'left', 'right', 'flush') -+ self.checkEnumParam(widget, 'direction', *values, -+ sort=tk_version >= (8, 7)) - - def test_configure_menu(self): - widget = self.create() -@@ -778,7 +795,7 @@ - class ScaleTest(AbstractWidgetTest, unittest.TestCase): - OPTIONS = ( - 'class', 'command', 'cursor', 'from', 'length', -- 'orient', 'style', 'takefocus', 'to', 'value', 'variable', -+ 'orient', 'state', 'style', 'takefocus', 'to', 'value', 'variable', - ) - _conv_pixels = False - default_orient = 'horizontal' -@@ -800,6 +817,8 @@ - widget = self.create() - self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i') - -+ test_configure_state = requires_tk(8, 6, 9)(StandardTtkOptionsTests.test_configure_state) -+ - def test_configure_to(self): - widget = self.create() - self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False) -@@ -883,16 +902,28 @@ - @add_standard_options(StandardTtkOptionsTests) - class ProgressbarTest(AbstractWidgetTest, unittest.TestCase): - OPTIONS = ( -- 'class', 'cursor', 'orient', 'length', -- 'mode', 'maximum', 'phase', -+ 'anchor', 'class', 'cursor', 'font', 'foreground', 'justify', -+ 'orient', 'length', -+ 'mode', 'maximum', 'phase', 'text', 'wraplength', - 'style', 'takefocus', 'value', 'variable', - ) - _conv_pixels = False -+ _allow_empty_justify = True - default_orient = 'horizontal' - - def create(self, **kwargs): - return ttk.Progressbar(self.root, **kwargs) - -+ @requires_tk(8, 7) -+ def test_configure_anchor(self): -+ widget = self.create() -+ self.checkEnumParam(widget, 'anchor', -+ 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 
'center', '') -+ -+ test_configure_font = requires_tk(8, 7)(StandardOptionsTests.test_configure_font) -+ test_configure_foreground = requires_tk(8, 7)(StandardOptionsTests.test_configure_foreground) -+ test_configure_justify = requires_tk(8, 7)(StandardTtkOptionsTests.test_configure_justify) -+ - def test_configure_length(self): - widget = self.create() - self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i') -@@ -909,11 +940,15 @@ - # XXX - pass - -+ test_configure_text = requires_tk(8, 7)(StandardOptionsTests.test_configure_text) -+ - def test_configure_value(self): - widget = self.create() - self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10, - conv=False) - -+ test_configure_wraplength = requires_tk(8, 7)(StandardOptionsTests.test_configure_wraplength) -+ - - @unittest.skipIf(sys.platform == 'darwin', - 'ttk.Scrollbar is special on MacOSX') -@@ -928,11 +963,14 @@ - return ttk.Scrollbar(self.root, **kwargs) - - --@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) -+@add_standard_options(PixelSizeTests if tk_version >= (8, 7) else IntegerSizeTests, -+ StandardTtkOptionsTests) - class NotebookTest(AbstractWidgetTest, unittest.TestCase): - OPTIONS = ( - 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width', - ) -+ if tk_version >= (8, 7): -+ _conv_pixels = False - - def setUp(self): - super().setUp() -@@ -1051,7 +1089,11 @@ - self.nb.insert(self.child1, child3) - self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs) - self.nb.forget(child3) -- self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3) -+ if tk_version >= (8, 7): -+ self.nb.insert(2, child3) -+ self.assertEqual(self.nb.tabs(), (*tabs, str(child3))) -+ else: -+ self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3) - self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3) - - # bad inserts -@@ -1143,7 +1185,9 @@ - OPTIONS = ( - 'background', 'class', 'command', 'cursor', 'exportselection', - 'font', 'foreground', 'format', 'from', 'increment', -- 'invalidcommand', 'justify', 'show', 'state', 'style', -+ 'invalidcommand', 'justify', -+ 'placeholder', 'placeholderforeground', -+ 'show', 'state', 'style', - 'takefocus', 'textvariable', 'to', 'validate', 'validatecommand', - 'values', 'width', 'wrap', 'xscrollcommand', - ) -@@ -1317,8 +1361,9 @@ - class TreeviewTest(AbstractWidgetTest, unittest.TestCase): - OPTIONS = ( - 'class', 'columns', 'cursor', 'displaycolumns', -- 'height', 'padding', 'selectmode', 'show', -- 'style', 'takefocus', 'xscrollcommand', 'yscrollcommand', -+ 'height', 'padding', 'selectmode', 'selecttype', 'show', 'striped', -+ 'style', 'takefocus', 'titlecolumns', 'titleitems', -+ 'xscrollcommand', 'yscrollcommand', - ) - - def setUp(self): -@@ -1333,7 +1378,8 @@ - self.checkParam(widget, 'columns', 'a b c', - expected=('a', 'b', 'c')) - self.checkParam(widget, 'columns', ('a', 'b', 'c')) -- self.checkParam(widget, 'columns', '') -+ self.checkParam(widget, 'columns', '', -+ expected=() if tk_version >= (8, 7) else '') - - def test_configure_displaycolumns(self): - widget = self.create() -@@ -1345,11 +1391,12 @@ - expected=('#all',)) - self.checkParam(widget, 'displaycolumns', (2, 1, 0)) - self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'), -- errmsg='Invalid column index d') -+ errmsg='Invalid column index "?d"?') -+ errmsg = 'Column index "?{}"? 
out of bounds' - self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3), -- errmsg='Column index 3 out of bounds') -+ errmsg=errmsg.format(3)) - self.checkInvalidParam(widget, 'displaycolumns', (1, -2), -- errmsg='Column index -2 out of bounds') -+ errmsg=errmsg.format(-2)) - - def test_configure_height(self): - widget = self.create() -@@ -1361,6 +1408,11 @@ - self.checkEnumParam(widget, 'selectmode', - 'none', 'browse', 'extended') - -+ @requires_tk(8, 7) -+ def test_configure_selecttype(self): -+ widget = self.create() -+ self.checkEnumParam(widget, 'selecttype', 'item', 'cell') -+ - def test_configure_show(self): - widget = self.create() - self.checkParam(widget, 'show', 'tree headings', -@@ -1370,6 +1422,23 @@ - self.checkParam(widget, 'show', 'tree', expected=('tree',)) - self.checkParam(widget, 'show', 'headings', expected=('headings',)) - -+ @requires_tk(8, 7) -+ def test_configure_striped(self): -+ widget = self.create() -+ self.checkBooleanParam(widget, 'striped') -+ -+ @requires_tk(8, 7) -+ def test_configure_titlecolumns(self): -+ widget = self.create() -+ self.checkIntegerParam(widget, 'titlecolumns', 0, 1, 5) -+ self.checkInvalidParam(widget, 'titlecolumns', -2) -+ -+ @requires_tk(8, 7) -+ def test_configure_titleitems(self): -+ widget = self.create() -+ self.checkIntegerParam(widget, 'titleitems', 0, 1, 5) -+ self.checkInvalidParam(widget, 'titleitems', -2) -+ - def test_bbox(self): - self.tv.pack() - self.assertEqual(self.tv.bbox(''), '') -diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py -index 5ffe4085f09..43949770b16 100644 ---- a/Lib/test/test_types.py -+++ b/Lib/test/test_types.py -@@ -1,6 +1,10 @@ - # Python test set -- part 6, built-in types - --from test.support import run_with_locale, cpython_only, MISSING_C_DOCSTRINGS -+from test.support import ( -+ run_with_locale, cpython_only, iter_builtin_types, iter_slot_wrappers, -+ MISSING_C_DOCSTRINGS, -+) -+from test.test_import import no_rerun - import collections.abc - from collections import namedtuple - import copy -@@ -9,6 +13,7 @@ - import pickle - import locale - import sys -+import textwrap - import types - import unittest.mock - import weakref -@@ -2252,5 +2257,51 @@ - 'close', 'throw'})) - - -+class SubinterpreterTests(unittest.TestCase): -+ -+ @classmethod -+ def setUpClass(cls): -+ global interpreters -+ try: -+ from test.support import interpreters -+ except ModuleNotFoundError: -+ raise unittest.SkipTest('subinterpreters required') -+ -+ @cpython_only -+ @no_rerun('channels (and queues) might have a refleak; see gh-122199') -+ def test_static_types_inherited_slots(self): -+ rch, sch = interpreters.create_channel() -+ -+ slots = [] -+ script = '' -+ for cls in iter_builtin_types(): -+ for slot, own in iter_slot_wrappers(cls): -+ slots.append((cls, slot, own)) -+ attr = f'{cls.__name__}.{slot}' -+ script += textwrap.dedent(f""" -+ sch.send_nowait('{attr}: ' + repr({attr})) -+ """) -+ -+ exec(script) -+ all_expected = [] -+ for cls, slot, _ in slots: -+ result = rch.recv() -+ assert result.startswith(f'{cls.__name__}.{slot}: '), (cls, slot, result) -+ all_expected.append(result) -+ -+ interp = interpreters.create() -+ interp.run(textwrap.dedent(f""" -+ from test.support import interpreters -+ sch = interpreters.SendChannel({sch.id}) -+ """)) -+ interp.run(script) -+ -+ for i, (cls, slot, _) in enumerate(slots): -+ with self.subTest(cls=cls, slot=slot): -+ expected = all_expected[i] -+ result = rch.recv() -+ self.assertEqual(result, expected) -+ -+ - if __name__ == '__main__': - unittest.main() 
-diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py -index 4cf8d498fcc..106bd81b69e 100644 ---- a/Lib/test/test_typing.py -+++ b/Lib/test/test_typing.py -@@ -579,6 +579,41 @@ - self.assertEqual(T.__name__, "T") - self.assertEqual(T.__constraints__, ()) - self.assertIs(T.__bound__, None) -+ self.assertIs(T.__covariant__, False) -+ self.assertIs(T.__contravariant__, False) -+ self.assertIs(T.__infer_variance__, False) -+ -+ T = TypeVar(name="T", bound=type) -+ self.assertEqual(T.__name__, "T") -+ self.assertEqual(T.__constraints__, ()) -+ self.assertIs(T.__bound__, type) -+ self.assertIs(T.__covariant__, False) -+ self.assertIs(T.__contravariant__, False) -+ self.assertIs(T.__infer_variance__, False) -+ -+ T = TypeVar(name="T", covariant=True) -+ self.assertEqual(T.__name__, "T") -+ self.assertEqual(T.__constraints__, ()) -+ self.assertIs(T.__bound__, None) -+ self.assertIs(T.__covariant__, True) -+ self.assertIs(T.__contravariant__, False) -+ self.assertIs(T.__infer_variance__, False) -+ -+ T = TypeVar(name="T", contravariant=True) -+ self.assertEqual(T.__name__, "T") -+ self.assertEqual(T.__constraints__, ()) -+ self.assertIs(T.__bound__, None) -+ self.assertIs(T.__covariant__, False) -+ self.assertIs(T.__contravariant__, True) -+ self.assertIs(T.__infer_variance__, False) -+ -+ T = TypeVar(name="T", infer_variance=True) -+ self.assertEqual(T.__name__, "T") -+ self.assertEqual(T.__constraints__, ()) -+ self.assertIs(T.__bound__, None) -+ self.assertIs(T.__covariant__, False) -+ self.assertIs(T.__contravariant__, False) -+ self.assertIs(T.__infer_variance__, True) - - - def template_replace(templates: list[str], replacements: dict[str, list[str]]) -> list[tuple[str]]: -@@ -4531,20 +4566,30 @@ - {'x': list[list[ForwardRef('X')]]} - ) - -- def test_pep695_generic_with_future_annotations(self): -+ def test_pep695_generic_class_with_future_annotations(self): -+ original_globals = dict(ann_module695.__dict__) -+ - hints_for_A = get_type_hints(ann_module695.A) - A_type_params = ann_module695.A.__type_params__ - self.assertIs(hints_for_A["x"], A_type_params[0]) - self.assertEqual(hints_for_A["y"].__args__[0], Unpack[A_type_params[1]]) - self.assertIs(hints_for_A["z"].__args__[0], A_type_params[2]) - -+ # should not have changed as a result of the get_type_hints() calls! 
-+ self.assertEqual(ann_module695.__dict__, original_globals) -+ -+ def test_pep695_generic_class_with_future_annotations_and_local_shadowing(self): - hints_for_B = get_type_hints(ann_module695.B) -- self.assertEqual(hints_for_B.keys(), {"x", "y", "z"}) -+ self.assertEqual(hints_for_B, {"x": int, "y": str, "z": bytes}) -+ -+ def test_pep695_generic_class_with_future_annotations_name_clash_with_global_vars(self): -+ hints_for_C = get_type_hints(ann_module695.C) - self.assertEqual( -- set(hints_for_B.values()) ^ set(ann_module695.B.__type_params__), -- set() -+ set(hints_for_C.values()), -+ set(ann_module695.C.__type_params__) - ) - -+ def test_pep_695_generic_function_with_future_annotations(self): - hints_for_generic_function = get_type_hints(ann_module695.generic_function) - func_t_params = ann_module695.generic_function.__type_params__ - self.assertEqual( -@@ -4555,6 +4600,54 @@ - self.assertIs(hints_for_generic_function["z"].__origin__, func_t_params[2]) - self.assertIs(hints_for_generic_function["zz"].__origin__, func_t_params[2]) - -+ def test_pep_695_generic_function_with_future_annotations_name_clash_with_global_vars(self): -+ self.assertEqual( -+ set(get_type_hints(ann_module695.generic_function_2).values()), -+ set(ann_module695.generic_function_2.__type_params__) -+ ) -+ -+ def test_pep_695_generic_method_with_future_annotations(self): -+ hints_for_generic_method = get_type_hints(ann_module695.D.generic_method) -+ params = { -+ param.__name__: param -+ for param in ann_module695.D.generic_method.__type_params__ -+ } -+ self.assertEqual( -+ hints_for_generic_method, -+ {"x": params["Foo"], "y": params["Bar"], "return": types.NoneType} -+ ) -+ -+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_vars(self): -+ self.assertEqual( -+ set(get_type_hints(ann_module695.D.generic_method_2).values()), -+ set(ann_module695.D.generic_method_2.__type_params__) -+ ) -+ -+ def test_pep_695_generics_with_future_annotations_nested_in_function(self): -+ results = ann_module695.nested() -+ -+ self.assertEqual( -+ set(results.hints_for_E.values()), -+ set(results.E.__type_params__) -+ ) -+ self.assertEqual( -+ set(results.hints_for_E_meth.values()), -+ set(results.E.generic_method.__type_params__) -+ ) -+ self.assertNotEqual( -+ set(results.hints_for_E_meth.values()), -+ set(results.E.__type_params__) -+ ) -+ self.assertEqual( -+ set(results.hints_for_E_meth.values()).intersection(results.E.__type_params__), -+ set() -+ ) -+ -+ self.assertEqual( -+ set(results.hints_for_generic_func.values()), -+ set(results.generic_func.__type_params__) -+ ) -+ - def test_extended_generic_rules_subclassing(self): - class T1(Tuple[T, KT]): ... - class T2(Tuple[T, ...]): ... 
diff --git a/Lib/test/test_unicode_file_functions.py b/Lib/test/test_unicode_file_functions.py index 47619c8807b..25c16e3a0b7 100644 --- a/Lib/test/test_unicode_file_functions.py @@ -33316,163 +3603,6 @@ index 47619c8807b..25c16e3a0b7 100644 def test_listdir(self): sf0 = set(self.files) with warnings.catch_warnings(): -diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py -index ed5eb5609a5..82a442a04e6 100644 ---- a/Lib/test/test_unittest/test_case.py -+++ b/Lib/test/test_unittest/test_case.py -@@ -1132,6 +1132,8 @@ - # need to remove the first line of the error message - error = str(e).split('\n', 1)[1] - self.assertEqual(sample_text_error, error) -+ else: -+ self.fail(f'{self.failureException} not raised') - - def testAssertEqualSingleLine(self): - sample_text = "laden swallows fly slowly" -@@ -1148,6 +1150,8 @@ - # need to remove the first line of the error message - error = str(e).split('\n', 1)[1] - self.assertEqual(sample_text_error, error) -+ else: -+ self.fail(f'{self.failureException} not raised') - - def testAssertEqualwithEmptyString(self): - '''Verify when there is an empty string involved, the diff output -@@ -1165,6 +1169,8 @@ - # need to remove the first line of the error message - error = str(e).split('\n', 1)[1] - self.assertEqual(sample_text_error, error) -+ else: -+ self.fail(f'{self.failureException} not raised') - - def testAssertEqualMultipleLinesMissingNewlineTerminator(self): - '''Verifying format of diff output from assertEqual involving strings -@@ -1185,6 +1191,8 @@ - # need to remove the first line of the error message - error = str(e).split('\n', 1)[1] - self.assertEqual(sample_text_error, error) -+ else: -+ self.fail(f'{self.failureException} not raised') - - def testAssertEqualMultipleLinesMismatchedNewlinesTerminators(self): - '''Verifying format of diff output from assertEqual involving strings -@@ -1208,6 +1216,8 @@ - # need to remove the first line of the error message - error = str(e).split('\n', 1)[1] - self.assertEqual(sample_text_error, error) -+ else: -+ self.fail(f'{self.failureException} not raised') - - def testEqualityBytesWarning(self): - if sys.flags.bytes_warning: ---- /dev/null -+++ b/Lib/test/test_unittest/test_util.py -@@ -0,0 +1,33 @@ -+import unittest -+from unittest.util import safe_repr, sorted_list_difference, unorderable_list_difference -+ -+ -+class TestUtil(unittest.TestCase): -+ def test_safe_repr(self): -+ class RaisingRepr: -+ def __repr__(self): -+ raise ValueError("Invalid repr()") -+ -+ class LongRepr: -+ def __repr__(self): -+ return 'x' * 100 -+ -+ safe_repr(RaisingRepr()) -+ self.assertEqual(safe_repr('foo'), "'foo'") -+ self.assertEqual(safe_repr(LongRepr(), short=True), 'x'*80 + ' [truncated]...') -+ -+ def test_sorted_list_difference(self): -+ self.assertEqual(sorted_list_difference([], []), ([], [])) -+ self.assertEqual(sorted_list_difference([1, 2], [2, 3]), ([1], [3])) -+ self.assertEqual(sorted_list_difference([1, 2], [1, 3]), ([2], [3])) -+ self.assertEqual(sorted_list_difference([1, 1, 1], [1, 2, 3]), ([], [2, 3])) -+ self.assertEqual(sorted_list_difference([4], [1, 2, 3, 4]), ([], [1, 2, 3])) -+ self.assertEqual(sorted_list_difference([1, 1], [2]), ([1], [2])) -+ self.assertEqual(sorted_list_difference([2], [1, 1]), ([2], [1])) -+ self.assertEqual(sorted_list_difference([1, 2], [1, 1]), ([2], [])) -+ -+ def test_unorderable_list_difference(self): -+ self.assertEqual(unorderable_list_difference([], []), ([], [])) -+ self.assertEqual(unorderable_list_difference([1, 2], []), ([2, 1], 
[])) -+ self.assertEqual(unorderable_list_difference([], [1, 2]), ([], [1, 2])) -+ self.assertEqual(unorderable_list_difference([1, 2], [1, 3]), ([2], [3])) -diff --git a/Lib/test/test_unittest/testmock/support.py b/Lib/test/test_unittest/testmock/support.py -index 49986d65dc4..6c535b7944f 100644 ---- a/Lib/test/test_unittest/testmock/support.py -+++ b/Lib/test/test_unittest/testmock/support.py -@@ -14,3 +14,14 @@ - - class X(object): - pass -+ -+# A standin for weurkzeug.local.LocalProxy - issue 119600 -+def _inaccessible(*args, **kwargs): -+ raise AttributeError -+ -+ -+class OpaqueProxy: -+ __getattribute__ = _inaccessible -+ -+ -+g = OpaqueProxy() -diff --git a/Lib/test/test_unittest/testmock/testhelpers.py b/Lib/test/test_unittest/testmock/testhelpers.py -index 74785a83757..c9c20f008ca 100644 ---- a/Lib/test/test_unittest/testmock/testhelpers.py -+++ b/Lib/test/test_unittest/testmock/testhelpers.py -@@ -1127,6 +1127,14 @@ - p.assert_called_once_with() - - -+ def test_propertymock_attach(self): -+ m = Mock() -+ p = PropertyMock() -+ type(m).foo = p -+ m.attach_mock(p, 'foo') -+ self.assertEqual(m.mock_calls, []) -+ -+ - class TestCallablePredicate(unittest.TestCase): - - def test_type(self): -diff --git a/Lib/test/test_unittest/testmock/testmock.py b/Lib/test/test_unittest/testmock/testmock.py -index 165e2c044d8..1eb1a1bf03a 100644 ---- a/Lib/test/test_unittest/testmock/testmock.py -+++ b/Lib/test/test_unittest/testmock/testmock.py -@@ -118,6 +118,11 @@ - # pass kwargs with respect to the parent mock. - self.assertEqual(class_mock().return_value.meth.side_effect, None) - -+ def test_create_autospec_correctly_handles_name(self): -+ class X: ... -+ mock = create_autospec(X, spec_set=True, name="Y") -+ self.assertEqual(mock._mock_name, "Y") -+ - def test_repr(self): - mock = Mock(name='foo') - self.assertIn('foo', repr(mock)) -diff --git a/Lib/test/test_unittest/testmock/testpatch.py b/Lib/test/test_unittest/testmock/testpatch.py -index be75fda7826..f26e74ce0bc 100644 ---- a/Lib/test/test_unittest/testmock/testpatch.py -+++ b/Lib/test/test_unittest/testmock/testpatch.py -@@ -2045,6 +2045,13 @@ - with self.assertRaises(TypeError): - test() - -+ def test_patch_proxy_object(self): -+ @patch("test.test_unittest.testmock.support.g", new_callable=MagicMock()) -+ def test(_): -+ pass -+ -+ test() -+ - - if __name__ == '__main__': - unittest.main() diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 69cf1dc7aef..6febb491788 100644 --- a/Lib/test/test_urllib2.py @@ -33493,84 +3623,8 @@ index 69cf1dc7aef..6febb491788 100644 def test_http_body_pipe(self): # A file reading from a pipe. # A pipe cannot be seek'ed. 
There is no way to determine the -diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py -index 4faad733245..818e7e93dbb 100644 ---- a/Lib/test/test_urlparse.py -+++ b/Lib/test/test_urlparse.py -@@ -207,6 +207,9 @@ - ('scheme://///path/to/file', - ('scheme', '', '///path/to/file', '', '', ''), - ('scheme', '', '///path/to/file', '', '')), -+ ('file:tmp/junk.txt', -+ ('file', '', 'tmp/junk.txt', '', '', ''), -+ ('file', '', 'tmp/junk.txt', '', '')), - ('file:///tmp/junk.txt', - ('file', '', '/tmp/junk.txt', '', '', ''), - ('file', '', '/tmp/junk.txt', '', '')), -@@ -216,6 +219,18 @@ - ('file://///tmp/junk.txt', - ('file', '', '///tmp/junk.txt', '', '', ''), - ('file', '', '///tmp/junk.txt', '', '')), -+ ('http:tmp/junk.txt', -+ ('http', '', 'tmp/junk.txt', '', '', ''), -+ ('http', '', 'tmp/junk.txt', '', '')), -+ ('http://example.com/tmp/junk.txt', -+ ('http', 'example.com', '/tmp/junk.txt', '', '', ''), -+ ('http', 'example.com', '/tmp/junk.txt', '', '')), -+ ('http:///example.com/tmp/junk.txt', -+ ('http', '', '/example.com/tmp/junk.txt', '', '', ''), -+ ('http', '', '/example.com/tmp/junk.txt', '', '')), -+ ('http:////example.com/tmp/junk.txt', -+ ('http', '', '//example.com/tmp/junk.txt', '', '', ''), -+ ('http', '', '//example.com/tmp/junk.txt', '', '')), - ('imap://mail.python.org/mbox1', - ('imap', 'mail.python.org', '/mbox1', '', '', ''), - ('imap', 'mail.python.org', '/mbox1', '', '')), -@@ -260,7 +275,8 @@ - ('', '', 'schème:path/to/file', '', '')), - ] - for url, parsed, split in str_cases + bytes_cases: -- self.checkRoundtrips(url, parsed, split) -+ with self.subTest(url): -+ self.checkRoundtrips(url, parsed, split) - - def test_roundtrips_normalization(self): - str_cases = [ -@@ -292,7 +308,8 @@ - tuple(x.encode('ascii') for x in t[3])) - bytes_cases = [_encode(x) for x in str_cases] - for url, url2, parsed, split in str_cases + bytes_cases: -- self.checkRoundtrips(url, parsed, split, url2) -+ with self.subTest(url): -+ self.checkRoundtrips(url, parsed, split, url2) - - def test_http_roundtrips(self): - # urllib.parse.urlsplit treats 'http:' as an optimized special case, -@@ -333,11 +350,17 @@ - self.checkRoundtrips(url, parsed, split) - - def checkJoin(self, base, relurl, expected): -- str_components = (base, relurl, expected) -- self.assertEqual(urllib.parse.urljoin(base, relurl), expected) -- bytes_components = baseb, relurlb, expectedb = [ -- x.encode('ascii') for x in str_components] -- self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) -+ with self.subTest(base=base, relurl=relurl): -+ self.assertEqual(urllib.parse.urljoin(base, relurl), expected) -+ baseb = base.encode('ascii') -+ relurlb = relurl.encode('ascii') -+ expectedb = expected.encode('ascii') -+ self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) -+ -+ relurl = urllib.parse.urlunsplit(urllib.parse.urlsplit(relurl)) -+ self.assertEqual(urllib.parse.urljoin(base, relurl), expected) -+ relurlb = urllib.parse.urlunsplit(urllib.parse.urlsplit(relurlb)) -+ self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) - - def test_unparse_parse(self): - str_cases = ['Python', './Python','x-newscheme://foo.com/stuff','x://y','x:/y','x:/','/',] diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py -index feaf9784549..d984c9f9ee5 100644 +index 83d03aa3bf0..d984c9f9ee5 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -19,8 +19,8 @@ @@ -33597,196 +3651,6 @@ index feaf9784549..d984c9f9ee5 100644 @requires_subprocess() def check_output(cmd, encoding=None): 
-@@ -782,6 +784,14 @@ - err = re.sub("^(WARNING: )?The directory .* or its parent directory " - "is not owned or is not writable by the current user.*$", "", - err, flags=re.MULTILINE) -+ # Ignore warning about missing optional module: -+ try: -+ import ssl -+ except ImportError: -+ err = re.sub( -+ "^WARNING: Disabling truststore since ssl support is missing$", -+ "", -+ err, flags=re.MULTILINE) - self.assertEqual(err.rstrip(), "") - # Being fairly specific regarding the expected behaviour for the - # initial bundling phase in Python 3.4. If the output changes in -diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py -index 83237f5fe0d..a86750ab1e8 100644 ---- a/Lib/test/test_warnings/__init__.py -+++ b/Lib/test/test_warnings/__init__.py -@@ -1,10 +1,12 @@ - from contextlib import contextmanager - import linecache - import os -+import importlib - from io import StringIO - import re - import sys - import textwrap -+import types - import unittest - from test import support - from test.support import import_helper -@@ -610,6 +612,97 @@ - self.module.warn('good warning category', MyWarningClass) - self.assertIsInstance(cm.warning, Warning) - -+ def check_module_globals(self, module_globals): -+ with original_warnings.catch_warnings(module=self.module, record=True) as w: -+ self.module.filterwarnings('default') -+ self.module.warn_explicit( -+ 'eggs', UserWarning, 'bar', 1, -+ module_globals=module_globals) -+ self.assertEqual(len(w), 1) -+ self.assertEqual(w[0].category, UserWarning) -+ self.assertEqual(str(w[0].message), 'eggs') -+ -+ def check_module_globals_error(self, module_globals, errmsg, errtype=ValueError): -+ if self.module is py_warnings: -+ self.check_module_globals(module_globals) -+ return -+ with original_warnings.catch_warnings(module=self.module, record=True) as w: -+ self.module.filterwarnings('always') -+ with self.assertRaisesRegex(errtype, re.escape(errmsg)): -+ self.module.warn_explicit( -+ 'eggs', UserWarning, 'bar', 1, -+ module_globals=module_globals) -+ self.assertEqual(len(w), 0) -+ -+ def check_module_globals_deprecated(self, module_globals, msg): -+ if self.module is py_warnings: -+ self.check_module_globals(module_globals) -+ return -+ with original_warnings.catch_warnings(module=self.module, record=True) as w: -+ self.module.filterwarnings('always') -+ self.module.warn_explicit( -+ 'eggs', UserWarning, 'bar', 1, -+ module_globals=module_globals) -+ self.assertEqual(len(w), 2) -+ self.assertEqual(w[0].category, DeprecationWarning) -+ self.assertEqual(str(w[0].message), msg) -+ self.assertEqual(w[1].category, UserWarning) -+ self.assertEqual(str(w[1].message), 'eggs') -+ -+ def test_gh86298_no_loader_and_no_spec(self): -+ self.check_module_globals({'__name__': 'bar'}) -+ -+ def test_gh86298_loader_is_none_and_no_spec(self): -+ self.check_module_globals({'__name__': 'bar', '__loader__': None}) -+ -+ def test_gh86298_no_loader_and_spec_is_none(self): -+ self.check_module_globals_error( -+ {'__name__': 'bar', '__spec__': None}, -+ 'Module globals is missing a __spec__.loader') -+ -+ def test_gh86298_loader_is_none_and_spec_is_none(self): -+ self.check_module_globals_error( -+ {'__name__': 'bar', '__loader__': None, '__spec__': None}, -+ 'Module globals is missing a __spec__.loader') -+ -+ def test_gh86298_loader_is_none_and_spec_loader_is_none(self): -+ self.check_module_globals_error( -+ {'__name__': 'bar', '__loader__': None, -+ '__spec__': types.SimpleNamespace(loader=None)}, -+ 'Module globals is missing a __spec__.loader') -+ -+ 
def test_gh86298_no_spec(self): -+ self.check_module_globals_deprecated( -+ {'__name__': 'bar', '__loader__': object()}, -+ 'Module globals is missing a __spec__.loader') -+ -+ def test_gh86298_spec_is_none(self): -+ self.check_module_globals_deprecated( -+ {'__name__': 'bar', '__loader__': object(), '__spec__': None}, -+ 'Module globals is missing a __spec__.loader') -+ -+ def test_gh86298_no_spec_loader(self): -+ self.check_module_globals_deprecated( -+ {'__name__': 'bar', '__loader__': object(), -+ '__spec__': types.SimpleNamespace()}, -+ 'Module globals is missing a __spec__.loader') -+ -+ def test_gh86298_loader_and_spec_loader_disagree(self): -+ self.check_module_globals_deprecated( -+ {'__name__': 'bar', '__loader__': object(), -+ '__spec__': types.SimpleNamespace(loader=object())}, -+ 'Module globals; __loader__ != __spec__.loader') -+ -+ def test_gh86298_no_loader_and_no_spec_loader(self): -+ self.check_module_globals_error( -+ {'__name__': 'bar', '__spec__': types.SimpleNamespace()}, -+ 'Module globals is missing a __spec__.loader', AttributeError) -+ -+ def test_gh86298_no_loader_with_spec_loader_okay(self): -+ self.check_module_globals( -+ {'__name__': 'bar', -+ '__spec__': types.SimpleNamespace(loader=object())}) -+ - class CWarnTests(WarnTests, unittest.TestCase): - module = c_warnings - -@@ -858,37 +951,46 @@ - # warn_explicit() should neither raise a SystemError nor cause an - # assertion failure, in case the return value of get_source() has a - # bad splitlines() method. -- def get_bad_loader(splitlines_ret_val): -+ get_source_called = [] -+ def get_module_globals(*, splitlines_ret_val): -+ class BadSource(str): -+ def splitlines(self): -+ return splitlines_ret_val -+ - class BadLoader: - def get_source(self, fullname): -- class BadSource(str): -- def splitlines(self): -- return splitlines_ret_val -+ get_source_called.append(splitlines_ret_val) - return BadSource('spam') -- return BadLoader() -+ -+ loader = BadLoader() -+ spec = importlib.machinery.ModuleSpec('foobar', loader) -+ return {'__loader__': loader, -+ '__spec__': spec, -+ '__name__': 'foobar'} -+ - - wmod = self.module - with original_warnings.catch_warnings(module=wmod): - wmod.filterwarnings('default', category=UserWarning) - -+ linecache.clearcache() - with support.captured_stderr() as stderr: - wmod.warn_explicit( - 'foo', UserWarning, 'bar', 1, -- module_globals={'__loader__': get_bad_loader(42), -- '__name__': 'foobar'}) -+ module_globals=get_module_globals(splitlines_ret_val=42)) - self.assertIn('UserWarning: foo', stderr.getvalue()) -+ self.assertEqual(get_source_called, [42]) - -- show = wmod._showwarnmsg -- try: -+ linecache.clearcache() -+ with support.swap_attr(wmod, '_showwarnmsg', None): - del wmod._showwarnmsg - with support.captured_stderr() as stderr: - wmod.warn_explicit( - 'eggs', UserWarning, 'bar', 1, -- module_globals={'__loader__': get_bad_loader([42]), -- '__name__': 'foobar'}) -+ module_globals=get_module_globals(splitlines_ret_val=[42])) - self.assertIn('UserWarning: eggs', stderr.getvalue()) -- finally: -- wmod._showwarnmsg = show -+ self.assertEqual(get_source_called, [42, [42]]) -+ linecache.clearcache() - - @support.cpython_only - def test_issue31411(self): diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index 2d695bc8831..4a6586fb1dd 100644 --- a/Lib/test/test_webbrowser.py @@ -33912,1108 +3776,11 @@ index 2d695bc8831..4a6586fb1dd 100644 def test_environment_preferred(self): webbrowser = import_helper.import_fresh_module('webbrowser') try: -diff --git 
a/Lib/test/test_with.py b/Lib/test/test_with.py -index d81902327a7..e8c4ddf979e 100644 ---- a/Lib/test/test_with.py -+++ b/Lib/test/test_with.py -@@ -5,6 +5,7 @@ - __email__ = "mbland at acm dot org" - - import sys -+import traceback - import unittest - from collections import deque - from contextlib import _GeneratorContextManager, contextmanager, nullcontext -@@ -749,5 +750,48 @@ - self.assertEqual(10, b1) - self.assertEqual(20, b2) - -+ def testExceptionLocation(self): -+ # The location of an exception raised from -+ # __init__, __enter__ or __exit__ of a context -+ # manager should be just the context manager expression, -+ # pinpointing the precise context manager in case there -+ # is more than one. -+ -+ def init_raises(): -+ try: -+ with self.Dummy(), self.InitRaises() as cm, self.Dummy() as d: -+ pass -+ except Exception as e: -+ return e -+ -+ def enter_raises(): -+ try: -+ with self.EnterRaises(), self.Dummy() as d: -+ pass -+ except Exception as e: -+ return e -+ -+ def exit_raises(): -+ try: -+ with self.ExitRaises(), self.Dummy() as d: -+ pass -+ except Exception as e: -+ return e -+ -+ for func, expected in [(init_raises, "self.InitRaises()"), -+ (enter_raises, "self.EnterRaises()"), -+ (exit_raises, "self.ExitRaises()"), -+ ]: -+ with self.subTest(func): -+ exc = func() -+ f = traceback.extract_tb(exc.__traceback__)[0] -+ indent = 16 -+ co = func.__code__ -+ self.assertEqual(f.lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.end_lineno, co.co_firstlineno + 2) -+ self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], -+ expected) -+ -+ - if __name__ == '__main__': - unittest.main() -diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py -index 9c382d14f57..f8c2e5ccaa4 100644 ---- a/Lib/test/test_xml_etree.py -+++ b/Lib/test/test_xml_etree.py -@@ -2343,6 +2343,22 @@ - self.assertRaises(TypeError, ET.TreeBuilder().start, "tag") - self.assertRaises(TypeError, ET.TreeBuilder().start, "tag", None) - -+ def test_issue123213_correct_extend_exception(self): -+ # Does not hide the internal exception when extending the element -+ self.assertRaises(ZeroDivisionError, ET.Element('tag').extend, -+ (1/0 for i in range(2))) -+ -+ # Still raises the TypeError when extending with a non-iterable -+ self.assertRaises(TypeError, ET.Element('tag').extend, None) -+ -+ # Preserves the TypeError message when extending with a generator -+ def f(): -+ raise TypeError("mymessage") -+ -+ self.assertRaisesRegex( -+ TypeError, 'mymessage', -+ ET.Element('tag').extend, (f() for i in range(2))) -+ - - - # -------------------------------------------------------------------- -@@ -3669,6 +3685,22 @@ - e[1::-sys.maxsize<<64] = [ET.Element('d')] - self.assertEqual(self._subelem_tags(e), ['a0', 'd', 'a2', 'a3']) - -+ def test_issue123213_setslice_exception(self): -+ e = ET.Element('tag') -+ # Does not hide the internal exception when assigning to the element -+ with self.assertRaises(ZeroDivisionError): -+ e[:1] = (1/0 for i in range(2)) -+ -+ # Still raises the TypeError when assigning with a non-iterable -+ with self.assertRaises(TypeError): -+ e[:1] = None -+ -+ # Preserve the original TypeError message when assigning. 
-+ def f(): -+ raise TypeError("mymessage") -+ -+ with self.assertRaisesRegex(TypeError, 'mymessage'): -+ e[:1] = (f() for i in range(2)) - - class IOTest(unittest.TestCase): - def test_encoding(self): -@@ -4009,7 +4041,7 @@ - def test_warning(self): - e = ET.fromstring('') - msg = ( -- r"Testing an element's truth value will raise an exception in " -+ r"Testing an element's truth value will always return True in " - r"future versions. " - r"Use specific 'len\(elem\)' or 'elem is not None' test instead.") - with self.assertWarnsRegex(DeprecationWarning, msg): -diff --git a/Lib/test/test_zipfile/_path/test_path.py b/Lib/test/test_zipfile/_path/test_path.py -index 06d5aab69bd..616c4e8ca7c 100644 ---- a/Lib/test/test_zipfile/_path/test_path.py -+++ b/Lib/test/test_zipfile/_path/test_path.py -@@ -4,6 +4,7 @@ - import pathlib - import pickle - import sys -+import time - import unittest - import zipfile - -@@ -472,6 +473,18 @@ - - assert list(root.glob("**/*.txt")) == list(root.rglob("*.txt")) - -+ @pass_alpharep -+ def test_glob_dirs(self, alpharep): -+ root = zipfile.Path(alpharep) -+ assert list(root.glob('b')) == [zipfile.Path(alpharep, "b/")] -+ assert list(root.glob('b*')) == [zipfile.Path(alpharep, "b/")] -+ -+ @pass_alpharep -+ def test_glob_subdir(self, alpharep): -+ root = zipfile.Path(alpharep) -+ assert list(root.glob('g/h')) == [zipfile.Path(alpharep, "g/h/")] -+ assert list(root.glob('g*/h*')) == [zipfile.Path(alpharep, "g/h/")] -+ - @pass_alpharep - def test_glob_subdirs(self, alpharep): - root = zipfile.Path(alpharep) -@@ -577,3 +590,87 @@ - zipfile.Path(alpharep) - with self.assertRaises(KeyError): - alpharep.getinfo('does-not-exist') -+ -+ def test_malformed_paths(self): -+ """ -+ Path should handle malformed paths gracefully. -+ -+ Paths with leading slashes are not visible. -+ -+ Paths with dots are treated like regular files. -+ """ -+ data = io.BytesIO() -+ zf = zipfile.ZipFile(data, "w") -+ zf.writestr("/one-slash.txt", b"content") -+ zf.writestr("//two-slash.txt", b"content") -+ zf.writestr("../parent.txt", b"content") -+ zf.filename = '' -+ root = zipfile.Path(zf) -+ assert list(map(str, root.iterdir())) == ['../'] -+ assert root.joinpath('..').joinpath('parent.txt').read_bytes() == b'content' -+ -+ def test_unsupported_names(self): -+ """ -+ Path segments with special characters are readable. -+ -+ On some platforms or file systems, characters like -+ ``:`` and ``?`` are not allowed, but they are valid -+ in the zip file. -+ """ -+ data = io.BytesIO() -+ zf = zipfile.ZipFile(data, "w") -+ zf.writestr("path?", b"content") -+ zf.writestr("V: NMS.flac", b"fLaC...") -+ zf.filename = '' -+ root = zipfile.Path(zf) -+ contents = root.iterdir() -+ assert next(contents).name == 'path?' -+ assert next(contents).name == 'V: NMS.flac' -+ assert root.joinpath('V: NMS.flac').read_bytes() == b"fLaC..." -+ -+ def test_backslash_not_separator(self): -+ """ -+ In a zip file, backslashes are not separators. -+ """ -+ data = io.BytesIO() -+ zf = zipfile.ZipFile(data, "w") -+ zf.writestr(DirtyZipInfo.for_name("foo\\bar", zf), b"content") -+ zf.filename = '' -+ root = zipfile.Path(zf) -+ (first,) = root.iterdir() -+ assert not first.is_dir() -+ assert first.name == 'foo\\bar' -+ -+ @pass_alpharep -+ def test_interface(self, alpharep): -+ from importlib.resources.abc import Traversable -+ -+ zf = zipfile.Path(alpharep) -+ assert isinstance(zf, Traversable) -+ -+ -+class DirtyZipInfo(zipfile.ZipInfo): -+ """ -+ Bypass name sanitization. 
-+ """ -+ -+ def __init__(self, filename, *args, **kwargs): -+ super().__init__(filename, *args, **kwargs) -+ self.filename = filename -+ -+ @classmethod -+ def for_name(cls, name, archive): -+ """ -+ Construct the same way that ZipFile.writestr does. -+ -+ TODO: extract this functionality and re-use -+ """ -+ self = cls(filename=name, date_time=time.localtime(time.time())[:6]) -+ self.compress_type = archive.compression -+ self.compress_level = archive.compresslevel -+ if self.filename.endswith('/'): # pragma: no cover -+ self.external_attr = 0o40775 << 16 # drwxrwxr-x -+ self.external_attr |= 0x10 # MS-DOS directory flag -+ else: -+ self.external_attr = 0o600 << 16 # ?rw------- -+ return self -diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py -index 14c19719e26..12fd9826d86 100644 ---- a/Lib/test/test_zipimport.py -+++ b/Lib/test/test_zipimport.py -@@ -50,8 +50,11 @@ - - - TESTMOD = "ziptestmodule" -+TESTMOD2 = "ziptestmodule2" -+TESTMOD3 = "ziptestmodule3" - TESTPACK = "ziptestpackage" - TESTPACK2 = "ziptestpackage2" -+TESTPACK3 = "ziptestpackage3" - TEMP_DIR = os.path.abspath("junk95142") - TEMP_ZIP = os.path.abspath("junk95142.zip") - -@@ -92,8 +95,10 @@ - # defined by files under the directory dirName. - self.addCleanup(os_helper.rmtree, dirName) - -- for name, (mtime, data) in files.items(): -- path = os.path.join(dirName, name) -+ for name, data in files.items(): -+ if isinstance(data, tuple): -+ mtime, data = data -+ path = os.path.join(dirName, *name.split('/')) - if path[-1] == os.sep: - if not os.path.isdir(path): - os.makedirs(path) -@@ -104,22 +109,18 @@ - with open(path, 'wb') as fp: - fp.write(data) - -- def makeZip(self, files, zipName=TEMP_ZIP, **kw): -+ def makeZip(self, files, zipName=TEMP_ZIP, *, -+ comment=None, file_comment=None, stuff=None, prefix='', **kw): - # Create a zip archive based set of modules/packages -- # defined by files in the zip file zipName. If the -- # key 'stuff' exists in kw it is prepended to the archive. -+ # defined by files in the zip file zipName. -+ # If stuff is not None, it is prepended to the archive. 
- self.addCleanup(os_helper.unlink, zipName) - -- with ZipFile(zipName, "w") as z: -- for name, (mtime, data) in files.items(): -- zinfo = ZipInfo(name, time.localtime(mtime)) -- zinfo.compress_type = self.compression -- z.writestr(zinfo, data) -- comment = kw.get("comment", None) -+ with ZipFile(zipName, "w", compression=self.compression) as z: -+ self.writeZip(z, files, file_comment=file_comment, prefix=prefix) - if comment is not None: - z.comment = comment - -- stuff = kw.get("stuff", None) - if stuff is not None: - # Prepend 'stuff' to the start of the zipfile - with open(zipName, "rb") as f: -@@ -128,20 +129,41 @@ - f.write(stuff) - f.write(data) - -- def doTest(self, expected_ext, files, *modules, **kw): -+ def writeZip(self, z, files, *, file_comment=None, prefix=''): -+ for name, data in files.items(): -+ if isinstance(data, tuple): -+ mtime, data = data -+ else: -+ mtime = NOW -+ name = name.replace(os.sep, '/') -+ zinfo = ZipInfo(prefix + name, time.localtime(mtime)) -+ zinfo.compress_type = self.compression -+ if file_comment is not None: -+ zinfo.comment = file_comment -+ if data is None: -+ zinfo.CRC = 0 -+ z.mkdir(zinfo) -+ else: -+ assert name[-1] != '/' -+ z.writestr(zinfo, data) -+ -+ def doTest(self, expected_ext, files, *modules, call=None, **kw): -+ if 'prefix' not in kw: -+ kw['prefix'] = 'pre/fix/' -+ prefix = kw['prefix'] - self.makeZip(files, **kw) - -- sys.path.insert(0, TEMP_ZIP) -+ zip_path = os.path.join(TEMP_ZIP, *prefix.split('/')[:-1]) -+ sys.path.insert(0, zip_path) - - mod = importlib.import_module(".".join(modules)) - -- call = kw.get('call') - if call is not None: - call(mod) - - if expected_ext: - file = mod.get_file() -- self.assertEqual(file, os.path.join(TEMP_ZIP, -+ self.assertEqual(file, os.path.join(zip_path, - *modules) + expected_ext) - - def testAFakeZlib(self): -@@ -167,7 +189,7 @@ - self.skipTest('zlib is a builtin module') - if "zlib" in sys.modules: - del sys.modules["zlib"] -- files = {"zlib.py": (NOW, test_src)} -+ files = {"zlib.py": test_src} - try: - self.doTest(".py", files, "zlib") - except ImportError: -@@ -178,16 +200,16 @@ - self.fail("expected test to raise ImportError") - - def testPy(self): -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD) - - def testPyc(self): -- files = {TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files = {TESTMOD + pyc_ext: test_pyc} - self.doTest(pyc_ext, files, TESTMOD) - - def testBoth(self): -- files = {TESTMOD + ".py": (NOW, test_src), -- TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files = {TESTMOD + ".py": test_src, -+ TESTMOD + pyc_ext: test_pyc} - self.doTest(pyc_ext, files, TESTMOD) - - def testUncheckedHashBasedPyc(self): -@@ -220,22 +242,22 @@ - self.doTest(None, files, TESTMOD, call=check) - - def testEmptyPy(self): -- files = {TESTMOD + ".py": (NOW, "")} -+ files = {TESTMOD + ".py": ""} - self.doTest(None, files, TESTMOD) - - def testBadMagic(self): - # make pyc magic word invalid, forcing loading from .py - badmagic_pyc = bytearray(test_pyc) - badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit -- files = {TESTMOD + ".py": (NOW, test_src), -- TESTMOD + pyc_ext: (NOW, badmagic_pyc)} -+ files = {TESTMOD + ".py": test_src, -+ TESTMOD + pyc_ext: badmagic_pyc} - self.doTest(".py", files, TESTMOD) - - def testBadMagic2(self): - # make pyc magic word invalid, causing an ImportError - badmagic_pyc = bytearray(test_pyc) - badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit -- files = {TESTMOD + pyc_ext: (NOW, badmagic_pyc)} -+ files = {TESTMOD + 
pyc_ext: badmagic_pyc} - try: - self.doTest(".py", files, TESTMOD) - self.fail("This should not be reached") -@@ -248,22 +270,22 @@ - # flip the second bit -- not the first as that one isn't stored in the - # .py's mtime in the zip archive. - badtime_pyc[11] ^= 0x02 -- files = {TESTMOD + ".py": (NOW, test_src), -- TESTMOD + pyc_ext: (NOW, badtime_pyc)} -+ files = {TESTMOD + ".py": test_src, -+ TESTMOD + pyc_ext: badtime_pyc} - self.doTest(".py", files, TESTMOD) - - def test2038MTime(self): - # Make sure we can handle mtimes larger than what a 32-bit signed number - # can hold. - twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1, len(test_src)) -- files = {TESTMOD + ".py": (NOW, test_src), -- TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)} -+ files = {TESTMOD + ".py": test_src, -+ TESTMOD + pyc_ext: twenty_thirty_eight_pyc} - self.doTest(".py", files, TESTMOD) - - def testPackage(self): - packdir = TESTPACK + os.sep -- files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files = {packdir + "__init__" + pyc_ext: test_pyc, -+ packdir + TESTMOD + pyc_ext: test_pyc} - self.doTest(pyc_ext, files, TESTPACK, TESTMOD) - - def testSubPackage(self): -@@ -271,9 +293,9 @@ - # archives. - packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files = {packdir + "__init__" + pyc_ext: test_pyc, -+ packdir2 + "__init__" + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} - self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) - - def testSubNamespacePackage(self): -@@ -282,9 +304,9 @@ - packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep - # The first two files are just directory entries (so have no data). -- files = {packdir: (NOW, ""), -- packdir2: (NOW, ""), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files = {packdir: None, -+ packdir2: None, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} - self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) - - def testMixedNamespacePackage(self): -@@ -292,19 +314,19 @@ - # real filesystem and a zip archive. 
- packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- packdir3 = packdir2 + TESTPACK + '3' + os.sep -- files1 = {packdir: (NOW, ""), -- packdir + TESTMOD + pyc_ext: (NOW, test_pyc), -- packdir2: (NOW, ""), -- packdir3: (NOW, ""), -- packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -- files2 = {packdir: (NOW, ""), -- packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), -- packdir2: (NOW, ""), -- packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ packdir3 = packdir2 + TESTPACK3 + os.sep -+ files1 = {packdir: None, -+ packdir + TESTMOD + pyc_ext: test_pyc, -+ packdir2: None, -+ packdir3: None, -+ packdir3 + TESTMOD + pyc_ext: test_pyc, -+ packdir2 + TESTMOD3 + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} -+ files2 = {packdir: None, -+ packdir + TESTMOD2 + pyc_ext: test_pyc, -+ packdir2: None, -+ packdir2 + TESTMOD2 + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} - - zip1 = os.path.abspath("path1.zip") - self.makeZip(files1, zip1) -@@ -337,8 +359,8 @@ - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) - self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - -- # And TESTPACK/(TESTMOD + '2') only exists in path2. -- mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) -+ # And TESTPACK/(TESTMOD2) only exists in path2. -+ mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) - self.assertEqual(os.path.basename(TEMP_DIR), - mod.__file__.split(os.sep)[-3]) - -@@ -355,13 +377,13 @@ - self.assertEqual(os.path.basename(TEMP_DIR), - mod.__file__.split(os.sep)[-4]) - -- # subpkg.TESTMOD + '2' only exists in zip2. -- mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) -+ # subpkg.TESTMOD2 only exists in zip2. -+ mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) - self.assertEqual(os.path.basename(TEMP_DIR), - mod.__file__.split(os.sep)[-4]) - -- # Finally subpkg.TESTMOD + '3' only exists in zip1. -- mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) -+ # Finally subpkg.TESTMOD3 only exists in zip1. -+ mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) - self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) - - def testNamespacePackage(self): -@@ -369,22 +391,22 @@ - # archives. 
- packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- packdir3 = packdir2 + TESTPACK + '3' + os.sep -- files1 = {packdir: (NOW, ""), -- packdir + TESTMOD + pyc_ext: (NOW, test_pyc), -- packdir2: (NOW, ""), -- packdir3: (NOW, ""), -- packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ packdir3 = packdir2 + TESTPACK3 + os.sep -+ files1 = {packdir: None, -+ packdir + TESTMOD + pyc_ext: test_pyc, -+ packdir2: None, -+ packdir3: None, -+ packdir3 + TESTMOD + pyc_ext: test_pyc, -+ packdir2 + TESTMOD3 + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} - zip1 = os.path.abspath("path1.zip") - self.makeZip(files1, zip1) - -- files2 = {packdir: (NOW, ""), -- packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), -- packdir2: (NOW, ""), -- packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -+ files2 = {packdir: None, -+ packdir + TESTMOD2 + pyc_ext: test_pyc, -+ packdir2: None, -+ packdir2 + TESTMOD2 + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} - zip2 = os.path.abspath("path2.zip") - self.makeZip(files2, zip2) - -@@ -413,8 +435,8 @@ - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) - self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - -- # And TESTPACK/(TESTMOD + '2') only exists in path2. -- mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) -+ # And TESTPACK/(TESTMOD2) only exists in path2. -+ mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) - self.assertEqual("path2.zip", mod.__file__.split(os.sep)[-3]) - - # One level deeper... -@@ -429,29 +451,22 @@ - mod = importlib.import_module('.'.join((subpkg, TESTMOD))) - self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - -- # subpkg.TESTMOD + '2' only exists in zip2. -- mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) -+ # subpkg.TESTMOD2 only exists in zip2. -+ mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) - self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - -- # Finally subpkg.TESTMOD + '3' only exists in zip1. -- mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) -+ # Finally subpkg.TESTMOD3 only exists in zip1. 
-+ mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) - self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) - - def testZipImporterMethods(self): - packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), -- "spam" + pyc_ext: (NOW, test_pyc)} -- -- self.addCleanup(os_helper.unlink, TEMP_ZIP) -- with ZipFile(TEMP_ZIP, "w") as z: -- for name, (mtime, data) in files.items(): -- zinfo = ZipInfo(name, time.localtime(mtime)) -- zinfo.compress_type = self.compression -- zinfo.comment = b"spam" -- z.writestr(zinfo, data) -+ files = {packdir + "__init__" + pyc_ext: test_pyc, -+ packdir2 + "__init__" + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc, -+ "spam" + pyc_ext: test_pyc} -+ self.makeZip(files, file_comment=b"spam") - - zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi.archive, TEMP_ZIP) -@@ -507,17 +522,11 @@ - def testInvalidateCaches(self): - packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), -- "spam" + pyc_ext: (NOW, test_pyc)} -- self.addCleanup(os_helper.unlink, TEMP_ZIP) -- with ZipFile(TEMP_ZIP, "w") as z: -- for name, (mtime, data) in files.items(): -- zinfo = ZipInfo(name, time.localtime(mtime)) -- zinfo.compress_type = self.compression -- zinfo.comment = b"spam" -- z.writestr(zinfo, data) -+ files = {packdir + "__init__" + pyc_ext: test_pyc, -+ packdir2 + "__init__" + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc, -+ "spam" + pyc_ext: test_pyc} -+ self.makeZip(files, file_comment=b"spam") - - zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi._files.keys(), files.keys()) -@@ -525,14 +534,10 @@ - zi.invalidate_caches() - self.assertEqual(zi._files.keys(), files.keys()) - # Add a new file to the ZIP archive -- newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} -+ newfile = {"spam2" + pyc_ext: test_pyc} - files.update(newfile) -- with ZipFile(TEMP_ZIP, "a") as z: -- for name, (mtime, data) in newfile.items(): -- zinfo = ZipInfo(name, time.localtime(mtime)) -- zinfo.compress_type = self.compression -- zinfo.comment = b"spam" -- z.writestr(zinfo, data) -+ with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: -+ self.writeZip(z, newfile, file_comment=b"spam") - # Check that we can detect the new file after invalidating the cache - zi.invalidate_caches() - self.assertEqual(zi._files.keys(), files.keys()) -@@ -549,16 +554,9 @@ - def testZipImporterMethodsInSubDirectory(self): - packdir = TESTPACK + os.sep - packdir2 = packdir + TESTPACK2 + os.sep -- files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), -- packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} -- -- self.addCleanup(os_helper.unlink, TEMP_ZIP) -- with ZipFile(TEMP_ZIP, "w") as z: -- for name, (mtime, data) in files.items(): -- zinfo = ZipInfo(name, time.localtime(mtime)) -- zinfo.compress_type = self.compression -- zinfo.comment = b"eggs" -- z.writestr(zinfo, data) -+ files = {packdir2 + "__init__" + pyc_ext: test_pyc, -+ packdir2 + TESTMOD + pyc_ext: test_pyc} -+ self.makeZip(files, file_comment=b"eggs") - - zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir) - self.assertEqual(zi.archive, TEMP_ZIP) -@@ -623,9 +621,9 @@ - if __loader__.get_data("some.data") != b"some data": - raise 
AssertionError("bad data")\n""" - pyc = make_pyc(compile(src, "", "exec"), NOW, len(src)) -- files = {TESTMOD + pyc_ext: (NOW, pyc), -- "some.data": (NOW, "some data")} -- self.doTest(pyc_ext, files, TESTMOD) -+ files = {TESTMOD + pyc_ext: pyc, -+ "some.data": "some data"} -+ self.doTest(pyc_ext, files, TESTMOD, prefix='') - - def testDefaultOptimizationLevel(self): - # zipimport should use the default optimization level (#28131) -@@ -633,7 +631,7 @@ - def test(val): - assert(val) - return val\n""" -- files = {TESTMOD + '.py': (NOW, src)} -+ files = {TESTMOD + '.py': src} - self.makeZip(files) - sys.path.insert(0, TEMP_ZIP) - mod = importlib.import_module(TESTMOD) -@@ -646,7 +644,7 @@ - def testImport_WithStuff(self): - # try importing from a zipfile which contains additional - # stuff at the beginning of the file -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD, - stuff=b"Some Stuff"*31) - -@@ -654,18 +652,18 @@ - self.assertEqual(inspect.getsource(module), test_src) - - def testGetSource(self): -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD, call=self.assertModuleSource) - - def testGetCompiledSource(self): - pyc = make_pyc(compile(test_src, "", "exec"), NOW, len(test_src)) -- files = {TESTMOD + ".py": (NOW, test_src), -- TESTMOD + pyc_ext: (NOW, pyc)} -+ files = {TESTMOD + ".py": test_src, -+ TESTMOD + pyc_ext: pyc} - self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource) - - def runDoctest(self, callback): -- files = {TESTMOD + ".py": (NOW, test_src), -- "xyz.txt": (NOW, ">>> log.append(True)\n")} -+ files = {TESTMOD + ".py": test_src, -+ "xyz.txt": ">>> log.append(True)\n"} - self.doTest(".py", files, TESTMOD, call=callback) - - def doDoctestFile(self, module): -@@ -717,29 +715,21 @@ - raise AssertionError("This ought to be impossible") - - def testTraceback(self): -- files = {TESTMOD + ".py": (NOW, raise_src)} -+ files = {TESTMOD + ".py": raise_src} - self.doTest(None, files, TESTMOD, call=self.doTraceback) - - @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, - "need an unencodable filename") - def testUnencodable(self): - filename = os_helper.TESTFN_UNENCODABLE + ".zip" -- self.addCleanup(os_helper.unlink, filename) -- with ZipFile(filename, "w") as z: -- zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) -- zinfo.compress_type = self.compression -- z.writestr(zinfo, test_src) -+ self.makeZip({TESTMOD + ".py": test_src}, filename) - spec = zipimport.zipimporter(filename).find_spec(TESTMOD) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - - def testBytesPath(self): - filename = os_helper.TESTFN + ".zip" -- self.addCleanup(os_helper.unlink, filename) -- with ZipFile(filename, "w") as z: -- zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) -- zinfo.compress_type = self.compression -- z.writestr(zinfo, test_src) -+ self.makeZip({TESTMOD + ".py": test_src}, filename) - - zipimport.zipimporter(filename) - with self.assertRaises(TypeError): -@@ -750,15 +740,15 @@ - zipimport.zipimporter(memoryview(os.fsencode(filename))) - - def testComment(self): -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD, comment=b"comment") - - def testBeginningCruftAndComment(self): -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD, stuff=b"cruft" * 64, comment=b"hi") - - def 
testLargestPossibleComment(self): -- files = {TESTMOD + ".py": (NOW, test_src)} -+ files = {TESTMOD + ".py": test_src} - self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) - - -diff --git a/Lib/test/typinganndata/ann_module695.py b/Lib/test/typinganndata/ann_module695.py -index 2ede9fe3825..b6f3b06bd50 100644 ---- a/Lib/test/typinganndata/ann_module695.py -+++ b/Lib/test/typinganndata/ann_module695.py -@@ -17,6 +17,56 @@ - z: P - - -+Eggs = int -+Spam = str -+ -+ -+class C[Eggs, **Spam]: -+ x: Eggs -+ y: Spam -+ -+ - def generic_function[T, *Ts, **P]( - x: T, *y: *Ts, z: P.args, zz: P.kwargs - ) -> None: ... -+ -+ -+def generic_function_2[Eggs, **Spam](x: Eggs, y: Spam): pass -+ -+ -+class D: -+ Foo = int -+ Bar = str -+ -+ def generic_method[Foo, **Bar]( -+ self, x: Foo, y: Bar -+ ) -> None: ... -+ -+ def generic_method_2[Eggs, **Spam](self, x: Eggs, y: Spam): pass -+ -+ -+def nested(): -+ from types import SimpleNamespace -+ from typing import get_type_hints -+ -+ Eggs = bytes -+ Spam = memoryview -+ -+ -+ class E[Eggs, **Spam]: -+ x: Eggs -+ y: Spam -+ -+ def generic_method[Eggs, **Spam](self, x: Eggs, y: Spam): pass -+ -+ -+ def generic_function[Eggs, **Spam](x: Eggs, y: Spam): pass -+ -+ -+ return SimpleNamespace( -+ E=E, -+ hints_for_E=get_type_hints(E), -+ hints_for_E_meth=get_type_hints(E.generic_method), -+ generic_func=generic_function, -+ hints_for_generic_func=get_type_hints(generic_function) -+ ) -diff --git a/Lib/threading.py b/Lib/threading.py -index 98cb43c6972..0bba85d08a0 100644 ---- a/Lib/threading.py -+++ b/Lib/threading.py -@@ -332,7 +332,7 @@ - awakened or timed out, it re-acquires the lock and returns. - - When the timeout argument is present and not None, it should be a -- floating point number specifying a timeout for the operation in seconds -+ floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). - - When the underlying lock is an RLock, it is not released using its -@@ -642,7 +642,7 @@ - the optional timeout occurs. - - When the timeout argument is present and not None, it should be a -- floating point number specifying a timeout for the operation in seconds -+ floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). - - This method returns the internal flag on exit, so it will always return -@@ -685,6 +685,8 @@ - default for all subsequent 'wait()' calls. - - """ -+ if parties < 1: -+ raise ValueError("parties must be > 0") - self._cond = Condition(Lock()) - self._action = action - self._timeout = timeout -@@ -1120,7 +1122,7 @@ - or until the optional timeout occurs. - - When the timeout argument is present and not None, it should be a -- floating point number specifying a timeout for the operation in seconds -+ floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). As join() always returns None, you must call - is_alive() after join() to decide whether a timeout happened -- if the - thread is still alive, the join() call timed out. -diff --git a/Lib/tkinter/simpledialog.py b/Lib/tkinter/simpledialog.py -index 538bbfc318d..2c1417f7382 100644 ---- a/Lib/tkinter/simpledialog.py -+++ b/Lib/tkinter/simpledialog.py -@@ -357,7 +357,7 @@ - - - class _QueryFloat(_QueryDialog): -- errormessage = "Not a floating point value." -+ errormessage = "Not a floating-point value." 
- - def getresult(self): - return self.getdouble(self.entry.get()) -diff --git a/Lib/tkinter/ttk.py b/Lib/tkinter/ttk.py -index efeabb7a92c..5d1c9d77a8e 100644 ---- a/Lib/tkinter/ttk.py -+++ b/Lib/tkinter/ttk.py -@@ -683,7 +683,10 @@ - returns the index of the current value in the list of values - or -1 if the current value does not appear in the list.""" - if newindex is None: -- return self.tk.getint(self.tk.call(self._w, "current")) -+ res = self.tk.call(self._w, "current") -+ if res == '': -+ return -1 -+ return self.tk.getint(res) - return self.tk.call(self._w, "current", newindex) - - -@@ -1515,7 +1518,7 @@ - self.label.place(anchor='n' if label_side == 'top' else 's') - - # update the label as scale or variable changes -- self.__tracecb = self._variable.trace_variable('w', self._adjust) -+ self.__tracecb = self._variable.trace_add('write', self._adjust) - self.bind('', self._adjust) - self.bind('', self._adjust) - -@@ -1523,7 +1526,7 @@ - def destroy(self): - """Destroy this widget and possibly its associated variable.""" - try: -- self._variable.trace_vdelete('w', self.__tracecb) -+ self._variable.trace_remove('write', self.__tracecb) - except AttributeError: - pass - else: -diff --git a/Lib/turtle.py b/Lib/turtle.py -index 811c5dfa492..92ac58f8e6b 100644 ---- a/Lib/turtle.py -+++ b/Lib/turtle.py -@@ -1719,7 +1719,7 @@ - >>> reset() - >>> turtle.left(60) - >>> turtle.forward(100) -- >>> print turtle.xcor() -+ >>> print(turtle.xcor()) - 50.0 - """ - return self._position[0] -@@ -1733,7 +1733,7 @@ - >>> reset() - >>> turtle.left(60) - >>> turtle.forward(100) -- >>> print turtle.ycor() -+ >>> print(turtle.ycor()) - 86.6025403784 - """ - return self._position[1] -@@ -2336,7 +2336,7 @@ - - Example (for a Turtle instance named turtle): - >>> turtle.hideturtle() -- >>> print turtle.isvisible(): -+ >>> print(turtle.isvisible()) - False - """ - return self._shown -diff --git a/Lib/typing.py b/Lib/typing.py -index 882dc4da58e..94c211292ec 100644 ---- a/Lib/typing.py -+++ b/Lib/typing.py -@@ -927,15 +927,24 @@ - globalns = getattr( - sys.modules.get(self.__forward_module__, None), '__dict__', globalns - ) -+ -+ # type parameters require some special handling, -+ # as they exist in their own scope -+ # but `eval()` does not have a dedicated parameter for that scope. -+ # For classes, names in type parameter scopes should override -+ # names in the global scope (which here are called `localns`!), -+ # but should in turn be overridden by names in the class scope -+ # (which here are called `globalns`!) 
- if type_params: -- # "Inject" type parameters into the local namespace -- # (unless they are shadowed by assignments *in* the local namespace), -- # as a way of emulating annotation scopes when calling `eval()` -- locals_to_pass = {param.__name__: param for param in type_params} | localns -- else: -- locals_to_pass = localns -+ globalns, localns = dict(globalns), dict(localns) -+ for param in type_params: -+ param_name = param.__name__ -+ if not self.__forward_is_class__ or param_name not in globalns: -+ globalns[param_name] = param -+ localns.pop(param_name, None) -+ - type_ = _type_check( -- eval(self.__forward_code__, globalns, locals_to_pass), -+ eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.", - is_argument=self.__forward_is_argument__, - allow_special_forms=self.__forward_is_class__, -diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py -index 486e0c634b8..9398f56506b 100644 ---- a/Lib/unittest/mock.py -+++ b/Lib/unittest/mock.py -@@ -800,6 +800,9 @@ - mock_name = f'{self._extract_mock_name()}.{name}' - raise AttributeError(f'Cannot set {mock_name}') - -+ if isinstance(value, PropertyMock): -+ self.__dict__[name] = value -+ return - return object.__setattr__(self, name, value) - - -@@ -1478,13 +1481,12 @@ - if isinstance(original, type): - # If we're patching out a class and there is a spec - inherit = True -- if spec is None and _is_async_obj(original): -- Klass = AsyncMock -- else: -- Klass = MagicMock -- _kwargs = {} -+ -+ # Determine the Klass to use - if new_callable is not None: - Klass = new_callable -+ elif spec is None and _is_async_obj(original): -+ Klass = AsyncMock - elif spec is not None or spec_set is not None: - this_spec = spec - if spec_set is not None: -@@ -1497,7 +1499,12 @@ - Klass = AsyncMock - elif not_callable: - Klass = NonCallableMagicMock -+ else: -+ Klass = MagicMock -+ else: -+ Klass = MagicMock - -+ _kwargs = {} - if spec is not None: - _kwargs['spec'] = spec - if spec_set is not None: -@@ -2718,6 +2725,12 @@ - if not unsafe: - _check_spec_arg_typos(kwargs) - -+ _name = kwargs.pop('name', _name) -+ _new_name = _name -+ if _parent is None: -+ # for a top level object no _new_name should be set -+ _new_name = '' -+ - _kwargs.update(kwargs) - - Klass = MagicMock -@@ -2735,13 +2748,6 @@ - elif is_type and instance and not _instance_callable(spec): - Klass = NonCallableMagicMock - -- _name = _kwargs.pop('name', _name) -- -- _new_name = _name -- if _parent is None: -- # for a top level object no _new_name should be set -- _new_name = '' -- - mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name, - name=_name, **_kwargs) - -diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py -index 3932bb99c7e..24815952037 100644 ---- a/Lib/urllib/parse.py -+++ b/Lib/urllib/parse.py -@@ -525,9 +525,13 @@ - empty query; the RFC states that these are equivalent).""" - scheme, netloc, url, query, fragment, _coerce_result = ( - _coerce_args(*components)) -- if netloc or (scheme and scheme in uses_netloc) or url[:2] == '//': -+ if netloc: - if url and url[:1] != '/': url = '/' + url -- url = '//' + (netloc or '') + url -+ url = '//' + netloc + url -+ elif url[:2] == '//': -+ url = '//' + url -+ elif scheme and scheme in uses_netloc and (not url or url[:1] == '/'): -+ url = '//' + url - if scheme: - url = scheme + ':' + url - if query: diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py -index ba6711e4ef5..a6792fa8d56 100755 +index 13b9e85f9e1..a6792fa8d56 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py -@@ 
-30,7 +30,7 @@ - # Preferred browsers go to the front of the list. - # Need to match to the default browser returned by xdg-settings, which - # may be of the form e.g. "firefox.desktop". -- if preferred or (_os_preferred_browser and name in _os_preferred_browser): -+ if preferred or (_os_preferred_browser and f'{name}.desktop' == _os_preferred_browser): - _tryorder.insert(0, name) - else: - _tryorder.append(name) -@@ -77,6 +77,9 @@ - - 1: a new browser window. - - 2: a new browser page ("tab"). - If possible, autoraise raises the window (the default) or not. -+ -+ If opening the browser succeeds, return True. -+ If there is a problem, return False. - """ - if _tryorder is None: - with _lock: -@@ -473,6 +476,9 @@ +@@ -476,6 +476,9 @@ # OS X can use below Unix support (but we prefer using the OS X # specific stuff) @@ -35023,7 +3790,7 @@ index ba6711e4ef5..a6792fa8d56 100755 if sys.platform == "serenityos": # SerenityOS webbrowser, simply called "Browser". register("Browser", None, BackgroundBrowser("Browser")) -@@ -653,6 +659,70 @@ +@@ -656,6 +659,70 @@ rc = osapipe.close() return not rc @@ -35094,306 +3861,6 @@ index ba6711e4ef5..a6792fa8d56 100755 def main(): import getopt -diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py -index fd2cc8704e1..c657b52d12b 100644 ---- a/Lib/xml/etree/ElementTree.py -+++ b/Lib/xml/etree/ElementTree.py -@@ -201,7 +201,7 @@ - - def __bool__(self): - warnings.warn( -- "Testing an element's truth value will raise an exception in " -+ "Testing an element's truth value will always return True in " - "future versions. " - "Use specific 'len(elem)' or 'elem is not None' test instead.", - DeprecationWarning, stacklevel=2 -diff --git a/Lib/zipfile/_path/__init__.py b/Lib/zipfile/_path/__init__.py -index 78c413563bb..8db5ef18d7c 100644 ---- a/Lib/zipfile/_path/__init__.py -+++ b/Lib/zipfile/_path/__init__.py -@@ -1,3 +1,12 @@ -+""" -+A Path-like interface for zipfiles. -+ -+This codebase is shared between zipfile.Path in the stdlib -+and zipp in PyPI. See -+https://github.com/python/importlib_metadata/wiki/Development-Methodology -+for more detail. -+""" -+ - import io - import posixpath - import zipfile -@@ -34,7 +43,7 @@ - def _ancestry(path): - """ - Given a path with elements separated by -- posixpath.sep, generate all elements of that path -+ posixpath.sep, generate all elements of that path. - - >>> list(_ancestry('b/d')) - ['b/d', 'b'] -@@ -46,9 +55,14 @@ - ['b'] - >>> list(_ancestry('')) - [] -+ -+ Multiple separators are treated like a single. -+ -+ >>> list(_ancestry('//b//d///f//')) -+ ['//b//d///f', '//b//d', '//b'] - """ - path = path.rstrip(posixpath.sep) -- while path and path != posixpath.sep: -+ while path.rstrip(posixpath.sep): - yield path - path, tail = posixpath.split(path) - -@@ -174,7 +188,10 @@ - - class Path: - """ -- A pathlib-compatible interface for zip files. -+ A :class:`importlib.resources.abc.Traversable` interface for zip files. -+ -+ Implements many of the features users enjoy from -+ :class:`pathlib.Path`. - - Consider a zip file with this structure:: - -diff --git a/Lib/zipfile/_path/glob.py b/Lib/zipfile/_path/glob.py -index 4a2e665e270..d5213533ad2 100644 ---- a/Lib/zipfile/_path/glob.py -+++ b/Lib/zipfile/_path/glob.py -@@ -2,6 +2,19 @@ - - - def translate(pattern): -+ return match_dirs(translate_core(pattern)) -+ -+ -+def match_dirs(pattern): -+ """ -+ Ensure that zipfile.Path directory names are matched. -+ -+ zipfile.Path directory names always end in a slash. -+ """ -+ return rf'{pattern}[/]?' 
-+ -+ -+def translate_core(pattern): - r""" - Given a glob pattern, produce a regex that matches it. - -diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py -index d24cb76fd48..9e1e180142d 100755 ---- a/Mac/BuildScript/build-installer.py -+++ b/Mac/BuildScript/build-installer.py -@@ -246,9 +246,9 @@ - - result.extend([ - dict( -- name="OpenSSL 3.0.13", -- url="https://www.openssl.org/source/openssl-3.0.13.tar.gz", -- checksum='88525753f79d3bec27d2fa7c66aa0b92b3aa9498dafd93d7cfa4b3780cdae313', -+ name="OpenSSL 3.0.15", -+ url="https://github.com/openssl/openssl/releases/download/openssl-3.0.15/openssl-3.0.15.tar.gz", -+ checksum='23c666d0edf20f14249b3d8f0368acaee9ab585b09e1de82107c66e1f3ec9533', - buildrecipe=build_universal_openssl, - configure=None, - install=None, -diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf -index 384840cd92d..ced4c67645d 100644 ---- a/Mac/BuildScript/resources/ReadMe.rtf -+++ b/Mac/BuildScript/resources/ReadMe.rtf -@@ -1,8 +1,9 @@ --{\rtf1\ansi\ansicpg1252\cocoartf2709 -+{\rtf1\ansi\ansicpg1252\cocoartf2761 - \cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique; --\f3\fmodern\fcharset0 CourierNewPSMT;\f4\fmodern\fcharset0 Courier;} --{\colortbl;\red255\green255\blue255;} --{\*\expandedcolortbl;;} -+\f3\fmodern\fcharset0 CourierNewPSMT;\f4\fnil\fcharset0 .AppleSystemUIFontMonospaced-Regular;\f5\fmodern\fcharset0 Courier; -+} -+{\colortbl;\red255\green255\blue255;\red24\green26\blue30;\red244\green246\blue249;} -+{\*\expandedcolortbl;;\cssrgb\c12157\c13725\c15686;\cssrgb\c96471\c97255\c98039;} - \margl1440\margr1440\vieww13380\viewh14580\viewkind0 - \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 - -@@ -31,14 +32,33 @@ - \f3 pip - \f0 has its own default certificate store for verifying download connections.\ - \ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 - --\f1\b \ul Install Options\ -+\f1\b \cf0 \ul (Updated for 3.12.5): -+\f0\b0 \ulnone If you are using this installer on older, legacy versions of macOS, specifically -+\f1\b macOS 10.9 through 10.12 -+\f0\b0 , the bundled version of -+\f1\b pip -+\f0\b0 (pip 24.2) included with this installer does not work correctly on these old versions of macOS. -+\f3 pip install -+\f0 commands may fail with a message similar to -+\f4\fs23\fsmilli11900 \cf2 \cb3 \expnd0\expndtw0\kerning0 -+SecTrustEvaluateWithError: symbol not found. -+\f0\fs24 \cf0 \cb1 \kerning1\expnd0\expndtw0 (see {\field{\*\fldinst{HYPERLINK "https://github.com/pypa/pip/issues/12901"}}{\fldrslt https://github.com/pypa/pip/issues/12901}} for more information). To work around this issue, the -+\f3 Install Certificates -+\f0 command (described above) has been modified when running on these older macOS releases to attempt to first install an older version of pip that does not have this problem. You should avoid upgrading pip on these older systems until this problem has been resolved in a newer release of pip. 
If necessary, you can rerun -+\f3 Install Certificates -+\f0 at any time to attempt to revert to a working version of pip.\ -+\ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+ -+\f1\b \cf0 \ul Install Options\ - - \f0\b0 \ulnone \ - You can control some aspects of what is installed by this package. To see the options, click on the --\f4 Customize -+\f5 Customize - \f0 button in the --\f4 Installation Type -+\f5 Installation Type - \f0 step of the macOS installer app. Click on a package name in the list shown to see more information about that option,\ - \ - -@@ -64,13 +84,25 @@ - - \f0\b0 \ulnone \ - On Apple Silicon Macs, it is possible to run Python either with native ARM64 code or under Intel 64 emulation using Rosetta2. This option might be useful for testing or if binary wheels are not yet available with native ARM64 binaries. To easily force Python to run in emulation mode, invoke it from a command line shell with the --\f4 python3-intel64 -+\f5 python3-intel64 - \f0 command instead of just --\f4 python3 -+\f5 python3 - \f0 .\ - - \f1\b \ul \ --Other changes\ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+\cf0 Installer support for macOS 10.9 through 10.12 to be discontinued\ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+ -+\f0\b0 \cf0 \ulnone \ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+ -+\f1\b \cf0 \ul (Updated for 3.12.5): -+\f0\b0 \ulnone Up to now, python.org macOS installers have supported installation and running of Python 3.12.x on macOS releases as old as 10.9 Mavericks (which was first released in 2013). However, over time, it has become more difficult to continue supporting these older macOS releases. These operating system releases have long stopped receiving security updates and the most recent versions of Apple's Xcode developer tools no longer support building for these older systems, making it difficult for third-party developers to build and test their packages for them. We believe that only a very small and dwindling number of users are using these installers on these older macOS systems and thus believe that we can better serve the entire community by dropping support of these older macOS versions in future releases of Python 3.12.x installers. We have already announced that macOS installers for the next feature release of Python, 3.13, will initially support macOS 10.13 and newer releases. 
If you have a continued need for running Python 3.12 on these older systems, pre-built versions for these systems may be available from third-party distributors (such as MacPorts) or Python can be built from source ({\field{\*\fldinst{HYPERLINK "https://www.python.org/downloads/source/"}}{\fldrslt https://www.python.org/downloads/source/}}).\ -+ -+\f1\b \ul \ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+\cf0 Other changes\ - - \f0\b0 \ulnone \ - For other changes in this release, see the -diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf -index 8ae9b01b6dd..ddeca26ef4e 100644 ---- a/Mac/BuildScript/resources/Welcome.rtf -+++ b/Mac/BuildScript/resources/Welcome.rtf -@@ -1,4 +1,4 @@ --{\rtf1\ansi\ansicpg1252\cocoartf2709 -+{\rtf1\ansi\ansicpg1252\cocoartf2761 - \cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; - } - {\colortbl;\red255\green255\blue255;} -@@ -23,4 +23,13 @@ - At the end of this install, click on - \f2 Install Certificates - \f0 to install a set of current SSL root certificates.\ -+\ -+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -+ -+\f1\b \cf0 \ul \ulc0 Updated for 3.12.5: -+\f0\b0 \ulnone If you are using this installer on older, legacy versions of macOS, specifically -+\f1\b macOS 10.9 through 10.12 -+\f0\b0 , be aware of a potential issue with newer versions of -+\f1\b pip -+\f0\b0 as described in the ReadMe. Also be aware that a future version of this installer will no longer support these legacy macOS versions.\ - } -\ No newline at end of file -diff --git a/Mac/BuildScript/resources/install_certificates.command b/Mac/BuildScript/resources/install_certificates.command -index 19b4adac07b..b10e18e11a9 100755 ---- a/Mac/BuildScript/resources/install_certificates.command -+++ b/Mac/BuildScript/resources/install_certificates.command -@@ -10,22 +10,52 @@ - - import os - import os.path -+import platform - import ssl - import stat - import subprocess - import sys - --STAT_0o775 = ( stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR -- | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP -- | stat.S_IROTH | stat.S_IXOTH ) -+STAT_0o775 = ( -+ stat.S_IRUSR -+ | stat.S_IWUSR -+ | stat.S_IXUSR -+ | stat.S_IRGRP -+ | stat.S_IWGRP -+ | stat.S_IXGRP -+ | stat.S_IROTH -+ | stat.S_IXOTH -+) -+ - - def main(): -- openssl_dir, openssl_cafile = os.path.split( -- ssl.get_default_verify_paths().openssl_cafile) -+ pip_call = [sys.executable, "-E", "-s", "-m", "pip"] -+ macos_release = tuple([int(n) for n in platform.mac_ver()[0].split(".")[0:2]]) -+ old_macos = macos_release < (10, 13) -+ if old_macos: -+ pip_version_string = subprocess.check_output(pip_call + ["-V"]).decode().strip() -+ # Silence warning to user to upgrade pip -+ pip_call.append("--disable-pip-version-check") -+ pip_version = tuple( -+ [int(n) for n in pip_version_string.split()[1].split(".")[0:2]] -+ ) -+ if pip_version >= (24, 2): -+ print( -+ f" -- WARNING: this version of pip may not work on this older version of macOS.\n" -+ f" found {pip_version_string}\n" -+ f" (See https://github.com/pypa/pip/issues/12901 for more information.)\n" -+ f" Attempting to revert to an older version of pip.\n" -+ f" -- pip install --use-deprecated=legacy-certs pip==24.1.2\n" -+ ) -+ subprocess.check_call( -+ pip_call + ["install", 
"--use-deprecated=legacy-certs", "pip==24.1.2"] -+ ) - -+ openssl_dir, openssl_cafile = os.path.split( -+ ssl.get_default_verify_paths().openssl_cafile -+ ) - print(" -- pip install --upgrade certifi") -- subprocess.check_call([sys.executable, -- "-E", "-s", "-m", "pip", "install", "--upgrade", "certifi"]) -+ subprocess.check_call(pip_call + ["install", "--upgrade", "certifi"]) - - import certifi - -@@ -42,7 +72,16 @@ - print(" -- setting permissions") - os.chmod(openssl_cafile, STAT_0o775) - print(" -- update complete") -+ if old_macos: -+ print( -+ f" -- WARNING: Future releases of this Python installer may not support this older macOS version.\n" -+ ) - --if __name__ == '__main__': -- main() -+ -+if __name__ == "__main__": -+ try: -+ main() -+ except subprocess.SubprocessError: -+ print(" -- WARNING: Install Certificates failed") -+ sys.exit(1) - EOF --- /dev/null +++ b/Mac/Resources/app-store-compliance.patch @@ -0,0 +1,29 @@ @@ -35427,18 +3894,9 @@ index 19b4adac07b..b10e18e11a9 100755 + uses_params = ['', 'ftp', 'hdl', 'prospero', 'http', 'imap', + 'https', 'shttp', 'rtsp', 'rtsps', 'rtspu', 'sip', diff --git a/Makefile.pre.in b/Makefile.pre.in -index 4a957fb0044..b0318c1098e 100644 +index 0e64ccc5c21..b0318c1098e 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in -@@ -167,7 +167,7 @@ - EXT_SUFFIX= @EXT_SUFFIX@ - LDSHARED= @LDSHARED@ $(PY_LDFLAGS) - BLDSHARED= @BLDSHARED@ $(PY_CORE_LDFLAGS) --LDCXXSHARED= @LDCXXSHARED@ -+LDCXXSHARED= @LDCXXSHARED@ $(PY_LDFLAGS) - DESTSHARED= $(BINLIBDEST)/lib-dynload - - # List of exported symbols for AIX @@ -178,18 +178,29 @@ EXE= @EXEEXT@ BUILDEXE= @BUILDEXEEXT@ @@ -35557,14059 +4015,183 @@ index 4a957fb0044..b0318c1098e 100644 +.PHONY: testios +testios: + @if test "$(MACHDEP)" != "ios"; then \ -+ echo "Cannot run the iOS testbed for a non-iOS build."; \ -+ exit 1;\ -+ fi -+ @if test "$(findstring -iphonesimulator,$(MULTIARCH))" != "-iphonesimulator"; then \ -+ echo "Cannot run the iOS testbed for non-simulator builds."; \ -+ exit 1;\ -+ fi -+ @if test $(PYTHONFRAMEWORK) != "Python"; then \ -+ echo "Cannot run the iOS testbed with a non-default framework name."; \ -+ exit 1;\ -+ fi -+ @if ! test -d $(PYTHONFRAMEWORKPREFIX); then \ -+ echo "Cannot find a finalized iOS Python.framework. Have you run 'make install' to finalize the framework build?"; \ -+ exit 1;\ -+ fi -+ # Copy the testbed project into the build folder -+ cp -r $(srcdir)/iOS/testbed $(XCFOLDER) -+ # Copy the framework from the install location to the testbed project. -+ cp -r $(PYTHONFRAMEWORKPREFIX)/* $(XCFOLDER)/Python.xcframework/ios-arm64_x86_64-simulator -+ -+ # Run the test suite for the Xcode project, targeting the iOS simulator. -+ # If the suite fails, touch a file in the test folder as a marker -+ if ! 
xcodebuild test -project $(XCFOLDER)/iOSTestbed.xcodeproj -scheme "iOSTestbed" -destination "platform=iOS Simulator,name=iPhone SE (3rd Generation)" -resultBundlePath $(XCRESULT) -derivedDataPath $(XCFOLDER)/DerivedData ; then \ -+ touch $(XCFOLDER)/failed; \ -+ fi -+ -+ # Regardless of success or failure, extract and print the test output -+ xcrun xcresulttool get --path $(XCRESULT) \ -+ --id $$( \ -+ xcrun xcresulttool get --path $(XCRESULT) --format json | \ -+ $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['actions']['_values'][0]['actionResult']['logRef']['id']['_value'])" \ -+ ) \ -+ --format json | \ -+ $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['subsections']['_values'][1]['subsections']['_values'][0]['emittedOutput']['_value'])" -+ -+ @if test -e $(XCFOLDER)/failed ; then \ -+ exit 1; \ -+ fi -+ - # Like testall, but with only one pass and without multiple processes. - # Run an optional script to include information about the build environment. - .PHONY: buildbottest -@@ -1900,7 +1983,7 @@ - # which can lead to two parallel `./python setup.py build` processes that - # step on each others toes. - .PHONY: install --install: @FRAMEWORKINSTALLFIRST@ commoninstall bininstall maninstall @FRAMEWORKINSTALLLAST@ -+install: @FRAMEWORKINSTALLFIRST@ @INSTALLTARGETS@ @FRAMEWORKINSTALLLAST@ - if test "x$(ENSUREPIP)" != "xno" ; then \ - case $(ENSUREPIP) in \ - upgrade) ensurepip="--upgrade" ;; \ -@@ -2120,6 +2203,7 @@ - __phello__ - TESTSUBDIRS= idlelib/idle_test \ - test \ -+ test/test_ast \ - test/audiodata \ - test/certdata \ - test/certdata/capath \ -@@ -2328,6 +2412,14 @@ - $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ - $(DESTDIR)$(LIBDEST); \ - $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt -+ @ # If app store compliance has been configured, apply the patch to the -+ @ # installed library code. The patch has been previously validated against -+ @ # the original source tree, so we can ignore any errors that are raised -+ @ # due to files that are missing because of --disable-test-modules etc. -+ @if [ "$(APP_STORE_COMPLIANCE_PATCH)" != "" ]; then \ -+ echo "Applying app store compliance patch"; \ -+ patch --force --reject-file "$(abs_builddir)/app-store-compliance.rej" --strip 2 --directory "$(DESTDIR)$(LIBDEST)" --input "$(abs_srcdir)/$(APP_STORE_COMPLIANCE_PATCH)" || true ; \ -+ fi - @ # Build PYC files for the 3 optimization levels (0, 1, 2) - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ -@@ -2504,10 +2596,11 @@ - # only have to cater for the structural bits of the framework. 
- - .PHONY: frameworkinstallframework --frameworkinstallframework: frameworkinstallstructure install frameworkinstallmaclib -+frameworkinstallframework: @FRAMEWORKINSTALLFIRST@ install frameworkinstallmaclib - --.PHONY: frameworkinstallstructure --frameworkinstallstructure: $(LDLIBRARY) -+# macOS uses a versioned frameworks structure that includes a full install -+.PHONY: frameworkinstallversionedstructure -+frameworkinstallversionedstructure: $(LDLIBRARY) - @if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ - echo Not configured with --enable-framework; \ - exit 1; \ -@@ -2528,6 +2621,27 @@ - $(LN) -fsn Versions/Current/Resources $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Resources - $(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY) - -+# iOS/tvOS/watchOS uses a non-versioned framework with Info.plist in the -+# framework root, no .lproj data, and only stub compilation assistance binaries -+.PHONY: frameworkinstallunversionedstructure -+frameworkinstallunversionedstructure: $(LDLIBRARY) -+ @if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ -+ echo Not configured with --enable-framework; \ -+ exit 1; \ -+ else true; \ -+ fi -+ if test -d $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include; then \ -+ echo "Clearing stale header symlink directory"; \ -+ rm -rf $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include; \ -+ fi -+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR) -+ sed 's/%VERSION%/'"`$(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import platform; print(platform.python_version())'`"'/g' < $(RESSRCDIR)/Info.plist > $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Info.plist -+ $(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY) -+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(BINDIR) -+ for file in $(srcdir)/$(RESSRCDIR)/bin/* ; do \ -+ $(INSTALL) -m $(EXEMODE) $$file $(DESTDIR)$(BINDIR); \ -+ done -+ - # This installs Mac/Lib into the framework - # Install a number of symlinks to keep software that expects a normal unix - # install (which includes python-config) happy. -@@ -2568,6 +2682,19 @@ - frameworkinstallextras: - cd Mac && $(MAKE) installextras DESTDIR="$(DESTDIR)" - -+# On iOS, bin/lib can't live inside the framework; include needs to be called -+# "Headers", but *must* be in the framework, and *not* include the `python3.X` -+# subdirectory. The install has put these folders in the same folder as -+# Python.framework; Move the headers to their final framework-compatible home. -+.PHONY: frameworkinstallmobileheaders -+frameworkinstallmobileheaders: frameworkinstallunversionedstructure inclinstall -+ if test -d $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers; then \ -+ echo "Removing old framework headers"; \ -+ rm -rf $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers; \ -+ fi -+ mv "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" "$(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers" -+ $(LN) -fs "../$(PYTHONFRAMEWORKDIR)/Headers" "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" -+ - # Build the toplevel Makefile - Makefile.pre: $(srcdir)/Makefile.pre.in config.status - CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status -@@ -2678,6 +2805,10 @@ - -find build -type f -a ! -name '*.gc??' 
-exec rm -f {} ';' - -rm -f Include/pydtrace_probes.h - -rm -f profile-gen-stamp -+ -rm -rf iOS/testbed/Python.xcframework/ios-*/bin -+ -rm -rf iOS/testbed/Python.xcframework/ios-*/lib -+ -rm -rf iOS/testbed/Python.xcframework/ios-*/include -+ -rm -rf iOS/testbed/Python.xcframework/ios-*/Python.framework - - .PHONY: profile-removal - profile-removal: -@@ -2703,6 +2834,8 @@ - config.cache config.log pyconfig.h Modules/config.c - -rm -rf build platform - -rm -rf $(PYTHONFRAMEWORKDIR) -+ -rm -rf iOS/Frameworks -+ -rm -rf iOSTestbed.* - -rm -f python-config.py python-config - - # Make things extra clean, before making a distribution: -diff --git a/Misc/ACKS b/Misc/ACKS -index 88bac0a8749..15d4470e284 100644 ---- a/Misc/ACKS -+++ b/Misc/ACKS -@@ -516,6 +516,7 @@ - Ben Escoto - Andy Eskilsson - André Espaze -+Lucas Esposito - Stefan Esser - Nicolas Estibals - Jonathan Eunice -@@ -744,6 +745,7 @@ - Chris Herborth - Ivan Herman - Jürgen Hermann -+Joshua Jay Herman - Gary Herron - Ernie Hershey - Thomas Herve -@@ -1086,6 +1088,7 @@ - Ben Lewis - William Lewis - Akira Li -+Jiahao Li - Robert Li - Xuanji Li - Zekun Li -@@ -1303,6 +1306,7 @@ - Gregory Nofi - Jesse Noller - Bill Noon -+Janek Nouvertné - Stefan Norberg - Tim Northover - Joe Norton -diff --git a/Misc/HISTORY b/Misc/HISTORY -index 3cf3a0bfaaf..08b53c02d4b 100644 ---- a/Misc/HISTORY -+++ b/Misc/HISTORY -@@ -3952,7 +3952,7 @@ - - Issue #18626: the inspect module now offers a basic command line - introspection interface (Initial patch by Claudiu Popa) - --- Issue #3015: Fixed tkinter with wantobject=False. Any Tcl command call -+- Issue #3015: Fixed tkinter with ``wantobjects=False``. Any Tcl command call - returned empty string. - - - Issue #19037: The mailbox module now makes all changes to maildir files -@@ -5590,7 +5590,7 @@ - - Issue #16248: Disable code execution from the user's home directory by - tkinter when the -E flag is passed to Python. Patch by Zachary Ware. - --- Issue #13390: New function :func:`sys.getallocatedblocks()` returns the -+- Issue #13390: New function :func:`sys.getallocatedblocks` returns the - number of memory blocks currently allocated. - - - Issue #16628: Fix a memory leak in ctypes.resize(). -@@ -6157,7 +6157,7 @@ - starting with a ".". Patch by Sebastian Kreft. - - - Issue #13390: The ``-R`` option to regrtest now also checks for memory -- allocation leaks, using :func:`sys.getallocatedblocks()`. -+ allocation leaks, using :func:`sys.getallocatedblocks`. - - - Issue #16559: Add more tests for the json module, including some from the - official test suite at json.org. Patch by Serhiy Storchaka. 
-diff --git a/Misc/externals.spdx.json b/Misc/externals.spdx.json -index e905c2b907e..6c711e74f94 100644 ---- a/Misc/externals.spdx.json -+++ b/Misc/externals.spdx.json -@@ -48,21 +48,21 @@ - "checksums": [ - { - "algorithm": "SHA256", -- "checksumValue": "e6a77c273ebb284fedd8ea19b081fce74a9455936ffd47215f7c24713e2614b2" -+ "checksumValue": "1550c87996a0858474a9dd179deab2c55eb73726b9a140b32865b02fd3d8a86b" - } - ], -- "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/openssl-3.0.13.tar.gz", -+ "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/openssl-3.0.15.tar.gz", - "externalRefs": [ - { - "referenceCategory": "SECURITY", -- "referenceLocator": "cpe:2.3:a:openssl:openssl:3.0.13:*:*:*:*:*:*:*", -+ "referenceLocator": "cpe:2.3:a:openssl:openssl:3.0.15:*:*:*:*:*:*:*", - "referenceType": "cpe23Type" - } - ], - "licenseConcluded": "NOASSERTION", - "name": "openssl", - "primaryPackagePurpose": "SOURCE", -- "versionInfo": "3.0.13" -+ "versionInfo": "3.0.15" - }, - { - "SPDXID": "SPDXRef-PACKAGE-sqlite", -diff --git a/Misc/python.man b/Misc/python.man -index 9f89c94adf5..abb065cd610 100644 ---- a/Misc/python.man -+++ b/Misc/python.man -@@ -251,6 +251,7 @@ - -Wdefault # Warn once per call location - -Werror # Convert to exceptions - -Walways # Warn every time -+ -Wall # Same as -Walways - -Wmodule # Warn once per calling module - -Wonce # Warn once per Python process - -Wignore # Never warn -diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json -index 49b25ff774d..b33c66ab893 100644 ---- a/Misc/sbom.spdx.json -+++ b/Misc/sbom.spdx.json -@@ -48,11 +48,11 @@ - "checksums": [ - { - "algorithm": "SHA1", -- "checksumValue": "4076a884f0ca96873589b5c8159e2e5bfb8b829a" -+ "checksumValue": "6aaee1b194bea30f0a60d1cce71eada8b14d3526" - }, - { - "algorithm": "SHA256", -- "checksumValue": "1a434bf3d2f9fb8a0b5adb79201a942788d11824c3e5b46a0b9962c0c482016c" -+ "checksumValue": "7bd4e53a8015534b5bbb58afe1a131b3989d3d4fca29bca685c44d34bcaa2555" - } - ], - "fileName": "Modules/expat/expat.h" -@@ -146,11 +146,11 @@ - "checksums": [ - { - "algorithm": "SHA1", -- "checksumValue": "4c49b5df2bc702f663ba3b5a52d1940ec363226b" -+ "checksumValue": "aca27f46d9fd387b63ce7ff2e4f172cad130b39b" - }, - { - "algorithm": "SHA256", -- "checksumValue": "b5ec29f6560acc183f1ee8ab92bb3aea17b87b4c2120cd2e3f78deba7a12491e" -+ "checksumValue": "f537add526ecda8389503b7ef45fb52b6217e4dc171dcc3a8dc6903ff6134726" - } - ], - "fileName": "Modules/expat/siphash.h" -@@ -188,11 +188,11 @@ - "checksums": [ - { - "algorithm": "SHA1", -- "checksumValue": "fed1311be8577491b7f63085a27014eabf2caec8" -+ "checksumValue": "b2ec0ad170ccc21e63fbcfc8d7404cdd756eedd3" - }, - { - "algorithm": "SHA256", -- "checksumValue": "3dc233eca5fa1bb7387c503f8a12d840707e4374b229e05d5657db9645725040" -+ "checksumValue": "92159d4e17393e56ee85f47d9fb31348695a58589899aa01e7536cdc88f60b85" - } - ], - "fileName": "Modules/expat/xmlparse.c" -@@ -1562,14 +1562,14 @@ - "checksums": [ - { - "algorithm": "SHA256", -- "checksumValue": "d4cf38d26e21a56654ffe4acd9cd5481164619626802328506a2869afab29ab3" -+ "checksumValue": "17aa6cfc5c4c219c09287abfc10bc13f0c06f30bb654b28bfe6f567ca646eb79" - } - ], -- "downloadLocation": "https://github.com/libexpat/libexpat/releases/download/R_2_6_2/expat-2.6.2.tar.gz", -+ "downloadLocation": "https://github.com/libexpat/libexpat/releases/download/R_2_6_3/expat-2.6.3.tar.gz", - "externalRefs": [ - { - "referenceCategory": "SECURITY", -- "referenceLocator": 
"cpe:2.3:a:libexpat_project:libexpat:2.6.2:*:*:*:*:*:*:*", -+ "referenceLocator": "cpe:2.3:a:libexpat_project:libexpat:2.6.3:*:*:*:*:*:*:*", - "referenceType": "cpe23Type" - } - ], -@@ -1577,7 +1577,7 @@ - "name": "expat", - "originator": "Organization: Expat development team", - "primaryPackagePurpose": "SOURCE", -- "versionInfo": "2.6.2" -+ "versionInfo": "2.6.3" - }, - { - "SPDXID": "SPDXRef-PACKAGE-hacl-star", -diff --git a/Misc/valgrind-python.supp b/Misc/valgrind-python.supp -index c9c45ba7ed6..f5e39d6f2ec 100644 ---- a/Misc/valgrind-python.supp -+++ b/Misc/valgrind-python.supp -@@ -103,6 +103,49 @@ - fun:COMMENT_THIS_LINE_TO_DISABLE_LEAK_WARNING - } - -+# -+# Leaks: dlopen() called without dlclose() -+# -+ -+{ -+ dlopen() called without dlclose() -+ Memcheck:Leak -+ fun:malloc -+ fun:malloc -+ fun:strdup -+ fun:_dl_load_cache_lookup -+} -+{ -+ dlopen() called without dlclose() -+ Memcheck:Leak -+ fun:malloc -+ fun:malloc -+ fun:strdup -+ fun:_dl_map_object -+} -+{ -+ dlopen() called without dlclose() -+ Memcheck:Leak -+ fun:malloc -+ fun:* -+ fun:_dl_new_object -+} -+{ -+ dlopen() called without dlclose() -+ Memcheck:Leak -+ fun:calloc -+ fun:* -+ fun:_dl_new_object -+} -+{ -+ dlopen() called without dlclose() -+ Memcheck:Leak -+ fun:calloc -+ fun:* -+ fun:_dl_check_map_versions -+} -+ -+ - # - # Non-python specific leaks - # -diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c -index 05e79915ba7..da44bb6b714 100644 ---- a/Modules/_asynciomodule.c -+++ b/Modules/_asynciomodule.c -@@ -594,12 +594,27 @@ - PyErr_SetString(PyExc_TypeError, "invalid exception object"); - return NULL; - } -- if (Py_IS_TYPE(exc_val, (PyTypeObject *)PyExc_StopIteration)) { -+ if (PyErr_GivenExceptionMatches(exc_val, PyExc_StopIteration)) { -+ const char *msg = "StopIteration interacts badly with " -+ "generators and cannot be raised into a " -+ "Future"; -+ PyObject *message = PyUnicode_FromString(msg); -+ if (message == NULL) { -+ Py_DECREF(exc_val); -+ return NULL; -+ } -+ PyObject *err = PyObject_CallOneArg(PyExc_RuntimeError, message); -+ Py_DECREF(message); -+ if (err == NULL) { -+ Py_DECREF(exc_val); -+ return NULL; -+ } -+ assert(PyExceptionInstance_Check(err)); -+ -+ PyException_SetCause(err, Py_NewRef(exc_val)); -+ PyException_SetContext(err, Py_NewRef(exc_val)); - Py_DECREF(exc_val); -- PyErr_SetString(PyExc_TypeError, -- "StopIteration interacts badly with generators " -- "and cannot be raised into a Future"); -- return NULL; -+ exc_val = err; - } - - assert(!fut->fut_exception); -@@ -2509,7 +2524,11 @@ - _asyncio_Task_get_coro_impl(TaskObj *self) - /*[clinic end generated code: output=bcac27c8cc6c8073 input=d2e8606c42a7b403]*/ - { -- return Py_NewRef(self->task_coro); -+ if (self->task_coro) { -+ return Py_NewRef(self->task_coro); -+ } -+ -+ Py_RETURN_NONE; - } - - /*[clinic input] -@@ -3602,14 +3621,6 @@ - Py_VISIT(state->iscoroutine_typecache); - - Py_VISIT(state->context_kwname); -- -- // Visit freelist. 
-- PyObject *next = (PyObject*) state->fi_freelist; -- while (next != NULL) { -- PyObject *current = next; -- Py_VISIT(current); -- next = (PyObject*) ((futureiterobject*) current)->future; -- } - return 0; - } - -diff --git a/Modules/_csv.c b/Modules/_csv.c -index d63eac1bf7a..9a7b7d27c2e 100644 ---- a/Modules/_csv.c -+++ b/Modules/_csv.c -@@ -701,6 +701,8 @@ - } - else if (c == dialect->escapechar) { - /* possible escaped character */ -+ if (dialect->quoting == QUOTE_NONNUMERIC) -+ self->numeric_field = 1; - self->state = ESCAPED_CHAR; - } - else if (c == ' ' && dialect->skipinitialspace) -diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c -index 3915786548f..5788b53bd5d 100644 ---- a/Modules/_ctypes/_ctypes_test.c -+++ b/Modules/_ctypes/_ctypes_test.c -@@ -164,7 +164,7 @@ - - /* - * Test3C struct tests the MAX_STRUCT_SIZE 32. Structs containing arrays of up -- * to four floating point types are passed in registers on Arm platforms. -+ * to four floating-point types are passed in registers on Arm platforms. - * This struct is used for within bounds test on Arm platfroms and for an - * out-of-bounds tests for platfroms where MAX_STRUCT_SIZE is less than 32. - * See gh-110190. -@@ -188,7 +188,7 @@ - - /* - * Test3D struct tests the MAX_STRUCT_SIZE 64. Structs containing arrays of up -- * to eight floating point types are passed in registers on PPC64LE platforms. -+ * to eight floating-point types are passed in registers on PPC64LE platforms. - * This struct is used for within bounds test on PPC64LE platfroms and for an - * out-of-bounds tests for platfroms where MAX_STRUCT_SIZE is less than 64. - * See gh-110190. -diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c -index 8552e42d8be..5a062b9c8c0 100644 ---- a/Modules/_datetimemodule.c -+++ b/Modules/_datetimemodule.c -@@ -5209,19 +5209,19 @@ - static PyObject * - datetime_strptime(PyObject *cls, PyObject *args) - { -- static PyObject *module = NULL; -- PyObject *string, *format; -+ PyObject *string, *format, *result; - - if (!PyArg_ParseTuple(args, "UU:strptime", &string, &format)) - return NULL; - -+ PyObject *module = PyImport_ImportModule("_strptime"); - if (module == NULL) { -- module = PyImport_ImportModule("_strptime"); -- if (module == NULL) -- return NULL; -+ return NULL; - } -- return PyObject_CallMethodObjArgs(module, &_Py_ID(_strptime_datetime), -- cls, string, format, NULL); -+ result = PyObject_CallMethodObjArgs(module, &_Py_ID(_strptime_datetime), -+ cls, string, format, NULL); -+ Py_DECREF(module); -+ return result; - } - - /* Return new datetime from date/datetime and time arguments. */ -diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c -index fcd4be9338f..386f38add16 100644 ---- a/Modules/_elementtree.c -+++ b/Modules/_elementtree.c -@@ -1213,12 +1213,8 @@ - PyObject* seq; - Py_ssize_t i; - -- seq = PySequence_Fast(elements, ""); -+ seq = PySequence_Fast(elements, "'elements' must be an iterable"); - if (!seq) { -- PyErr_Format( -- PyExc_TypeError, -- "expected sequence, not \"%.200s\"", Py_TYPE(elements)->tp_name -- ); - return NULL; - } - -@@ -1504,7 +1500,7 @@ - { - ElementObject* self = (ElementObject*) self_; - if (PyErr_WarnEx(PyExc_DeprecationWarning, -- "Testing an element's truth value will raise an exception " -+ "Testing an element's truth value will always return True " - "in future versions. 
Use specific 'len(elem)' or " - "'elem is not None' test instead.", - 1) < 0) { -@@ -1920,12 +1916,8 @@ - } - - /* A new slice is actually being assigned */ -- seq = PySequence_Fast(value, ""); -+ seq = PySequence_Fast(value, "assignment expects an iterable"); - if (!seq) { -- PyErr_Format( -- PyExc_TypeError, -- "expected sequence, not \"%.200s\"", Py_TYPE(value)->tp_name -- ); - return -1; - } - newlen = PySequence_Fast_GET_SIZE(seq); -diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c -index 14dd19d95c2..4a1ba22d381 100644 ---- a/Modules/_io/textio.c -+++ b/Modules/_io/textio.c -@@ -1723,16 +1723,26 @@ - bytes_len = PyBytes_GET_SIZE(b); - } - -- if (self->pending_bytes == NULL) { -- self->pending_bytes_count = 0; -- self->pending_bytes = b; -- } -- else if (self->pending_bytes_count + bytes_len > self->chunk_size) { -- // Prevent to concatenate more than chunk_size data. -- if (_textiowrapper_writeflush(self) < 0) { -- Py_DECREF(b); -- return NULL; -+ // We should avoid concatinating huge data. -+ // Flush the buffer before adding b to the buffer if b is not small. -+ // https://github.com/python/cpython/issues/87426 -+ if (bytes_len >= self->chunk_size) { -+ // _textiowrapper_writeflush() calls buffer.write(). -+ // self->pending_bytes can be appended during buffer->write() -+ // or other thread. -+ // We need to loop until buffer becomes empty. -+ // https://github.com/python/cpython/issues/118138 -+ // https://github.com/python/cpython/issues/119506 -+ while (self->pending_bytes != NULL) { -+ if (_textiowrapper_writeflush(self) < 0) { -+ Py_DECREF(b); -+ return NULL; -+ } - } -+ } -+ -+ if (self->pending_bytes == NULL) { -+ assert(self->pending_bytes_count == 0); - self->pending_bytes = b; - } - else if (!PyList_CheckExact(self->pending_bytes)) { -@@ -1741,6 +1751,9 @@ - Py_DECREF(b); - return NULL; - } -+ // Since Python 3.12, allocating GC object won't trigger GC and release -+ // GIL. 
See https://github.com/python/cpython/issues/97922 -+ assert(!PyList_CheckExact(self->pending_bytes)); - PyList_SET_ITEM(list, 0, self->pending_bytes); - PyList_SET_ITEM(list, 1, b); - self->pending_bytes = list; -diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c -index cbd036fdf2a..f080b97034c 100644 ---- a/Modules/_localemodule.c -+++ b/Modules/_localemodule.c -@@ -62,7 +62,7 @@ - [clinic start generated code]*/ - /*[clinic end generated code: output=da39a3ee5e6b4b0d input=ed98569b726feada]*/ - --/* support functions for formatting floating point numbers */ -+/* support functions for formatting floating-point numbers */ - - /* the grouping is terminated by either 0 or CHAR_MAX */ - static PyObject* -diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c -index 257de4387c0..2c82b18c0e1 100644 ---- a/Modules/_lsprof.c -+++ b/Modules/_lsprof.c -@@ -56,6 +56,7 @@ - #define POF_ENABLED 0x001 - #define POF_SUBCALLS 0x002 - #define POF_BUILTINS 0x004 -+#define POF_EXT_TIMER 0x008 - #define POF_NOMEMORY 0x100 - - /*[clinic input] -@@ -84,7 +85,14 @@ - - static _PyTime_t CallExternalTimer(ProfilerObject *pObj) - { -- PyObject *o = _PyObject_CallNoArgs(pObj->externalTimer); -+ PyObject *o = NULL; -+ -+ // External timer can do arbitrary things so we need a flag to prevent -+ // horrible things to happen -+ pObj->flags |= POF_EXT_TIMER; -+ o = _PyObject_CallNoArgs(pObj->externalTimer); -+ pObj->flags &= ~POF_EXT_TIMER; -+ - if (o == NULL) { - PyErr_WriteUnraisable(pObj->externalTimer); - return 0; -@@ -773,6 +781,11 @@ - static PyObject* - profiler_disable(ProfilerObject *self, PyObject* noarg) - { -+ if (self->flags & POF_EXT_TIMER) { -+ PyErr_SetString(PyExc_RuntimeError, -+ "cannot disable profiler in external timer"); -+ return NULL; -+ } - if (self->flags & POF_ENABLED) { - PyObject* result = NULL; - PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); -@@ -826,6 +839,11 @@ - static PyObject* - profiler_clear(ProfilerObject *pObj, PyObject* noarg) - { -+ if (pObj->flags & POF_EXT_TIMER) { -+ PyErr_SetString(PyExc_RuntimeError, -+ "cannot clear profiler in external timer"); -+ return NULL; -+ } - clearEntries(pObj); - Py_RETURN_NONE; - } -@@ -834,6 +852,7 @@ - profiler_traverse(ProfilerObject *op, visitproc visit, void *arg) - { - Py_VISIT(Py_TYPE(op)); -+ Py_VISIT(op->externalTimer); - return 0; - } - -diff --git a/Modules/_pickle.c b/Modules/_pickle.c -index 2bf9977f0b7..831d53bc82f 100644 ---- a/Modules/_pickle.c -+++ b/Modules/_pickle.c -@@ -1876,10 +1876,10 @@ - if (_PyUnicode_EqualToASCIIString(subpath, "")) { - if (obj == NULL) - PyErr_Format(PyExc_AttributeError, -- "Can't pickle local object %R", name); -+ "Can't get local object %R", name); - else - PyErr_Format(PyExc_AttributeError, -- "Can't pickle local attribute %R on %R", name, obj); -+ "Can't get local attribute %R on %R", name, obj); - Py_DECREF(dotted_path); - return NULL; - } -@@ -2566,7 +2566,7 @@ - { - if (self->proto < 5) { - PyErr_SetString(st->PicklingError, -- "PickleBuffer can only pickled with protocol >= 5"); -+ "PickleBuffer can only be pickled with protocol >= 5"); - return -1; - } - const Py_buffer* view = PyPickleBuffer_GetBuffer(obj); -@@ -3188,6 +3188,7 @@ - if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) { - PyErr_SetString(PyExc_TypeError, "dict items " - "iterator must return 2-tuples"); -+ Py_DECREF(obj); - return -1; - } - i = save(state, self, PyTuple_GET_ITEM(obj, 0), 0); -@@ -3651,7 +3652,6 @@ - PyObject *module = NULL; - PyObject *parent = NULL; - PyObject *dotted_path 
= NULL; -- PyObject *lastname = NULL; - PyObject *cls; - int status = 0; - -@@ -3692,10 +3692,7 @@ - obj, module_name); - goto error; - } -- lastname = Py_NewRef(PyList_GET_ITEM(dotted_path, -- PyList_GET_SIZE(dotted_path) - 1)); - cls = get_deep_attribute(module, dotted_path, &parent); -- Py_CLEAR(dotted_path); - if (cls == NULL) { - PyErr_Format(st->PicklingError, - "Can't pickle %R: attribute lookup %S on %S failed", -@@ -3728,31 +3725,23 @@ - code_obj = PyDict_GetItemWithError(st->extension_registry, - extension_key); - Py_DECREF(extension_key); -- /* The object is not registered in the extension registry. -- This is the most likely code path. */ - if (code_obj == NULL) { - if (PyErr_Occurred()) { - goto error; - } -+ /* The object is not registered in the extension registry. -+ This is the most likely code path. */ - goto gen_global; - } - -- /* XXX: pickle.py doesn't check neither the type, nor the range -- of the value returned by the extension_registry. It should for -- consistency. */ -- -- /* Verify code_obj has the right type and value. */ -- if (!PyLong_Check(code_obj)) { -- PyErr_Format(st->PicklingError, -- "Can't pickle %R: extension code %R isn't an integer", -- obj, code_obj); -- goto error; -- } -- code = PyLong_AS_LONG(code_obj); -+ Py_INCREF(code_obj); -+ code = PyLong_AsLong(code_obj); -+ Py_DECREF(code_obj); - if (code <= 0 || code > 0x7fffffffL) { -+ /* Should never happen in normal circumstances, since the type and -+ the value of the code are checked in copyreg.add_extension(). */ - if (!PyErr_Occurred()) -- PyErr_Format(st->PicklingError, "Can't pickle %R: extension " -- "code %ld is out of range", obj, code); -+ PyErr_Format(PyExc_RuntimeError, "extension code %ld is out of range", code); - goto error; - } - -@@ -3783,7 +3772,10 @@ - else { - gen_global: - if (parent == module) { -- Py_SETREF(global_name, Py_NewRef(lastname)); -+ Py_SETREF(global_name, -+ Py_NewRef(PyList_GET_ITEM(dotted_path, -+ PyList_GET_SIZE(dotted_path) - 1))); -+ Py_CLEAR(dotted_path); - } - if (self->proto >= 4) { - const char stack_global_op = STACK_GLOBAL; -@@ -3796,20 +3788,30 @@ - if (_Pickler_Write(self, &stack_global_op, 1) < 0) - goto error; - } -- else if (parent != module) { -- PyObject *reduce_value = Py_BuildValue("(O(OO))", -- st->getattr, parent, lastname); -- if (reduce_value == NULL) -- goto error; -- status = save_reduce(st, self, reduce_value, NULL); -- Py_DECREF(reduce_value); -- if (status < 0) -- goto error; -- } - else { - /* Generate a normal global opcode if we are using a pickle - protocol < 4, or if the object is not registered in the -- extension registry. */ -+ extension registry. -+ -+ Objects with multi-part __qualname__ are represented as -+ getattr(getattr(..., attrname1), attrname2). */ -+ const char mark_op = MARK; -+ const char tupletwo_op = (self->proto < 2) ? 
TUPLE : TUPLE2; -+ const char reduce_op = REDUCE; -+ Py_ssize_t i; -+ if (dotted_path) { -+ if (PyList_GET_SIZE(dotted_path) > 1) { -+ Py_SETREF(global_name, Py_NewRef(PyList_GET_ITEM(dotted_path, 0))); -+ } -+ for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) { -+ if (save(st, self, st->getattr, 0) < 0 || -+ (self->proto < 2 && _Pickler_Write(self, &mark_op, 1) < 0)) -+ { -+ goto error; -+ } -+ } -+ } -+ - PyObject *encoded; - PyObject *(*unicode_encoder)(PyObject *); - -@@ -3871,6 +3873,17 @@ - Py_DECREF(encoded); - if (_Pickler_Write(self, "\n", 1) < 0) - goto error; -+ -+ if (dotted_path) { -+ for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) { -+ if (save(st, self, PyList_GET_ITEM(dotted_path, i), 0) < 0 || -+ _Pickler_Write(self, &tupletwo_op, 1) < 0 || -+ _Pickler_Write(self, &reduce_op, 1) < 0) -+ { -+ goto error; -+ } -+ } -+ } - } - /* Memoize the object. */ - if (memo_put(st, self, obj) < 0) -@@ -3886,7 +3899,6 @@ - Py_XDECREF(module); - Py_XDECREF(parent); - Py_XDECREF(dotted_path); -- Py_XDECREF(lastname); - - return status; - } -@@ -6623,11 +6635,13 @@ - if (result == NULL) { - Pdata_clear(self->stack, i + 1); - Py_SET_SIZE(self->stack, mark); -+ Py_DECREF(add_func); - return -1; - } - Py_DECREF(result); - } - Py_SET_SIZE(self->stack, mark); -+ Py_DECREF(add_func); - } - - return 0; -diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c -index 0547390454a..6d9843bb76d 100644 ---- a/Modules/_sre/sre.c -+++ b/Modules/_sre/sre.c -@@ -1572,6 +1572,7 @@ - } - self->items[i].literal = Py_XNewRef(literal); - } -+ PyObject_GC_Track(self); - return (PyObject*) self; - - bad_template: -@@ -2166,6 +2167,8 @@ - return -1; - } - -+ // Check that i*2 cannot overflow to make static analyzers happy -+ assert(i <= SRE_MAXGROUPS); - return i; - } - -diff --git a/Modules/_struct.c b/Modules/_struct.c -index 55efc0c6cfe..e8d9f86e190 100644 ---- a/Modules/_struct.c -+++ b/Modules/_struct.c -@@ -278,7 +278,7 @@ - #define RANGE_ERROR(state, f, flag) return _range_error(state, f, flag) - - --/* Floating point helpers */ -+/* Floating-point helpers */ - - static PyObject * - unpack_halffloat(const char *p, /* start of 2-byte string */ -diff --git a/Modules/_testcapi/numbers.c b/Modules/_testcapi/numbers.c -index 6f7fa3fa7a4..e16ff737440 100644 ---- a/Modules/_testcapi/numbers.c -+++ b/Modules/_testcapi/numbers.c -@@ -1,7 +1,168 @@ - #include "parts.h" - #include "util.h" - -+ -+static PyObject * -+number_check(PyObject *Py_UNUSED(module), PyObject *obj) -+{ -+ NULLABLE(obj); -+ return PyLong_FromLong(PyNumber_Check(obj)); -+} -+ -+#define BINARYFUNC(funcsuffix, methsuffix) \ -+ static PyObject * \ -+ number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *args) \ -+ { \ -+ PyObject *o1, *o2; \ -+ \ -+ if (!PyArg_ParseTuple(args, "OO", &o1, &o2)) { \ -+ return NULL; \ -+ } \ -+ \ -+ NULLABLE(o1); \ -+ NULLABLE(o2); \ -+ return PyNumber_##funcsuffix(o1, o2); \ -+ }; -+ -+BINARYFUNC(Add, add) -+BINARYFUNC(Subtract, subtract) -+BINARYFUNC(Multiply, multiply) -+BINARYFUNC(MatrixMultiply, matrixmultiply) -+BINARYFUNC(FloorDivide, floordivide) -+BINARYFUNC(TrueDivide, truedivide) -+BINARYFUNC(Remainder, remainder) -+BINARYFUNC(Divmod, divmod) -+ -+#define TERNARYFUNC(funcsuffix, methsuffix) \ -+ static PyObject * \ -+ number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *args) \ -+ { \ -+ PyObject *o1, *o2, *o3 = Py_None; \ -+ \ -+ if (!PyArg_ParseTuple(args, "OO|O", &o1, &o2, &o3)) { \ -+ return NULL; \ -+ } \ -+ \ -+ NULLABLE(o1); \ -+ NULLABLE(o2); \ -+ return PyNumber_##funcsuffix(o1, o2, o3); 
\ -+ }; -+ -+TERNARYFUNC(Power, power) -+ -+#define UNARYFUNC(funcsuffix, methsuffix) \ -+ static PyObject * \ -+ number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *obj) \ -+ { \ -+ NULLABLE(obj); \ -+ return PyNumber_##funcsuffix(obj); \ -+ }; -+ -+UNARYFUNC(Negative, negative) -+UNARYFUNC(Positive, positive) -+UNARYFUNC(Absolute, absolute) -+UNARYFUNC(Invert, invert) -+ -+BINARYFUNC(Lshift, lshift) -+BINARYFUNC(Rshift, rshift) -+BINARYFUNC(And, and) -+BINARYFUNC(Xor, xor) -+BINARYFUNC(Or, or) -+ -+BINARYFUNC(InPlaceAdd, inplaceadd) -+BINARYFUNC(InPlaceSubtract, inplacesubtract) -+BINARYFUNC(InPlaceMultiply, inplacemultiply) -+BINARYFUNC(InPlaceMatrixMultiply, inplacematrixmultiply) -+BINARYFUNC(InPlaceFloorDivide, inplacefloordivide) -+BINARYFUNC(InPlaceTrueDivide, inplacetruedivide) -+BINARYFUNC(InPlaceRemainder, inplaceremainder) -+ -+TERNARYFUNC(InPlacePower, inplacepower) -+ -+BINARYFUNC(InPlaceLshift, inplacelshift) -+BINARYFUNC(InPlaceRshift, inplacershift) -+BINARYFUNC(InPlaceAnd, inplaceand) -+BINARYFUNC(InPlaceXor, inplacexor) -+BINARYFUNC(InPlaceOr, inplaceor) -+ -+UNARYFUNC(Long, long) -+UNARYFUNC(Float, float) -+UNARYFUNC(Index, index) -+ -+static PyObject * -+number_tobase(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *n; -+ int base; -+ -+ if (!PyArg_ParseTuple(args, "Oi", &n, &base)) { -+ return NULL; -+ } -+ -+ NULLABLE(n); -+ return PyNumber_ToBase(n, base); -+} -+ -+static PyObject * -+number_asssizet(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *o, *exc; -+ Py_ssize_t ret; -+ -+ if (!PyArg_ParseTuple(args, "OO", &o, &exc)) { -+ return NULL; -+ } -+ -+ NULLABLE(o); -+ NULLABLE(exc); -+ ret = PyNumber_AsSsize_t(o, exc); -+ -+ if (ret == (Py_ssize_t)(-1) && PyErr_Occurred()) { -+ return NULL; -+ } -+ -+ return PyLong_FromSsize_t(ret); -+} -+ -+ - static PyMethodDef test_methods[] = { -+ {"number_check", number_check, METH_O}, -+ {"number_add", number_add, METH_VARARGS}, -+ {"number_subtract", number_subtract, METH_VARARGS}, -+ {"number_multiply", number_multiply, METH_VARARGS}, -+ {"number_matrixmultiply", number_matrixmultiply, METH_VARARGS}, -+ {"number_floordivide", number_floordivide, METH_VARARGS}, -+ {"number_truedivide", number_truedivide, METH_VARARGS}, -+ {"number_remainder", number_remainder, METH_VARARGS}, -+ {"number_divmod", number_divmod, METH_VARARGS}, -+ {"number_power", number_power, METH_VARARGS}, -+ {"number_negative", number_negative, METH_O}, -+ {"number_positive", number_positive, METH_O}, -+ {"number_absolute", number_absolute, METH_O}, -+ {"number_invert", number_invert, METH_O}, -+ {"number_lshift", number_lshift, METH_VARARGS}, -+ {"number_rshift", number_rshift, METH_VARARGS}, -+ {"number_and", number_and, METH_VARARGS}, -+ {"number_xor", number_xor, METH_VARARGS}, -+ {"number_or", number_or, METH_VARARGS}, -+ {"number_inplaceadd", number_inplaceadd, METH_VARARGS}, -+ {"number_inplacesubtract", number_inplacesubtract, METH_VARARGS}, -+ {"number_inplacemultiply", number_inplacemultiply, METH_VARARGS}, -+ {"number_inplacematrixmultiply", number_inplacematrixmultiply, METH_VARARGS}, -+ {"number_inplacefloordivide", number_inplacefloordivide, METH_VARARGS}, -+ {"number_inplacetruedivide", number_inplacetruedivide, METH_VARARGS}, -+ {"number_inplaceremainder", number_inplaceremainder, METH_VARARGS}, -+ {"number_inplacepower", number_inplacepower, METH_VARARGS}, -+ {"number_inplacelshift", number_inplacelshift, METH_VARARGS}, -+ {"number_inplacershift", number_inplacershift, METH_VARARGS}, -+ 
{"number_inplaceand", number_inplaceand, METH_VARARGS}, -+ {"number_inplacexor", number_inplacexor, METH_VARARGS}, -+ {"number_inplaceor", number_inplaceor, METH_VARARGS}, -+ {"number_long", number_long, METH_O}, -+ {"number_float", number_float, METH_O}, -+ {"number_index", number_index, METH_O}, -+ {"number_tobase", number_tobase, METH_VARARGS}, -+ {"number_asssizet", number_asssizet, METH_VARARGS}, - {NULL}, - }; - -diff --git a/Modules/_testcapi/tuple.c b/Modules/_testcapi/tuple.c -index 95dde8c0eda..23ea4e1dbce 100644 ---- a/Modules/_testcapi/tuple.c -+++ b/Modules/_testcapi/tuple.c -@@ -2,14 +2,240 @@ - #include "util.h" - - -+static PyObject * -+tuple_get_size(PyObject *Py_UNUSED(module), PyObject *obj) -+{ -+ NULLABLE(obj); -+ RETURN_SIZE(PyTuple_GET_SIZE(obj)); -+} -+ -+static PyObject * -+tuple_get_item(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj; -+ Py_ssize_t i; -+ if (!PyArg_ParseTuple(args, "On", &obj, &i)) { -+ return NULL; -+ } -+ NULLABLE(obj); -+ return Py_XNewRef(PyTuple_GET_ITEM(obj, i)); -+} -+ -+static PyObject * -+tuple_copy(PyObject *tuple) -+{ -+ Py_ssize_t size = PyTuple_GET_SIZE(tuple); -+ PyObject *newtuple = PyTuple_New(size); -+ if (!newtuple) { -+ return NULL; -+ } -+ for (Py_ssize_t n = 0; n < size; n++) { -+ PyTuple_SET_ITEM(newtuple, n, Py_XNewRef(PyTuple_GET_ITEM(tuple, n))); -+ } -+ return newtuple; -+} -+ -+static PyObject * -+tuple_set_item(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj, *value, *newtuple; -+ Py_ssize_t i; -+ if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { -+ return NULL; -+ } -+ NULLABLE(value); -+ if (PyTuple_CheckExact(obj)) { -+ newtuple = tuple_copy(obj); -+ if (!newtuple) { -+ return NULL; -+ } -+ -+ PyObject *val = PyTuple_GET_ITEM(newtuple, i); -+ PyTuple_SET_ITEM(newtuple, i, Py_XNewRef(value)); -+ Py_DECREF(val); -+ return newtuple; -+ } -+ else { -+ NULLABLE(obj); -+ -+ PyObject *val = PyTuple_GET_ITEM(obj, i); -+ PyTuple_SET_ITEM(obj, i, Py_XNewRef(value)); -+ Py_DECREF(val); -+ return Py_XNewRef(obj); -+ } -+} -+ -+static PyObject * -+_tuple_resize(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *tup; -+ Py_ssize_t newsize; -+ int new = 1; -+ if (!PyArg_ParseTuple(args, "On|p", &tup, &newsize, &new)) { -+ return NULL; -+ } -+ if (new) { -+ tup = tuple_copy(tup); -+ if (!tup) { -+ return NULL; -+ } -+ } -+ else { -+ NULLABLE(tup); -+ Py_XINCREF(tup); -+ } -+ int r = _PyTuple_Resize(&tup, newsize); -+ if (r == -1) { -+ assert(tup == NULL); -+ return NULL; -+ } -+ return tup; -+} -+ -+static PyObject * -+_check_tuple_item_is_NULL(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj; -+ Py_ssize_t i; -+ if (!PyArg_ParseTuple(args, "On", &obj, &i)) { -+ return NULL; -+ } -+ return PyLong_FromLong(PyTuple_GET_ITEM(obj, i) == NULL); -+} -+ -+static PyObject * -+tuple_check(PyObject* Py_UNUSED(module), PyObject *obj) -+{ -+ NULLABLE(obj); -+ return PyLong_FromLong(PyTuple_Check(obj)); -+} -+ -+static PyObject * -+tuple_checkexact(PyObject* Py_UNUSED(module), PyObject *obj) -+{ -+ NULLABLE(obj); -+ return PyLong_FromLong(PyTuple_CheckExact(obj)); -+} -+ -+static PyObject * -+tuple_new(PyObject* Py_UNUSED(module), PyObject *len) -+{ -+ return PyTuple_New(PyLong_AsSsize_t(len)); -+} -+ -+static PyObject * -+tuple_pack(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *arg1 = NULL, *arg2 = NULL; -+ Py_ssize_t size; -+ -+ if (!PyArg_ParseTuple(args, "n|OO", &size, &arg1, &arg2)) { -+ return NULL; -+ } -+ if (arg1) { -+ NULLABLE(arg1); -+ if (arg2) { 
-+ NULLABLE(arg2); -+ return PyTuple_Pack(size, arg1, arg2); -+ } -+ return PyTuple_Pack(size, arg1); -+ } -+ return PyTuple_Pack(size); -+} -+ -+static PyObject * -+tuple_size(PyObject *Py_UNUSED(module), PyObject *obj) -+{ -+ NULLABLE(obj); -+ RETURN_SIZE(PyTuple_Size(obj)); -+} -+ -+static PyObject * -+tuple_getitem(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj; -+ Py_ssize_t i; -+ if (!PyArg_ParseTuple(args, "On", &obj, &i)) { -+ return NULL; -+ } -+ NULLABLE(obj); -+ return Py_XNewRef(PyTuple_GetItem(obj, i)); -+} -+ -+static PyObject * -+tuple_getslice(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj; -+ Py_ssize_t ilow, ihigh; -+ if (!PyArg_ParseTuple(args, "Onn", &obj, &ilow, &ihigh)) { -+ return NULL; -+ } -+ NULLABLE(obj); -+ return PyTuple_GetSlice(obj, ilow, ihigh); -+} -+ -+static PyObject * -+tuple_setitem(PyObject *Py_UNUSED(module), PyObject *args) -+{ -+ PyObject *obj, *value, *newtuple = NULL; -+ Py_ssize_t i; -+ if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { -+ return NULL; -+ } -+ NULLABLE(value); -+ if (PyTuple_CheckExact(obj)) { -+ Py_ssize_t size = PyTuple_Size(obj); -+ newtuple = PyTuple_New(size); -+ if (!newtuple) { -+ return NULL; -+ } -+ for (Py_ssize_t n = 0; n < size; n++) { -+ if (PyTuple_SetItem(newtuple, n, -+ Py_XNewRef(PyTuple_GetItem(obj, n))) == -1) { -+ Py_DECREF(newtuple); -+ return NULL; -+ } -+ } -+ -+ if (PyTuple_SetItem(newtuple, i, Py_XNewRef(value)) == -1) { -+ Py_DECREF(newtuple); -+ return NULL; -+ } -+ return newtuple; -+ } -+ else { -+ NULLABLE(obj); -+ -+ if (PyTuple_SetItem(obj, i, Py_XNewRef(value)) == -1) { -+ return NULL; -+ } -+ return Py_XNewRef(obj); -+ } -+} -+ -+ - static PyMethodDef test_methods[] = { -+ {"tuple_get_size", tuple_get_size, METH_O}, -+ {"tuple_get_item", tuple_get_item, METH_VARARGS}, -+ {"tuple_set_item", tuple_set_item, METH_VARARGS}, -+ {"_tuple_resize", _tuple_resize, METH_VARARGS}, -+ {"_check_tuple_item_is_NULL", _check_tuple_item_is_NULL, METH_VARARGS}, -+ /* Limited C API */ -+ {"tuple_check", tuple_check, METH_O}, -+ {"tuple_checkexact", tuple_checkexact, METH_O}, -+ {"tuple_new", tuple_new, METH_O}, -+ {"tuple_pack", tuple_pack, METH_VARARGS}, -+ {"tuple_size", tuple_size, METH_O}, -+ {"tuple_getitem", tuple_getitem, METH_VARARGS}, -+ {"tuple_getslice", tuple_getslice, METH_VARARGS}, -+ {"tuple_setitem", tuple_setitem, METH_VARARGS}, - {NULL}, - }; - - int - _PyTestCapi_Init_Tuple(PyObject *m) - { -- if (PyModule_AddFunctions(m, test_methods) < 0){ -+ if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - -diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c -index dcbc973c9fb..bf91d0b423e 100644 ---- a/Modules/_testcapi/vectorcall.c -+++ b/Modules/_testcapi/vectorcall.c -@@ -349,6 +349,9 @@ - MethodDescriptor2_new(PyTypeObject* type, PyObject* args, PyObject *kw) - { - MethodDescriptor2Object *op = PyObject_New(MethodDescriptor2Object, type); -+ if (op == NULL) { -+ return NULL; -+ } - op->base.vectorcall = NULL; - op->vectorcall = MethodDescriptor_vectorcall; - return (PyObject *)op; -diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c -index 676535f5463..29cc106a429 100644 ---- a/Modules/_testclinic.c -+++ b/Modules/_testclinic.c -@@ -1034,6 +1034,25 @@ - } - - -+/*[clinic input] -+vararg_with_default2 -+ -+ a: object -+ *args: object -+ b: object = None -+ c: object = None -+ -+[clinic start generated code]*/ -+ -+static PyObject * -+vararg_with_default2_impl(PyObject *module, PyObject *a, PyObject *args, -+ 
PyObject *b, PyObject *c) -+/*[clinic end generated code: output=a0fb7c37796e2129 input=59fb22f5f0a8925f]*/ -+{ -+ return pack_arguments_newref(4, a, args, b, c); -+} -+ -+ - /*[clinic input] - vararg_with_only_defaults - -@@ -1274,6 +1293,7 @@ - VARARG_AND_POSONLY_METHODDEF - VARARG_METHODDEF - VARARG_WITH_DEFAULT_METHODDEF -+ VARARG_WITH_DEFAULT2_METHODDEF - VARARG_WITH_ONLY_DEFAULTS_METHODDEF - GH_32092_OOB_METHODDEF - GH_32092_KW_PASS_METHODDEF -diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c -index 8dca940b3f1..6b5fcb8a365 100644 ---- a/Modules/_tkinter.c -+++ b/Modules/_tkinter.c -@@ -71,6 +71,12 @@ - #define USE_DEPRECATED_TOMMATH_API 1 - #endif - -+// As suggested by https://core.tcl-lang.org/tcl/wiki?name=Migrating+C+extensions+to+Tcl+9 -+#ifndef TCL_SIZE_MAX -+typedef int Tcl_Size; -+#define TCL_SIZE_MAX INT_MAX -+#endif -+ - #if !(defined(MS_WINDOWS) || defined(__CYGWIN__)) - #define HAVE_CREATEFILEHANDLER - #endif -@@ -491,24 +497,28 @@ - } - - static PyObject * --unicodeFromTclObj(Tcl_Obj *value) -+unicodeFromTclObj(TkappObject *tkapp, Tcl_Obj *value) - { -- int len; -+ Tcl_Size len; - #if USE_TCL_UNICODE -- int byteorder = NATIVE_BYTEORDER; -- const Tcl_UniChar *u = Tcl_GetUnicodeFromObj(value, &len); -- if (sizeof(Tcl_UniChar) == 2) -- return PyUnicode_DecodeUTF16((const char *)u, len * 2, -- "surrogatepass", &byteorder); -- else if (sizeof(Tcl_UniChar) == 4) -- return PyUnicode_DecodeUTF32((const char *)u, len * 4, -- "surrogatepass", &byteorder); -- else -- Py_UNREACHABLE(); --#else -+ if (value->typePtr != NULL && tkapp != NULL && -+ (value->typePtr == tkapp->StringType || -+ value->typePtr == tkapp->UTF32StringType)) -+ { -+ int byteorder = NATIVE_BYTEORDER; -+ const Tcl_UniChar *u = Tcl_GetUnicodeFromObj(value, &len); -+ if (sizeof(Tcl_UniChar) == 2) -+ return PyUnicode_DecodeUTF16((const char *)u, len * 2, -+ "surrogatepass", &byteorder); -+ else if (sizeof(Tcl_UniChar) == 4) -+ return PyUnicode_DecodeUTF32((const char *)u, len * 4, -+ "surrogatepass", &byteorder); -+ else -+ Py_UNREACHABLE(); -+ } -+#endif /* USE_TCL_UNICODE */ - const char *s = Tcl_GetStringFromObj(value, &len); - return unicodeFromTclStringAndSize(s, len); --#endif - } - - /*[clinic input] -@@ -521,6 +531,10 @@ - - /**** Tkapp Object ****/ - -+#if TK_MAJOR_VERSION >= 9 -+int Tcl_AppInit(Tcl_Interp *); -+#endif -+ - #ifndef WITH_APPINIT - int - Tcl_AppInit(Tcl_Interp *interp) -@@ -786,7 +800,7 @@ - PyTclObject_string(PyTclObject *self, void *ignored) - { - if (!self->string) { -- self->string = unicodeFromTclObj(self->value); -+ self->string = unicodeFromTclObj(NULL, self->value); - if (!self->string) - return NULL; - } -@@ -800,7 +814,7 @@ - return Py_NewRef(self->string); - } - /* XXX Could cache result if it is non-ASCII. 
*/ -- return unicodeFromTclObj(self->value); -+ return unicodeFromTclObj(NULL, self->value); - } - - static PyObject * -@@ -1010,7 +1024,9 @@ - PyErr_SetString(PyExc_OverflowError, "string is too long"); - return NULL; - } -- if (PyUnicode_IS_ASCII(value)) { -+ if (PyUnicode_IS_ASCII(value) && -+ strlen(PyUnicode_DATA(value)) == (size_t)PyUnicode_GET_LENGTH(value)) -+ { - return Tcl_NewStringObj((const char *)PyUnicode_DATA(value), - (int)size); - } -@@ -1025,9 +1041,6 @@ - "surrogatepass", NATIVE_BYTEORDER); - else - Py_UNREACHABLE(); --#else -- encoded = _PyUnicode_AsUTF8String(value, "surrogateescape"); --#endif - if (!encoded) { - return NULL; - } -@@ -1037,12 +1050,39 @@ - PyErr_SetString(PyExc_OverflowError, "string is too long"); - return NULL; - } --#if USE_TCL_UNICODE - result = Tcl_NewUnicodeObj((const Tcl_UniChar *)PyBytes_AS_STRING(encoded), - (int)(size / sizeof(Tcl_UniChar))); - #else -+ encoded = _PyUnicode_AsUTF8String(value, "surrogateescape"); -+ if (!encoded) { -+ return NULL; -+ } -+ size = PyBytes_GET_SIZE(encoded); -+ if (strlen(PyBytes_AS_STRING(encoded)) != (size_t)size) { -+ /* The string contains embedded null characters. -+ * Tcl needs a null character to be represented as \xc0\x80 in -+ * the Modified UTF-8 encoding. Otherwise the string can be -+ * truncated in some internal operations. -+ * -+ * NOTE: stringlib_replace() could be used here, but optimizing -+ * this obscure case isn't worth it unless stringlib_replace() -+ * was already exposed in the C API for other reasons. */ -+ Py_SETREF(encoded, -+ PyObject_CallMethod(encoded, "replace", "y#y#", -+ "\0", (Py_ssize_t)1, -+ "\xc0\x80", (Py_ssize_t)2)); -+ if (!encoded) { -+ return NULL; -+ } -+ size = PyBytes_GET_SIZE(encoded); -+ } -+ if (size > INT_MAX) { -+ Py_DECREF(encoded); -+ PyErr_SetString(PyExc_OverflowError, "string is too long"); -+ return NULL; -+ } - result = Tcl_NewStringObj(PyBytes_AS_STRING(encoded), (int)size); --#endif -+#endif /* USE_TCL_UNICODE */ - Py_DECREF(encoded); - return result; - } -@@ -1139,7 +1179,7 @@ - Tcl_Interp *interp = Tkapp_Interp(tkapp); - - if (value->typePtr == NULL) { -- return unicodeFromTclObj(value); -+ return unicodeFromTclObj(tkapp, value); - } - - if (value->typePtr == tkapp->BooleanType || -@@ -1148,7 +1188,7 @@ - } - - if (value->typePtr == tkapp->ByteArrayType) { -- int size; -+ Tcl_Size size; - char *data = (char*)Tcl_GetByteArrayFromObj(value, &size); - return PyBytes_FromStringAndSize(data, size); - } -@@ -1174,8 +1214,8 @@ - } - - if (value->typePtr == tkapp->ListType) { -- int size; -- int i, status; -+ Tcl_Size i, size; -+ int status; - PyObject *elem; - Tcl_Obj *tcl_elem; - -@@ -1204,7 +1244,7 @@ - if (value->typePtr == tkapp->StringType || - value->typePtr == tkapp->UTF32StringType) - { -- return unicodeFromTclObj(value); -+ return unicodeFromTclObj(tkapp, value); - } - - if (tkapp->BignumType == NULL && -@@ -1231,9 +1271,9 @@ - } Tkapp_CallEvent; - - static void --Tkapp_CallDeallocArgs(Tcl_Obj** objv, Tcl_Obj** objStore, int objc) -+Tkapp_CallDeallocArgs(Tcl_Obj** objv, Tcl_Obj** objStore, Tcl_Size objc) - { -- int i; -+ Tcl_Size i; - for (i = 0; i < objc; i++) - Tcl_DecrRefCount(objv[i]); - if (objv != objStore) -@@ -1244,7 +1284,7 @@ - interpreter thread, which may or may not be the calling thread. 
*/ - - static Tcl_Obj** --Tkapp_CallArgs(PyObject *args, Tcl_Obj** objStore, int *pobjc) -+Tkapp_CallArgs(PyObject *args, Tcl_Obj** objStore, Tcl_Size *pobjc) - { - Tcl_Obj **objv = objStore; - Py_ssize_t objc = 0, i; -@@ -1292,10 +1332,10 @@ - Tcl_IncrRefCount(objv[i]); - } - } -- *pobjc = (int)objc; -+ *pobjc = (Tcl_Size)objc; - return objv; - finally: -- Tkapp_CallDeallocArgs(objv, objStore, (int)objc); -+ Tkapp_CallDeallocArgs(objv, objStore, (Tcl_Size)objc); - return NULL; - } - -@@ -1304,7 +1344,7 @@ - static PyObject * - Tkapp_UnicodeResult(TkappObject *self) - { -- return unicodeFromTclObj(Tcl_GetObjResult(self->interp)); -+ return unicodeFromTclObj(self, Tcl_GetObjResult(self->interp)); - } - - -@@ -1323,7 +1363,7 @@ - res = FromObj(self, value); - Tcl_DecrRefCount(value); - } else { -- res = unicodeFromTclObj(value); -+ res = unicodeFromTclObj(self, value); - } - return res; - } -@@ -1361,7 +1401,7 @@ - { - Tcl_Obj *objStore[ARGSZ]; - Tcl_Obj **objv; -- int objc; -+ Tcl_Size objc; - int i; - ENTER_PYTHON - if (e->self->trace && !Tkapp_Trace(e->self, PyTuple_Pack(1, e->args))) { -@@ -1417,7 +1457,7 @@ - { - Tcl_Obj *objStore[ARGSZ]; - Tcl_Obj **objv = NULL; -- int objc, i; -+ Tcl_Size objc; - PyObject *res = NULL; - TkappObject *self = (TkappObject*)selfptr; - int flags = TCL_EVAL_DIRECT | TCL_EVAL_GLOBAL; -@@ -1464,6 +1504,7 @@ - { - TRACE(self, ("(O)", args)); - -+ int i; - objv = Tkapp_CallArgs(args, objStore, &objc); - if (!objv) - return NULL; -@@ -1852,7 +1893,7 @@ - res = FromObj(self, tres); - } - else { -- res = unicodeFromTclObj(tres); -+ res = unicodeFromTclObj(self, tres); - } - } - LEAVE_OVERLAP_TCL -@@ -2197,13 +2238,12 @@ - /*[clinic end generated code: output=13b51d34386d36fb input=2b2e13351e3c0b53]*/ - { - char *list; -- int argc; -+ Tcl_Size argc, i; - const char **argv; - PyObject *v; -- int i; - - if (PyTclObject_Check(arg)) { -- int objc; -+ Tcl_Size objc; - Tcl_Obj **objv; - if (Tcl_ListObjGetElements(Tkapp_Interp(self), - ((PyTclObject*)arg)->value, -@@ -2298,7 +2338,7 @@ - return PythonCmd_Error(interp); - - for (i = 0; i < (objc - 1); i++) { -- PyObject *s = unicodeFromTclObj(objv[i + 1]); -+ PyObject *s = unicodeFromTclObj((TkappObject *)data->self, objv[i + 1]); - if (!s) { - Py_DECREF(args); - return PythonCmd_Error(interp); -diff --git a/Modules/_winapi.c b/Modules/_winapi.c -index edb1181809c..76f18c71a07 100644 ---- a/Modules/_winapi.c -+++ b/Modules/_winapi.c -@@ -2268,7 +2268,7 @@ - } - - err = RegOpenKeyExW(hkcr, ext, 0, KEY_READ, &subkey); -- if (err == ERROR_FILE_NOT_FOUND) { -+ if (err == ERROR_FILE_NOT_FOUND || err == ERROR_ACCESS_DENIED) { - err = ERROR_SUCCESS; - continue; - } else if (err != ERROR_SUCCESS) { -diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c -index 8fc86162410..47e40125cf8 100644 ---- a/Modules/_zoneinfo.c -+++ b/Modules/_zoneinfo.c -@@ -954,6 +954,7 @@ - static int - load_data(zoneinfo_state *state, PyZoneInfo_ZoneInfo *self, PyObject *file_obj) - { -+ int rv = 0; - PyObject *data_tuple = NULL; - - long *utcoff = NULL; -@@ -1230,7 +1231,6 @@ - } - } - -- int rv = 0; - goto cleanup; - error: - // These resources only need to be freed if we have failed, if we succeed -diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c -index 19ee83d24c8..d24c5989af3 100644 ---- a/Modules/arraymodule.c -+++ b/Modules/arraymodule.c -@@ -2739,7 +2739,7 @@ - - PyDoc_STRVAR(module_doc, - "This module defines an object type which can efficiently represent\n\ --an array of basic values: characters, integers, floating point\n\ -+an 
array of basic values: characters, integers, floating-point\n\ - numbers. Arrays are sequence types and behave very much like lists,\n\ - except that the type of objects stored in them is constrained.\n"); - -@@ -2767,8 +2767,8 @@ - 'L' unsigned integer 4\n\ - 'q' signed integer 8 (see note)\n\ - 'Q' unsigned integer 8 (see note)\n\ -- 'f' floating point 4\n\ -- 'd' floating point 8\n\ -+ 'f' floating-point 4\n\ -+ 'd' floating-point 8\n\ - \n\ - NOTE: The 'u' typecode corresponds to Python's unicode character. On\n\ - narrow builds this is 2-bytes on wide builds this is 4-bytes.\n\ -diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h -index 3fce2624591..a9bcbf753d2 100644 ---- a/Modules/clinic/_testclinic.c.h -+++ b/Modules/clinic/_testclinic.c.h -@@ -2547,6 +2547,78 @@ - return return_value; - } - -+PyDoc_STRVAR(vararg_with_default2__doc__, -+"vararg_with_default2($module, /, a, *args, b=None, c=None)\n" -+"--\n" -+"\n"); -+ -+#define VARARG_WITH_DEFAULT2_METHODDEF \ -+ {"vararg_with_default2", _PyCFunction_CAST(vararg_with_default2), METH_FASTCALL|METH_KEYWORDS, vararg_with_default2__doc__}, -+ -+static PyObject * -+vararg_with_default2_impl(PyObject *module, PyObject *a, PyObject *args, -+ PyObject *b, PyObject *c); -+ -+static PyObject * -+vararg_with_default2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) -+{ -+ PyObject *return_value = NULL; -+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) -+ -+ #define NUM_KEYWORDS 3 -+ static struct { -+ PyGC_Head _this_is_not_used; -+ PyObject_VAR_HEAD -+ PyObject *ob_item[NUM_KEYWORDS]; -+ } _kwtuple = { -+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) -+ .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, -+ }; -+ #undef NUM_KEYWORDS -+ #define KWTUPLE (&_kwtuple.ob_base.ob_base) -+ -+ #else // !Py_BUILD_CORE -+ # define KWTUPLE NULL -+ #endif // !Py_BUILD_CORE -+ -+ static const char * const _keywords[] = {"a", "b", "c", NULL}; -+ static _PyArg_Parser _parser = { -+ .keywords = _keywords, -+ .fname = "vararg_with_default2", -+ .kwtuple = KWTUPLE, -+ }; -+ #undef KWTUPLE -+ PyObject *argsbuf[4]; -+ Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; -+ PyObject *a; -+ PyObject *__clinic_args = NULL; -+ PyObject *b = Py_None; -+ PyObject *c = Py_None; -+ -+ args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); -+ if (!args) { -+ goto exit; -+ } -+ a = args[0]; -+ __clinic_args = args[1]; -+ if (!noptargs) { -+ goto skip_optional_kwonly; -+ } -+ if (args[2]) { -+ b = args[2]; -+ if (!--noptargs) { -+ goto skip_optional_kwonly; -+ } -+ } -+ c = args[3]; -+skip_optional_kwonly: -+ return_value = vararg_with_default2_impl(module, a, __clinic_args, b, c); -+ -+exit: -+ Py_XDECREF(__clinic_args); -+ return return_value; -+} -+ - PyDoc_STRVAR(vararg_with_only_defaults__doc__, - "vararg_with_only_defaults($module, /, *args, b=None)\n" - "--\n" -@@ -3097,4 +3169,4 @@ - } - return _testclinic_TestClass_meth_method_no_params_impl(self, cls); - } --/*[clinic end generated code: output=999de26ba394ab5d input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=d1fcf6ab8867f4ad input=a9049054013a1b77]*/ -diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h -index c16c1b08398..a2754639d92 100644 ---- a/Modules/clinic/mathmodule.c.h -+++ b/Modules/clinic/mathmodule.c.h -@@ -34,9 +34,9 @@ - "fsum($module, seq, /)\n" - "--\n" - "\n" --"Return an accurate floating point sum of values in the iterable seq.\n" -+"Return an accurate floating-point sum of values in the iterable seq.\n" - "\n" --"Assumes IEEE-754 floating point arithmetic."); -+"Assumes IEEE-754 floating-point arithmetic."); - - #define MATH_FSUM_METHODDEF \ - {"fsum", (PyCFunction)math_fsum, METH_O, math_fsum__doc__}, -@@ -549,7 +549,7 @@ - "isclose($module, /, a, b, *, rel_tol=1e-09, abs_tol=0.0)\n" - "--\n" - "\n" --"Determine whether two floating point numbers are close in value.\n" -+"Determine whether two floating-point numbers are close in value.\n" - "\n" - " rel_tol\n" - " maximum difference for being considered \"close\", relative to the\n" -@@ -950,4 +950,4 @@ - exit: - return return_value; - } --/*[clinic end generated code: output=91a0357265a2a553 input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=bd6c271030b9698b input=a9049054013a1b77]*/ -diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h -index 02cb95a6c5e..066c2f8de14 100644 ---- a/Modules/clinic/posixmodule.c.h -+++ b/Modules/clinic/posixmodule.c.h -@@ -5985,7 +5985,7 @@ - "\n" - "The object returned behaves like a named tuple with these fields:\n" - " (utime, stime, cutime, cstime, elapsed_time)\n" --"All fields are floating point numbers."); -+"All fields are floating-point numbers."); - - #define OS_TIMES_METHODDEF \ - {"times", (PyCFunction)os_times, METH_NOARGS, os_times__doc__}, -@@ -12002,4 +12002,4 @@ - #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF - #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF - #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ --/*[clinic end generated code: output=e2cf3ab750346780 input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=6f0c08f692891c72 input=a9049054013a1b77]*/ -diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h -index f44ca1d70a1..086fab5ba84 100644 ---- a/Modules/clinic/selectmodule.c.h -+++ b/Modules/clinic/selectmodule.c.h -@@ -24,7 +24,7 @@ - "gotten from a fileno() method call on one of those.\n" - "\n" - "The optional 4th argument specifies a timeout in seconds; it may be\n" --"a floating point number to specify fractions of seconds. 
If it is absent\n" -+"a floating-point number to specify fractions of seconds. If it is absent\n" - "or None, the call will never time out.\n" - "\n" - "The return value is a tuple of three lists corresponding to the first three\n" -@@ -1309,4 +1309,4 @@ - #ifndef SELECT_KQUEUE_CONTROL_METHODDEF - #define SELECT_KQUEUE_CONTROL_METHODDEF - #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ --/*[clinic end generated code: output=64516114287e894d input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=4d031b2402ee40e7 input=a9049054013a1b77]*/ -diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h -index 3b3c6ba150a..7206298cc60 100644 ---- a/Modules/clinic/signalmodule.c.h -+++ b/Modules/clinic/signalmodule.c.h -@@ -526,7 +526,7 @@ - "\n" - "Like sigwaitinfo(), but with a timeout.\n" - "\n" --"The timeout is specified in seconds, with floating point numbers allowed."); -+"The timeout is specified in seconds, with floating-point numbers allowed."); - - #define SIGNAL_SIGTIMEDWAIT_METHODDEF \ - {"sigtimedwait", _PyCFunction_CAST(signal_sigtimedwait), METH_FASTCALL, signal_sigtimedwait__doc__}, -@@ -705,4 +705,4 @@ - #ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF - #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF - #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ --/*[clinic end generated code: output=2b54dc607f6e3146 input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=29cc8fb029d04c97 input=a9049054013a1b77]*/ -diff --git a/Modules/expat/expat.h b/Modules/expat/expat.h -index c2770be3897..d0d6015a662 100644 ---- a/Modules/expat/expat.h -+++ b/Modules/expat/expat.h -@@ -1066,7 +1066,7 @@ - */ - #define XML_MAJOR_VERSION 2 - #define XML_MINOR_VERSION 6 --#define XML_MICRO_VERSION 2 -+#define XML_MICRO_VERSION 3 - - #ifdef __cplusplus - } -diff --git a/Modules/expat/siphash.h b/Modules/expat/siphash.h -index a1ed99e687b..04f6f74585b 100644 ---- a/Modules/expat/siphash.h -+++ b/Modules/expat/siphash.h -@@ -126,8 +126,7 @@ - | ((uint64_t)((p)[4]) << 32) | ((uint64_t)((p)[5]) << 40) \ - | ((uint64_t)((p)[6]) << 48) | ((uint64_t)((p)[7]) << 56)) - --#define SIPHASH_INITIALIZER \ -- { 0, 0, 0, 0, {0}, 0, 0 } -+#define SIPHASH_INITIALIZER {0, 0, 0, 0, {0}, 0, 0} - - struct siphash { - uint64_t v0, v1, v2, v3; -diff --git a/Modules/expat/xmlparse.c b/Modules/expat/xmlparse.c -index 2951fec70c5..d9285b213b3 100644 ---- a/Modules/expat/xmlparse.c -+++ b/Modules/expat/xmlparse.c -@@ -1,4 +1,4 @@ --/* 2a14271ad4d35e82bde8ba210b4edb7998794bcbae54deab114046a300f9639a (2.6.2+) -+/* ba4cdf9bdb534f355a9def4c9e25d20ee8e72f95b0a4d930be52e563f5080196 (2.6.3+) - __ __ _ - ___\ \/ /_ __ __ _| |_ - / _ \\ /| '_ \ / _` | __| -@@ -39,6 +39,7 @@ - Copyright (c) 2022 Sean McBride - Copyright (c) 2023 Owain Davies - Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow -+ Copyright (c) 2024 Berkay Eren Ürün - Licensed under the MIT license: - - Permission is hereby granted, free of charge, to any person obtaining -@@ -294,7 +295,7 @@ - The name of the element is stored in both the document and API - encodings. The memory buffer 'buf' is a separately-allocated - memory area which stores the name. During the XML_Parse()/ -- XMLParseBuffer() when the element is open, the memory for the 'raw' -+ XML_ParseBuffer() when the element is open, the memory for the 'raw' - version of the name (in the document encoding) is shared with the - document buffer. 
If the element is open across calls to - XML_Parse()/XML_ParseBuffer(), the buffer is re-allocated to -@@ -2038,6 +2039,12 @@ - - if (parser == NULL) - return XML_STATUS_ERROR; -+ -+ if (len < 0) { -+ parser->m_errorCode = XML_ERROR_INVALID_ARGUMENT; -+ return XML_STATUS_ERROR; -+ } -+ - switch (parser->m_parsingStatus.parsing) { - case XML_SUSPENDED: - parser->m_errorCode = XML_ERROR_SUSPENDED; -@@ -5846,18 +5853,17 @@ - /* Set a safe default value in case 'next' does not get set */ - next = textStart; - --#ifdef XML_DTD - if (entity->is_param) { - int tok - = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next); - result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd, - tok, next, &next, XML_FALSE, XML_FALSE, - XML_ACCOUNT_ENTITY_EXPANSION); -- } else --#endif /* XML_DTD */ -+ } else { - result = doContent(parser, parser->m_tagLevel, parser->m_internalEncoding, - textStart, textEnd, &next, XML_FALSE, - XML_ACCOUNT_ENTITY_EXPANSION); -+ } - - if (result == XML_ERROR_NONE) { - if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) { -@@ -5894,18 +5900,17 @@ - /* Set a safe default value in case 'next' does not get set */ - next = textStart; - --#ifdef XML_DTD - if (entity->is_param) { - int tok - = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next); - result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd, - tok, next, &next, XML_FALSE, XML_TRUE, - XML_ACCOUNT_ENTITY_EXPANSION); -- } else --#endif /* XML_DTD */ -+ } else { - result = doContent(parser, openEntity->startTagLevel, - parser->m_internalEncoding, textStart, textEnd, &next, - XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION); -+ } - - if (result != XML_ERROR_NONE) - return result; -@@ -5932,7 +5937,6 @@ - return XML_ERROR_NONE; - } - --#ifdef XML_DTD - if (entity->is_param) { - int tok; - parser->m_processor = prologProcessor; -@@ -5940,9 +5944,7 @@ - return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr, - (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE, - XML_ACCOUNT_DIRECT); -- } else --#endif /* XML_DTD */ -- { -+ } else { - parser->m_processor = contentProcessor; - /* see externalEntityContentProcessor vs contentProcessor */ - result = doContent(parser, parser->m_parentParser ? 1 : 0, -@@ -7016,6 +7018,16 @@ - if (! newE) - return 0; - if (oldE->nDefaultAtts) { -+ /* Detect and prevent integer overflow. -+ * The preprocessor guard addresses the "always false" warning -+ * from -Wtype-limits on platforms where -+ * sizeof(int) < sizeof(size_t), e.g. on x86_64. */ -+#if UINT_MAX >= SIZE_MAX -+ if ((size_t)oldE->nDefaultAtts -+ > ((size_t)(-1) / sizeof(DEFAULT_ATTRIBUTE))) { -+ return 0; -+ } -+#endif - newE->defaultAtts - = ms->malloc_fcn(oldE->nDefaultAtts * sizeof(DEFAULT_ATTRIBUTE)); - if (! newE->defaultAtts) { -@@ -7558,6 +7570,15 @@ - int next; - - if (! dtd->scaffIndex) { -+ /* Detect and prevent integer overflow. -+ * The preprocessor guard addresses the "always false" warning -+ * from -Wtype-limits on platforms where -+ * sizeof(unsigned int) < sizeof(size_t), e.g. on x86_64. */ -+#if UINT_MAX >= SIZE_MAX -+ if (parser->m_groupSize > ((size_t)(-1) / sizeof(int))) { -+ return -1; -+ } -+#endif - dtd->scaffIndex = (int *)MALLOC(parser, parser->m_groupSize * sizeof(int)); - if (! 
dtd->scaffIndex) - return -1; -diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c -index da26b7bcf4f..96cc6d8ca11 100644 ---- a/Modules/faulthandler.c -+++ b/Modules/faulthandler.c -@@ -70,7 +70,7 @@ - #ifdef SIGILL - {SIGILL, 0, "Illegal instruction", }, - #endif -- {SIGFPE, 0, "Floating point exception", }, -+ {SIGFPE, 0, "Floating-point exception", }, - {SIGABRT, 0, "Aborted", }, - /* define SIGSEGV at the end to make it the default choice if searching the - handler fails in faulthandler_fatal_error() */ -diff --git a/Modules/getpath.c b/Modules/getpath.c -index 0a310000751..83a2bc469ae 100644 ---- a/Modules/getpath.c -+++ b/Modules/getpath.c -@@ -15,6 +15,7 @@ - #endif - - #ifdef __APPLE__ -+# include "TargetConditionals.h" - # include - #endif - -@@ -759,7 +760,7 @@ - return winmodule_to_dict(dict, key, PyWin_DLLhModule); - } - #endif --#elif defined(WITH_NEXT_FRAMEWORK) -+#elif defined(WITH_NEXT_FRAMEWORK) && !defined(TARGET_OS_IPHONE) - static char modPath[MAXPATHLEN + 1]; - static int modPathInitialized = -1; - if (modPathInitialized < 0) { -@@ -953,4 +954,3 @@ - - return _PyStatus_OK(); - } -- -diff --git a/Modules/main.c b/Modules/main.c -index 1b189b45616..b602272b78b 100644 ---- a/Modules/main.c -+++ b/Modules/main.c -@@ -540,6 +540,10 @@ - return; - } - -+ if (PySys_Audit("cpython.run_stdin", NULL) < 0) { -+ return; -+ } -+ - PyCompilerFlags cf = _PyCompilerFlags_INIT; - int res = PyRun_AnyFileFlags(stdin, "", &cf); - *exitcode = (res != 0); -diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c -index bbd6bd010e1..000803981ed 100644 ---- a/Modules/mathmodule.c -+++ b/Modules/mathmodule.c -@@ -106,7 +106,7 @@ - static DoubleLength - dl_fast_sum(double a, double b) - { -- /* Algorithm 1.1. Compensated summation of two floating point numbers. */ -+ /* Algorithm 1.1. Compensated summation of two floating-point numbers. */ - assert(fabs(a) >= fabs(b)); - double x = a + b; - double y = (a - x) + b; -@@ -1347,14 +1347,14 @@ - seq: object - / - --Return an accurate floating point sum of values in the iterable seq. -+Return an accurate floating-point sum of values in the iterable seq. - --Assumes IEEE-754 floating point arithmetic. -+Assumes IEEE-754 floating-point arithmetic. - [clinic start generated code]*/ - - static PyObject * - math_fsum(PyObject *module, PyObject *seq) --/*[clinic end generated code: output=ba5c672b87fe34fc input=c51b7d8caf6f6e82]*/ -+/*[clinic end generated code: output=ba5c672b87fe34fc input=4506244ded6057dc]*/ - { - PyObject *item, *iter, *sum = NULL; - Py_ssize_t i, j, n = 0, m = NUM_PARTIALS; -@@ -2411,7 +2411,7 @@ - To minimize loss of information during the accumulation of fractional - values, each term has a separate accumulator. This also breaks up - sequential dependencies in the inner loop so the CPU can maximize --floating point throughput. [4] On an Apple M1 Max, hypot(*vec) -+floating-point throughput. [4] On an Apple M1 Max, hypot(*vec) - takes only 3.33 µsec when len(vec) == 1000. - - The square root differential correction is needed because a -@@ -3093,7 +3093,7 @@ - maximum difference for being considered "close", regardless of the - magnitude of the input values - --Determine whether two floating point numbers are close in value. -+Determine whether two floating-point numbers are close in value. - - Return True if a is close in value to b, and False otherwise. 
- -@@ -3108,7 +3108,7 @@ - static int - math_isclose_impl(PyObject *module, double a, double b, double rel_tol, - double abs_tol) --/*[clinic end generated code: output=b73070207511952d input=f28671871ea5bfba]*/ -+/*[clinic end generated code: output=b73070207511952d input=12d41764468bfdb8]*/ - { - double diff = 0.0; - -diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c -index 9cc53c86f6e..2277caee58f 100644 ---- a/Modules/posixmodule.c -+++ b/Modules/posixmodule.c -@@ -7579,6 +7579,7 @@ - } - #endif /* HAVE_FORK */ - -+#if defined(HAVE_FORK1) || defined(HAVE_FORKPTY) || defined(HAVE_FORK) - // Common code to raise a warning if we detect there is more than one thread - // running in the process. Best effort, silent if unable to count threads. - // Constraint: Quick. Never overcounts. Never leaves an error set. -@@ -7677,6 +7678,7 @@ - PyErr_Clear(); - } - } -+#endif // HAVE_FORK1 || HAVE_FORKPTY || HAVE_FORK - - #ifdef HAVE_FORK1 - /*[clinic input] -@@ -10054,12 +10056,12 @@ - - The object returned behaves like a named tuple with these fields: - (utime, stime, cutime, cstime, elapsed_time) --All fields are floating point numbers. -+All fields are floating-point numbers. - [clinic start generated code]*/ - - static PyObject * - os_times_impl(PyObject *module) --/*[clinic end generated code: output=35f640503557d32a input=2bf9df3d6ab2e48b]*/ -+/*[clinic end generated code: output=35f640503557d32a input=8dbfe33a2dcc3df3]*/ - #ifdef MS_WINDOWS - { - FILETIME create, exit, kernel, user; -@@ -11783,6 +11785,7 @@ - #endif /* defined(HAVE_MKNOD) && defined(HAVE_MAKEDEV) */ - - -+#ifdef HAVE_DEVICE_MACROS - static PyObject * - major_minor_conv(unsigned int value) - { -@@ -11805,7 +11808,6 @@ - return (dev_t)(unsigned int)value == value; - } - --#ifdef HAVE_DEVICE_MACROS - /*[clinic input] - os.major - -diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c -index 97f1db20f67..50788e5344c 100644 ---- a/Modules/selectmodule.c -+++ b/Modules/selectmodule.c -@@ -263,7 +263,7 @@ - gotten from a fileno() method call on one of those. - - The optional 4th argument specifies a timeout in seconds; it may be --a floating point number to specify fractions of seconds. If it is absent -+a floating-point number to specify fractions of seconds. If it is absent - or None, the call will never time out. - - The return value is a tuple of three lists corresponding to the first three -@@ -278,7 +278,7 @@ - static PyObject * - select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, - PyObject *xlist, PyObject *timeout_obj) --/*[clinic end generated code: output=2b3cfa824f7ae4cf input=e467f5d68033de00]*/ -+/*[clinic end generated code: output=2b3cfa824f7ae4cf input=1199d5e101abca4a]*/ - { - #ifdef SELECT_USES_HEAP - pylist *rfd2obj, *wfd2obj, *efd2obj; -diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c -index 00ea4343735..4f4e6a39683 100644 ---- a/Modules/signalmodule.c -+++ b/Modules/signalmodule.c -@@ -637,7 +637,7 @@ - res = "Aborted"; - break; - case SIGFPE: -- res = "Floating point exception"; -+ res = "Floating-point exception"; - break; - case SIGSEGV: - res = "Segmentation fault"; -@@ -1199,13 +1199,13 @@ - - Like sigwaitinfo(), but with a timeout. - --The timeout is specified in seconds, with floating point numbers allowed. -+The timeout is specified in seconds, with floating-point numbers allowed. 
- [clinic start generated code]*/ - - static PyObject * - signal_sigtimedwait_impl(PyObject *module, sigset_t sigset, - PyObject *timeout_obj) --/*[clinic end generated code: output=59c8971e8ae18a64 input=87fd39237cf0b7ba]*/ -+/*[clinic end generated code: output=59c8971e8ae18a64 input=955773219c1596cd]*/ - { - _PyTime_t timeout; - if (_PyTime_FromSecondsObject(&timeout, -diff --git a/Modules/timemodule.c b/Modules/timemodule.c -index 9038c372815..8613fccfe02 100644 ---- a/Modules/timemodule.c -+++ b/Modules/timemodule.c -@@ -159,7 +159,7 @@ - - - PyDoc_STRVAR(time_doc, --"time() -> floating point number\n\ -+"time() -> floating-point number\n\ - \n\ - Return the current time in seconds since the Epoch.\n\ - Fractions of a second may be present if the system clock provides them."); -@@ -373,7 +373,7 @@ - } - - PyDoc_STRVAR(clock_getres_doc, --"clock_getres(clk_id) -> floating point number\n\ -+"clock_getres(clk_id) -> floating-point number\n\ - \n\ - Return the resolution (precision) of the specified clock clk_id."); - -@@ -432,7 +432,7 @@ - "sleep(seconds)\n\ - \n\ - Delay execution for a given number of seconds. The argument may be\n\ --a floating point number for subsecond precision."); -+a floating-point number for subsecond precision."); - - static PyStructSequence_Field struct_time_type_fields[] = { - {"tm_year", "year, for example, 1993"}, -@@ -1123,7 +1123,7 @@ - } - - PyDoc_STRVAR(mktime_doc, --"mktime(tuple) -> floating point number\n\ -+"mktime(tuple) -> floating-point number\n\ - \n\ - Convert a time tuple in local time to seconds since the Epoch.\n\ - Note that mktime(gmtime(0)) will not generally return zero for most\n\ -@@ -1913,7 +1913,7 @@ - \n\ - There are two standard representations of time. One is the number\n\ - of seconds since the Epoch, in UTC (a.k.a. GMT). It may be an integer\n\ --or a floating point number (to represent fractions of seconds).\n\ -+or a floating-point number (to represent fractions of seconds).\n\ - The epoch is the point where the time starts, the return value of time.gmtime(0).\n\ - It is January 1, 1970, 00:00:00 (UTC) on all platforms.\n\ - \n\ -diff --git a/Objects/boolobject.c b/Objects/boolobject.c -index f43e26f3f24..74e16dd2700 100644 ---- a/Objects/boolobject.c -+++ b/Objects/boolobject.c -@@ -71,8 +71,8 @@ - bool_invert(PyObject *v) - { - if (PyErr_WarnEx(PyExc_DeprecationWarning, -- "Bitwise inversion '~' on bool is deprecated. This " -- "returns the bitwise inversion of the underlying int " -+ "Bitwise inversion '~' on bool is deprecated and will be removed in " -+ "Python 3.16. This returns the bitwise inversion of the underlying int " - "object and is usually not what you expect from negating " - "a bool. 
Use the 'not' operator for boolean negation or " - "~int(x) if you really want the bitwise inversion of the " -diff --git a/Objects/clinic/floatobject.c.h b/Objects/clinic/floatobject.c.h -index a99fd74e4b6..ed166538af9 100644 ---- a/Objects/clinic/floatobject.c.h -+++ b/Objects/clinic/floatobject.c.h -@@ -201,7 +201,7 @@ - "float(x=0, /)\n" - "--\n" - "\n" --"Convert a string or number to a floating point number, if possible."); -+"Convert a string or number to a floating-point number, if possible."); - - static PyObject * - float_new_impl(PyTypeObject *type, PyObject *x); -@@ -260,7 +260,7 @@ - "It exists mainly to be used in Python\'s test suite.\n" - "\n" - "This function returns whichever of \'unknown\', \'IEEE, big-endian\' or \'IEEE,\n" --"little-endian\' best describes the format of floating point numbers used by the\n" -+"little-endian\' best describes the format of floating-point numbers used by the\n" - "C type named by typestr."); - - #define FLOAT___GETFORMAT___METHODDEF \ -@@ -325,4 +325,4 @@ - exit: - return return_value; - } --/*[clinic end generated code: output=ea329577074911b9 input=a9049054013a1b77]*/ -+/*[clinic end generated code: output=e6e3f5f833b37eba input=a9049054013a1b77]*/ -diff --git a/Objects/descrobject.c b/Objects/descrobject.c -index 18876fd2b88..a6c90e7ac13 100644 ---- a/Objects/descrobject.c -+++ b/Objects/descrobject.c -@@ -1788,22 +1788,9 @@ - /* if no docstring given and the getter has one, use that one */ - else if (fget != NULL) { - int rc = _PyObject_LookupAttr(fget, &_Py_ID(__doc__), &prop_doc); -- if (rc <= 0) { -+ if (rc < 0) { - return rc; - } -- if (!Py_IS_TYPE(self, &PyProperty_Type) && -- prop_doc != NULL && prop_doc != Py_None) { -- // This oddity preserves the long existing behavior of surfacing -- // an AttributeError when using a dict-less (__slots__) property -- // subclass as a decorator on a getter method with a docstring. -- // See PropertySubclassTest.test_slots_docstring_copy_exception. -- int err = PyObject_SetAttr( -- (PyObject *)self, &_Py_ID(__doc__), prop_doc); -- if (err < 0) { -- Py_DECREF(prop_doc); // release our new reference. -- return -1; -- } -- } - if (prop_doc == Py_None) { - prop_doc = NULL; - Py_DECREF(Py_None); -@@ -1831,7 +1818,9 @@ - Py_DECREF(prop_doc); - if (err < 0) { - assert(PyErr_Occurred()); -- if (PyErr_ExceptionMatches(PyExc_AttributeError)) { -+ if (!self->getter_doc && -+ PyErr_ExceptionMatches(PyExc_AttributeError)) -+ { - PyErr_Clear(); - // https://github.com/python/cpython/issues/98963#issuecomment-1574413319 - // Python silently dropped this doc assignment through 3.11. -diff --git a/Objects/dictobject.c b/Objects/dictobject.c -index 254cd9ad2f9..a99f32a9c84 100644 ---- a/Objects/dictobject.c -+++ b/Objects/dictobject.c -@@ -1251,10 +1251,6 @@ - MAINTAIN_TRACKING(mp, key, value); - - if (ix == DKIX_EMPTY) { -- uint64_t new_version = _PyDict_NotifyEvent( -- interp, PyDict_EVENT_ADDED, mp, key, value); -- /* Insert into new slot. */ -- mp->ma_keys->dk_version = 0; - assert(old_value == NULL); - if (mp->ma_keys->dk_usable <= 0) { - /* Need to resize. */ -@@ -1262,6 +1258,11 @@ - goto Fail; - } - -+ uint64_t new_version = _PyDict_NotifyEvent( -+ interp, PyDict_EVENT_ADDED, mp, key, value); -+ /* Insert into new slot. 
*/ -+ mp->ma_keys->dk_version = 0; -+ - Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash); - dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries); - -@@ -1335,9 +1336,6 @@ - { - assert(mp->ma_keys == Py_EMPTY_KEYS); - -- uint64_t new_version = _PyDict_NotifyEvent( -- interp, PyDict_EVENT_ADDED, mp, key, value); -- - int unicode = PyUnicode_CheckExact(key); - PyDictKeysObject *newkeys = new_keys_object( - interp, PyDict_LOG_MINSIZE, unicode); -@@ -1346,6 +1344,9 @@ - Py_DECREF(value); - return -1; - } -+ uint64_t new_version = _PyDict_NotifyEvent( -+ interp, PyDict_EVENT_ADDED, mp, key, value); -+ - /* We don't decref Py_EMPTY_KEYS here because it is immortal. */ - mp->ma_keys = newkeys; - mp->ma_values = NULL; -@@ -3324,15 +3325,15 @@ - return NULL; - - if (ix == DKIX_EMPTY) { -- uint64_t new_version = _PyDict_NotifyEvent( -- interp, PyDict_EVENT_ADDED, mp, key, defaultobj); -- mp->ma_keys->dk_version = 0; - value = defaultobj; - if (mp->ma_keys->dk_usable <= 0) { - if (insertion_resize(interp, mp, 1) < 0) { - return NULL; - } - } -+ uint64_t new_version = _PyDict_NotifyEvent( -+ interp, PyDict_EVENT_ADDED, mp, key, defaultobj); -+ mp->ma_keys->dk_version = 0; - Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash); - dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries); - if (DK_IS_UNICODE(mp->ma_keys)) { -diff --git a/Objects/exceptions.c b/Objects/exceptions.c -index e3217c922ee..4f2153b1935 100644 ---- a/Objects/exceptions.c -+++ b/Objects/exceptions.c -@@ -3254,7 +3254,7 @@ - * FloatingPointError extends ArithmeticError - */ - SimpleExtendsException(PyExc_ArithmeticError, FloatingPointError, -- "Floating point operation failed."); -+ "Floating-point operation failed."); - - - /* -diff --git a/Objects/floatobject.c b/Objects/floatobject.c -index 7a882bfd88b..92d40e8acad 100644 ---- a/Objects/floatobject.c -+++ b/Objects/floatobject.c -@@ -1644,12 +1644,12 @@ - x: object(c_default="NULL") = 0 - / - --Convert a string or number to a floating point number, if possible. -+Convert a string or number to a floating-point number, if possible. - [clinic start generated code]*/ - - static PyObject * - float_new_impl(PyTypeObject *type, PyObject *x) --/*[clinic end generated code: output=ccf1e8dc460ba6ba input=f43661b7de03e9d8]*/ -+/*[clinic end generated code: output=ccf1e8dc460ba6ba input=55909f888aa0c8a6]*/ - { - if (type != &PyFloat_Type) { - if (x == NULL) { -@@ -1745,13 +1745,13 @@ - It exists mainly to be used in Python's test suite. - - This function returns whichever of 'unknown', 'IEEE, big-endian' or 'IEEE, --little-endian' best describes the format of floating point numbers used by the -+little-endian' best describes the format of floating-point numbers used by the - C type named by typestr. - [clinic start generated code]*/ - - static PyObject * - float___getformat___impl(PyTypeObject *type, const char *typestr) --/*[clinic end generated code: output=2bfb987228cc9628 input=d5a52600f835ad67]*/ -+/*[clinic end generated code: output=2bfb987228cc9628 input=90d5e246409a246e]*/ - { - float_format_type r; - -@@ -1937,7 +1937,7 @@ - float_format_type detected_double_format, detected_float_format; - - /* We attempt to determine if this machine is using IEEE -- floating point formats by peering at the bits of some -+ floating-point formats by peering at the bits of some - carefully chosen values. 
If it looks like we are on an - IEEE platform, the float packing/unpacking routines can - just copy bits, if not they resort to arithmetic & shifts -diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c -index 117b4e8dfb9..7f89e68340b 100644 ---- a/Objects/genericaliasobject.c -+++ b/Objects/genericaliasobject.c -@@ -564,6 +564,10 @@ - } - - PyObject *res = Py_GenericAlias(alias->origin, newargs); -+ if (res == NULL) { -+ Py_DECREF(newargs); -+ return NULL; -+ } - ((gaobject *)res)->starred = alias->starred; - - Py_DECREF(newargs); -diff --git a/Objects/genobject.c b/Objects/genobject.c -index dc034a4b723..474abe1094b 100644 ---- a/Objects/genobject.c -+++ b/Objects/genobject.c -@@ -374,6 +374,7 @@ - gen_close(PyGenObject *gen, PyObject *args) - { - PyObject *retval; -+ PyObject *yf = _PyGen_yf(gen); - int err = 0; - - if (gen->gi_frame_state == FRAME_CREATED) { -@@ -383,7 +384,6 @@ - if (gen->gi_frame_state >= FRAME_COMPLETED) { - Py_RETURN_NONE; - } -- PyObject *yf = _PyGen_yf(gen); - if (yf) { - PyFrameState state = gen->gi_frame_state; - gen->gi_frame_state = FRAME_EXECUTING; -@@ -396,14 +396,12 @@ - * YIELD_VALUE if the debugger has changed the lineno. */ - if (err == 0 && is_yield(frame->prev_instr)) { - assert(is_resume(frame->prev_instr + 1)); -- int exception_handler_depth = frame->prev_instr[0].op.arg; -+ int exception_handler_depth = frame->prev_instr[0].op.code; - assert(exception_handler_depth > 0); - /* We can safely ignore the outermost try block - * as it automatically generated to handle - * StopIteration. */ - if (exception_handler_depth == 1) { -- gen->gi_frame_state = FRAME_COMPLETED; -- _PyFrame_ClearLocals((_PyInterpreterFrame *)gen->gi_iframe); - Py_RETURN_NONE; - } - } -diff --git a/Objects/listobject.c b/Objects/listobject.c -index f59abe2e644..d017f34b94f 100644 ---- a/Objects/listobject.c -+++ b/Objects/listobject.c -@@ -2759,7 +2759,14 @@ - } - - /* Compare the final item again using the proper operator */ -- return PyObject_RichCompare(vl->ob_item[i], wl->ob_item[i], op); -+ PyObject *vitem = vl->ob_item[i]; -+ PyObject *witem = wl->ob_item[i]; -+ Py_INCREF(vitem); -+ Py_INCREF(witem); -+ PyObject *result = PyObject_RichCompare(vl->ob_item[i], wl->ob_item[i], op); -+ Py_DECREF(vitem); -+ Py_DECREF(witem); -+ return result; - } - - /*[clinic input] -@@ -2928,6 +2935,23 @@ - } - } - -+static Py_ssize_t -+adjust_slice_indexes(PyListObject *lst, -+ Py_ssize_t *start, Py_ssize_t *stop, -+ Py_ssize_t step) -+{ -+ Py_ssize_t slicelength = PySlice_AdjustIndices(Py_SIZE(lst), start, stop, -+ step); -+ -+ /* Make sure s[5:2] = [..] inserts at the right place: -+ before 5, not before 2. */ -+ if ((step < 0 && *start < *stop) || -+ (step > 0 && *start > *stop)) -+ *stop = *start; -+ -+ return slicelength; -+} -+ - static int - list_ass_subscript(PyListObject* self, PyObject* item, PyObject* value) - { -@@ -2940,22 +2964,11 @@ - return list_ass_item(self, i, value); - } - else if (PySlice_Check(item)) { -- Py_ssize_t start, stop, step, slicelength; -+ Py_ssize_t start, stop, step; - - if (PySlice_Unpack(item, &start, &stop, &step) < 0) { - return -1; - } -- slicelength = PySlice_AdjustIndices(Py_SIZE(self), &start, &stop, -- step); -- -- if (step == 1) -- return list_ass_slice(self, start, stop, value); -- -- /* Make sure s[5:2] = [..] inserts at the right place: -- before 5, not before 2. 
*/ -- if ((step < 0 && start < stop) || -- (step > 0 && start > stop)) -- stop = start; - - if (value == NULL) { - /* delete slice */ -@@ -2964,6 +2977,12 @@ - Py_ssize_t i; - int res; - -+ Py_ssize_t slicelength = adjust_slice_indexes(self, &start, &stop, -+ step); -+ -+ if (step == 1) -+ return list_ass_slice(self, start, stop, value); -+ - if (slicelength <= 0) - return 0; - -@@ -3039,6 +3058,15 @@ - if (!seq) - return -1; - -+ Py_ssize_t slicelength = adjust_slice_indexes(self, &start, &stop, -+ step); -+ -+ if (step == 1) { -+ int res = list_ass_slice(self, start, stop, seq); -+ Py_DECREF(seq); -+ return res; -+ } -+ - if (PySequence_Fast_GET_SIZE(seq) != slicelength) { - PyErr_Format(PyExc_ValueError, - "attempt to assign sequence of " -diff --git a/Objects/longobject.c b/Objects/longobject.c -index c72e1643c9f..c366034fe4b 100644 ---- a/Objects/longobject.c -+++ b/Objects/longobject.c -@@ -484,11 +484,18 @@ - do_decref = 1; - } - if (_PyLong_IsCompact(v)) { --#if SIZEOF_LONG < SIZEOF_VOID_P -- intptr_t tmp = _PyLong_CompactValue(v); -- res = (long)tmp; -- if (res != tmp) { -- *overflow = tmp < 0 ? -1 : 1; -+#if SIZEOF_LONG < SIZEOF_SIZE_T -+ Py_ssize_t tmp = _PyLong_CompactValue(v); -+ if (tmp < LONG_MIN) { -+ *overflow = -1; -+ res = -1; -+ } -+ else if (tmp > LONG_MAX) { -+ *overflow = 1; -+ res = -1; -+ } -+ else { -+ res = (long)tmp; - } - #else - res = _PyLong_CompactValue(v); -@@ -633,14 +640,15 @@ - - v = (PyLongObject *)vv; - if (_PyLong_IsNonNegativeCompact(v)) { --#if SIZEOF_LONG < SIZEOF_VOID_P -- intptr_t tmp = _PyLong_CompactValue(v); -+#if SIZEOF_LONG < SIZEOF_SIZE_T -+ size_t tmp = (size_t)_PyLong_CompactValue(v); - unsigned long res = (unsigned long)tmp; - if (res != tmp) { - goto overflow; - } -+ return res; - #else -- return _PyLong_CompactValue(v); -+ return (unsigned long)(size_t)_PyLong_CompactValue(v); - #endif - } - if (_PyLong_IsNegative(v)) { -@@ -686,7 +694,7 @@ - - v = (PyLongObject *)vv; - if (_PyLong_IsNonNegativeCompact(v)) { -- return _PyLong_CompactValue(v); -+ return (size_t)_PyLong_CompactValue(v); - } - if (_PyLong_IsNegative(v)) { - PyErr_SetString(PyExc_OverflowError, -@@ -723,7 +731,11 @@ - } - v = (PyLongObject *)vv; - if (_PyLong_IsCompact(v)) { -- return (unsigned long)_PyLong_CompactValue(v); -+#if SIZEOF_LONG < SIZEOF_SIZE_T -+ return (unsigned long)(size_t)_PyLong_CompactValue(v); -+#else -+ return (unsigned long)(long)_PyLong_CompactValue(v); -+#endif - } - i = _PyLong_DigitCount(v); - int sign = _PyLong_NonCompactSign(v); -@@ -1267,7 +1279,18 @@ - v = (PyLongObject*)vv; - if (_PyLong_IsNonNegativeCompact(v)) { - res = 0; -- bytes = _PyLong_CompactValue(v); -+#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T -+ size_t tmp = (size_t)_PyLong_CompactValue(v); -+ bytes = (unsigned long long)tmp; -+ if (bytes != tmp) { -+ PyErr_SetString(PyExc_OverflowError, -+ "Python int too large to convert " -+ "to C unsigned long long"); -+ res = -1; -+ } -+#else -+ bytes = (unsigned long long)(size_t)_PyLong_CompactValue(v); -+#endif - } - else { - res = _PyLong_AsByteArray((PyLongObject *)vv, (unsigned char *)&bytes, -@@ -1298,7 +1321,11 @@ - } - v = (PyLongObject *)vv; - if (_PyLong_IsCompact(v)) { -- return (unsigned long long)(signed long long)_PyLong_CompactValue(v); -+#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T -+ return (unsigned long long)(size_t)_PyLong_CompactValue(v); -+#else -+ return (unsigned long long)(long long)_PyLong_CompactValue(v); -+#endif - } - i = _PyLong_DigitCount(v); - sign = _PyLong_NonCompactSign(v); -@@ -1370,7 +1397,22 @@ - do_decref = 1; - } - 
if (_PyLong_IsCompact(v)) { -+#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T -+ Py_ssize_t tmp = _PyLong_CompactValue(v); -+ if (tmp < LLONG_MIN) { -+ *overflow = -1; -+ res = -1; -+ } -+ else if (tmp > LLONG_MAX) { -+ *overflow = 1; -+ res = -1; -+ } -+ else { -+ res = (long long)tmp; -+ } -+#else - res = _PyLong_CompactValue(v); -+#endif - } - else { - i = _PyLong_DigitCount(v); -@@ -3308,7 +3350,7 @@ - int sign; - - if (_PyLong_IsCompact(v)) { -- x = _PyLong_CompactValue(v); -+ x = (Py_uhash_t)_PyLong_CompactValue(v); - if (x == (Py_uhash_t)-1) { - x = (Py_uhash_t)-2; - } -@@ -6209,7 +6251,7 @@ - int(x, base=10) -> integer\n\ - \n\ - Convert a number or string to an integer, or return 0 if no arguments\n\ --are given. If x is a number, return x.__int__(). For floating point\n\ -+are given. If x is a number, return x.__int__(). For floating-point\n\ - numbers, this truncates towards zero.\n\ - \n\ - If x is not a number or if base is given, then x must be a string,\n\ -diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c -index b0168044d9f..3c88859acc1 100644 ---- a/Objects/memoryobject.c -+++ b/Objects/memoryobject.c -@@ -264,7 +264,7 @@ - /* Assumptions: ndim >= 1. The macro tests for a corner case that should - perhaps be explicitly forbidden in the PEP. */ - #define HAVE_SUBOFFSETS_IN_LAST_DIM(view) \ -- (view->suboffsets && view->suboffsets[dest->ndim-1] >= 0) -+ (view->suboffsets && view->suboffsets[view->ndim-1] >= 0) - - static inline int - last_dim_is_contiguous(const Py_buffer *dest, const Py_buffer *src) -diff --git a/Objects/structseq.c b/Objects/structseq.c -index 8b189595710..246d4c7630c 100644 ---- a/Objects/structseq.c -+++ b/Objects/structseq.c -@@ -41,12 +41,20 @@ - get_type_attr_as_size(tp, &_Py_ID(n_sequence_fields)) - #define REAL_SIZE_TP(tp) \ - get_type_attr_as_size(tp, &_Py_ID(n_fields)) --#define REAL_SIZE(op) REAL_SIZE_TP(Py_TYPE(op)) -+#define REAL_SIZE(op) get_real_size((PyObject *)op) - - #define UNNAMED_FIELDS_TP(tp) \ - get_type_attr_as_size(tp, &_Py_ID(n_unnamed_fields)) - #define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP(Py_TYPE(op)) - -+static Py_ssize_t -+get_real_size(PyObject *op) -+{ -+ // Compute the real size from the visible size (i.e., Py_SIZE()) and the -+ // number of non-sequence fields accounted for in tp_basicsize. -+ Py_ssize_t hidden = Py_TYPE(op)->tp_basicsize - offsetof(PyStructSequence, ob_item); -+ return Py_SIZE(op) + hidden / sizeof(PyObject *); -+} - - PyObject * - PyStructSequence_New(PyTypeObject *type) -@@ -107,6 +115,9 @@ - PyObject_GC_UnTrack(obj); - - PyTypeObject *tp = Py_TYPE(obj); -+ // gh-122527: We can't use REAL_SIZE_TP() or any macros that access the -+ // type's dictionary here, because the dictionary may have already been -+ // cleared by the garbage collector. - size = REAL_SIZE(obj); - for (i = 0; i < size; ++i) { - Py_XDECREF(obj->ob_item[i]); -@@ -467,10 +478,14 @@ - - static void - initialize_static_fields(PyTypeObject *type, PyStructSequence_Desc *desc, -- PyMemberDef *tp_members, unsigned long tp_flags) -+ PyMemberDef *tp_members, Py_ssize_t n_members, -+ unsigned long tp_flags) - { - type->tp_name = desc->name; -- type->tp_basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); -+ // Account for hidden members in tp_basicsize because they are not -+ // included in the variable size. 
-+ Py_ssize_t n_hidden = n_members - desc->n_in_sequence; -+ type->tp_basicsize = sizeof(PyStructSequence) + (n_hidden - 1) * sizeof(PyObject *); - type->tp_itemsize = sizeof(PyObject *); - type->tp_dealloc = (destructor)structseq_dealloc; - type->tp_repr = (reprfunc)structseq_repr; -@@ -520,7 +535,7 @@ - if (members == NULL) { - goto error; - } -- initialize_static_fields(type, desc, members, tp_flags); -+ initialize_static_fields(type, desc, members, n_members, tp_flags); - - _Py_SetImmortal(type); - } -@@ -582,7 +597,7 @@ - if (members == NULL) { - return -1; - } -- initialize_static_fields(type, desc, members, 0); -+ initialize_static_fields(type, desc, members, n_members, 0); - if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) { - PyMem_Free(members); - return -1; -@@ -658,7 +673,8 @@ - /* The name in this PyType_Spec is statically allocated so it is */ - /* expected that it'll outlive the PyType_Spec */ - spec.name = desc->name; -- spec.basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); -+ Py_ssize_t hidden = n_members - desc->n_in_sequence; -+ spec.basicsize = (int)(sizeof(PyStructSequence) + (hidden - 1) * sizeof(PyObject *)); - spec.itemsize = sizeof(PyObject *); - spec.flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | tp_flags; - spec.slots = slots; -diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c -index 991edcc8667..918654fae89 100644 ---- a/Objects/tupleobject.c -+++ b/Objects/tupleobject.c -@@ -1139,7 +1139,7 @@ - return NULL; - } - assert(size > 0); -- if (size < PyTuple_MAXSAVESIZE) { -+ if (size <= PyTuple_MAXSAVESIZE) { - Py_ssize_t index = size - 1; - PyTupleObject *op = STATE.free_list[index]; - if (op != NULL) { -diff --git a/Objects/typeobject.c b/Objects/typeobject.c -index bf2be42f73f..e8ec98bad98 100644 ---- a/Objects/typeobject.c -+++ b/Objects/typeobject.c -@@ -116,6 +116,7 @@ - self->tp_subclasses = NULL; - } - -+ - static inline static_builtin_state * - static_builtin_state_get(PyInterpreterState *interp, PyTypeObject *self) - { -@@ -5709,7 +5710,6 @@ - static int - object_set_class(PyObject *self, PyObject *value, void *closure) - { -- PyTypeObject *oldto = Py_TYPE(self); - - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, -@@ -5729,6 +5729,8 @@ - return -1; - } - -+ PyTypeObject *oldto = Py_TYPE(self); -+ - /* In versions of CPython prior to 3.5, the code in - compatible_for_assignment was not set up to correctly check for memory - layout / slot / etc. compatibility for non-HEAPTYPE classes, so we just -@@ -7584,6 +7586,7 @@ - if (res < 0) { - static_builtin_state_clear(interp, self); - } -+ - return res; - } - -@@ -10076,6 +10079,84 @@ - return 0; - } - -+static int -+expect_manually_inherited(PyTypeObject *type, void **slot) -+{ -+ PyObject *typeobj = (PyObject *)type; -+ if (slot == (void *)&type->tp_init) { -+ /* This is a best-effort list of builtin exception types -+ that have their own tp_init function. 
*/ -+ if (typeobj != PyExc_BaseException -+ && typeobj != PyExc_BaseExceptionGroup -+ && typeobj != PyExc_ImportError -+ && typeobj != PyExc_NameError -+ && typeobj != PyExc_OSError -+ && typeobj != PyExc_StopIteration -+ && typeobj != PyExc_SyntaxError -+ && typeobj != PyExc_UnicodeDecodeError -+ && typeobj != PyExc_UnicodeEncodeError -+ -+ && type != &PyBool_Type -+ && type != &PyBytes_Type -+ && type != &PyMemoryView_Type -+ && type != &PyComplex_Type -+ && type != &PyEnum_Type -+ && type != &PyFilter_Type -+ && type != &PyFloat_Type -+ && type != &PyFrozenSet_Type -+ && type != &PyLong_Type -+ && type != &PyMap_Type -+ && type != &PyRange_Type -+ && type != &PyReversed_Type -+ && type != &PySlice_Type -+ && type != &PyTuple_Type -+ && type != &PyUnicode_Type -+ && type != &PyZip_Type) -+ -+ { -+ return 1; -+ } -+ } -+ else if (slot == (void *)&type->tp_str) { -+ /* This is a best-effort list of builtin exception types -+ that have their own tp_str function. */ -+ if (typeobj == PyExc_AttributeError || typeobj == PyExc_NameError) { -+ return 1; -+ } -+ } -+ else if (slot == (void *)&type->tp_getattr -+ || slot == (void *)&type->tp_getattro) -+ { -+ /* This is a best-effort list of builtin types -+ that have their own tp_getattr function. */ -+ if (typeobj == PyExc_BaseException -+ || type == &PyByteArray_Type -+ || type == &PyBytes_Type -+ || type == &PyComplex_Type -+ || type == &PyDict_Type -+ || type == &PyEnum_Type -+ || type == &PyFilter_Type -+ || type == &PyLong_Type -+ || type == &PyList_Type -+ || type == &PyMap_Type -+ || type == &PyMemoryView_Type -+ || type == &PyProperty_Type -+ || type == &PyRange_Type -+ || type == &PyReversed_Type -+ || type == &PySet_Type -+ || type == &PySlice_Type -+ || type == &PySuper_Type -+ || type == &PyTuple_Type -+ || type == &PyZip_Type) -+ { -+ return 1; -+ } -+ } -+ -+ /* It must be inherited (see type_ready_inherit()).. */ -+ return 0; -+} -+ - /* This function is called by PyType_Ready() to populate the type's - dictionary with method descriptors for function slots. For each - function slot (like tp_repr) that's defined in the type, one or more -@@ -10120,6 +10201,26 @@ - ptr = slotptr(type, p->offset); - if (!ptr || !*ptr) - continue; -+ if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN -+ && type->tp_base != NULL) -+ { -+ /* Also ignore when the type slot has been inherited. */ -+ void **ptr_base = slotptr(type->tp_base, p->offset); -+ if (ptr_base && *ptr == *ptr_base) { -+ /* Ideally we would always ignore any manually inherited -+ slots, Which would mean inheriting the slot wrapper -+ using normal attribute lookup rather than keeping -+ a distinct copy. However, that would introduce -+ a slight change in behavior that could break -+ existing code. -+ -+ In the meantime, look the other way when the definition -+ explicitly inherits the slot. 
*/ -+ if (!expect_manually_inherited(type, ptr)) { -+ continue; -+ } -+ } -+ } - int r = PyDict_Contains(dict, p->name_strobj); - if (r > 0) - continue; -diff --git a/PCbuild/find_python.bat b/PCbuild/find_python.bat -index d3f62c93869..d8c7cec15ef 100644 ---- a/PCbuild/find_python.bat -+++ b/PCbuild/find_python.bat -@@ -36,13 +36,15 @@ - @if "%_Py_EXTERNALS_DIR%"=="" (set _Py_EXTERNALS_DIR=%_Py_D%\..\externals) - - @rem If we have Python in externals, use that one --@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 8)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86" -+@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86" - - @rem If HOST_PYTHON is recent enough, use that --@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 9)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found -+@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found - - @rem If py.exe finds a recent enough version, use that one --@for %%p in (3.11 3.10 3.9) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found -+@rem It is fine to add new versions to this list when they have released, -+@rem but we do not use prerelease builds here. 
-+@for %%p in (3.12 3.11 3.10) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found - - @if NOT exist "%_Py_EXTERNALS_DIR%" mkdir "%_Py_EXTERNALS_DIR%" - @set _Py_NUGET=%NUGET% -diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat -index 1b741d6e49f..e0144bcdf18 100644 ---- a/PCbuild/get_externals.bat -+++ b/PCbuild/get_externals.bat -@@ -53,7 +53,7 @@ - set libraries= - set libraries=%libraries% bzip2-1.0.8 - if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 --if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.13 -+if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.15 - set libraries=%libraries% sqlite-3.45.3.0 - if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.13.0 - if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.13.0 -@@ -77,7 +77,7 @@ - - set binaries= - if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.4.4 --if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.13 -+if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.15 - if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.13.0 - if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 - -diff --git a/PCbuild/python.props b/PCbuild/python.props -index d799948fa31..b893960f996 100644 ---- a/PCbuild/python.props -+++ b/PCbuild/python.props -@@ -74,8 +74,8 @@ - $(ExternalsDir)libffi-3.4.4\ - $(libffiDir)$(ArchName)\ - $(libffiOutDir)include -- $(ExternalsDir)openssl-3.0.13\ -- $(ExternalsDir)openssl-bin-3.0.13\$(ArchName)\ -+ $(ExternalsDir)openssl-3.0.15\ -+ $(ExternalsDir)openssl-bin-3.0.15\$(ArchName)\ - $(opensslOutDir)include - $(ExternalsDir)\nasm-2.11.06\ - $(ExternalsDir)\zlib-1.3.1\ -diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c -index e68a9cac259..45a32a0213a 100644 ---- a/Parser/action_helpers.c -+++ b/Parser/action_helpers.c -@@ -1,1680 +1,1600 @@ - #include - - #include "pegen.h" --#include "tokenizer.h" -+#include "pycore_runtime.h" // _PyRuntime - #include "string_parser.h" --#include "pycore_runtime.h" // _PyRuntime -+#include "tokenizer.h" - --void * --_PyPegen_dummy_name(Parser *p, ...) --{ -- return &_PyRuntime.parser.dummy_name; -+void *_PyPegen_dummy_name(Parser *p, ...) 
{ -+ return &_PyRuntime.parser.dummy_name; - } - - /* Creates a single-element asdl_seq* that contains a */ --asdl_seq * --_PyPegen_singleton_seq(Parser *p, void *a) --{ -- assert(a != NULL); -- asdl_seq *seq = (asdl_seq*)_Py_asdl_generic_seq_new(1, p->arena); -- if (!seq) { -- return NULL; -- } -- asdl_seq_SET_UNTYPED(seq, 0, a); -- return seq; -+asdl_seq *_PyPegen_singleton_seq(Parser *p, void *a) { -+ assert(a != NULL); -+ asdl_seq *seq = (asdl_seq *)_Py_asdl_generic_seq_new(1, p->arena); -+ if (!seq) { -+ return NULL; -+ } -+ asdl_seq_SET_UNTYPED(seq, 0, a); -+ return seq; - } - - /* Creates a copy of seq and prepends a to it */ --asdl_seq * --_PyPegen_seq_insert_in_front(Parser *p, void *a, asdl_seq *seq) --{ -- assert(a != NULL); -- if (!seq) { -- return _PyPegen_singleton_seq(p, a); -- } -+asdl_seq *_PyPegen_seq_insert_in_front(Parser *p, void *a, asdl_seq *seq) { -+ assert(a != NULL); -+ if (!seq) { -+ return _PyPegen_singleton_seq(p, a); -+ } - -- asdl_seq *new_seq = (asdl_seq*)_Py_asdl_generic_seq_new(asdl_seq_LEN(seq) + 1, p->arena); -- if (!new_seq) { -- return NULL; -- } -+ asdl_seq *new_seq = -+ (asdl_seq *)_Py_asdl_generic_seq_new(asdl_seq_LEN(seq) + 1, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } - -- asdl_seq_SET_UNTYPED(new_seq, 0, a); -- for (Py_ssize_t i = 1, l = asdl_seq_LEN(new_seq); i < l; i++) { -- asdl_seq_SET_UNTYPED(new_seq, i, asdl_seq_GET_UNTYPED(seq, i - 1)); -- } -- return new_seq; -+ asdl_seq_SET_UNTYPED(new_seq, 0, a); -+ for (Py_ssize_t i = 1, l = asdl_seq_LEN(new_seq); i < l; i++) { -+ asdl_seq_SET_UNTYPED(new_seq, i, asdl_seq_GET_UNTYPED(seq, i - 1)); -+ } -+ return new_seq; - } - - /* Creates a copy of seq and appends a to it */ --asdl_seq * --_PyPegen_seq_append_to_end(Parser *p, asdl_seq *seq, void *a) --{ -- assert(a != NULL); -- if (!seq) { -- return _PyPegen_singleton_seq(p, a); -- } -- -- asdl_seq *new_seq = (asdl_seq*)_Py_asdl_generic_seq_new(asdl_seq_LEN(seq) + 1, p->arena); -- if (!new_seq) { -- return NULL; -- } -- -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(new_seq); i + 1 < l; i++) { -- asdl_seq_SET_UNTYPED(new_seq, i, asdl_seq_GET_UNTYPED(seq, i)); -- } -- asdl_seq_SET_UNTYPED(new_seq, asdl_seq_LEN(new_seq) - 1, a); -- return new_seq; --} -- --static Py_ssize_t --_get_flattened_seq_size(asdl_seq *seqs) --{ -- Py_ssize_t size = 0; -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(seqs); i < l; i++) { -- asdl_seq *inner_seq = asdl_seq_GET_UNTYPED(seqs, i); -- size += asdl_seq_LEN(inner_seq); -- } -- return size; -+asdl_seq *_PyPegen_seq_append_to_end(Parser *p, asdl_seq *seq, void *a) { -+ assert(a != NULL); -+ if (!seq) { -+ return _PyPegen_singleton_seq(p, a); -+ } -+ -+ asdl_seq *new_seq = -+ (asdl_seq *)_Py_asdl_generic_seq_new(asdl_seq_LEN(seq) + 1, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(new_seq); i + 1 < l; i++) { -+ asdl_seq_SET_UNTYPED(new_seq, i, asdl_seq_GET_UNTYPED(seq, i)); -+ } -+ asdl_seq_SET_UNTYPED(new_seq, asdl_seq_LEN(new_seq) - 1, a); -+ return new_seq; -+} -+ -+static Py_ssize_t _get_flattened_seq_size(asdl_seq *seqs) { -+ Py_ssize_t size = 0; -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(seqs); i < l; i++) { -+ asdl_seq *inner_seq = asdl_seq_GET_UNTYPED(seqs, i); -+ size += asdl_seq_LEN(inner_seq); -+ } -+ return size; - } - - /* Flattens an asdl_seq* of asdl_seq*s */ --asdl_seq * --_PyPegen_seq_flatten(Parser *p, asdl_seq *seqs) --{ -- Py_ssize_t flattened_seq_size = _get_flattened_seq_size(seqs); -- assert(flattened_seq_size > 0); -- -- asdl_seq *flattened_seq = 
(asdl_seq*)_Py_asdl_generic_seq_new(flattened_seq_size, p->arena); -- if (!flattened_seq) { -- return NULL; -- } -+asdl_seq *_PyPegen_seq_flatten(Parser *p, asdl_seq *seqs) { -+ Py_ssize_t flattened_seq_size = _get_flattened_seq_size(seqs); -+ assert(flattened_seq_size > 0); - -- int flattened_seq_idx = 0; -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(seqs); i < l; i++) { -- asdl_seq *inner_seq = asdl_seq_GET_UNTYPED(seqs, i); -- for (Py_ssize_t j = 0, li = asdl_seq_LEN(inner_seq); j < li; j++) { -- asdl_seq_SET_UNTYPED(flattened_seq, flattened_seq_idx++, asdl_seq_GET_UNTYPED(inner_seq, j)); -- } -+ asdl_seq *flattened_seq = -+ (asdl_seq *)_Py_asdl_generic_seq_new(flattened_seq_size, p->arena); -+ if (!flattened_seq) { -+ return NULL; -+ } -+ -+ int flattened_seq_idx = 0; -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(seqs); i < l; i++) { -+ asdl_seq *inner_seq = asdl_seq_GET_UNTYPED(seqs, i); -+ for (Py_ssize_t j = 0, li = asdl_seq_LEN(inner_seq); j < li; j++) { -+ asdl_seq_SET_UNTYPED(flattened_seq, flattened_seq_idx++, -+ asdl_seq_GET_UNTYPED(inner_seq, j)); - } -- assert(flattened_seq_idx == flattened_seq_size); -+ } -+ assert(flattened_seq_idx == flattened_seq_size); - -- return flattened_seq; -+ return flattened_seq; - } - --void * --_PyPegen_seq_last_item(asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- return asdl_seq_GET_UNTYPED(seq, len - 1); -+void *_PyPegen_seq_last_item(asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ return asdl_seq_GET_UNTYPED(seq, len - 1); - } - --void * --_PyPegen_seq_first_item(asdl_seq *seq) --{ -- return asdl_seq_GET_UNTYPED(seq, 0); -+void *_PyPegen_seq_first_item(asdl_seq *seq) { -+ return asdl_seq_GET_UNTYPED(seq, 0); - } - - /* Creates a new name of the form . */ --expr_ty --_PyPegen_join_names_with_dot(Parser *p, expr_ty first_name, expr_ty second_name) --{ -- assert(first_name != NULL && second_name != NULL); -- PyObject *first_identifier = first_name->v.Name.id; -- PyObject *second_identifier = second_name->v.Name.id; -- -- if (PyUnicode_READY(first_identifier) == -1) { -- return NULL; -- } -- if (PyUnicode_READY(second_identifier) == -1) { -- return NULL; -- } -- const char *first_str = PyUnicode_AsUTF8(first_identifier); -- if (!first_str) { -- return NULL; -- } -- const char *second_str = PyUnicode_AsUTF8(second_identifier); -- if (!second_str) { -- return NULL; -- } -- Py_ssize_t len = strlen(first_str) + strlen(second_str) + 1; // +1 for the dot -- -- PyObject *str = PyBytes_FromStringAndSize(NULL, len); -- if (!str) { -- return NULL; -- } -- -- char *s = PyBytes_AS_STRING(str); -- if (!s) { -- return NULL; -- } -- -- strcpy(s, first_str); -- s += strlen(first_str); -- *s++ = '.'; -- strcpy(s, second_str); -- s += strlen(second_str); -- *s = '\0'; -- -- PyObject *uni = PyUnicode_DecodeUTF8(PyBytes_AS_STRING(str), PyBytes_GET_SIZE(str), NULL); -- Py_DECREF(str); -- if (!uni) { -- return NULL; -- } -- PyUnicode_InternInPlace(&uni); -- if (_PyArena_AddPyObject(p->arena, uni) < 0) { -- Py_DECREF(uni); -- return NULL; -- } -- -- return _PyAST_Name(uni, Load, EXTRA_EXPR(first_name, second_name)); -+expr_ty _PyPegen_join_names_with_dot(Parser *p, expr_ty first_name, -+ expr_ty second_name) { -+ assert(first_name != NULL && second_name != NULL); -+ PyObject *first_identifier = first_name->v.Name.id; -+ PyObject *second_identifier = second_name->v.Name.id; -+ -+ if (PyUnicode_READY(first_identifier) == -1) { -+ return NULL; -+ } -+ if (PyUnicode_READY(second_identifier) == -1) { -+ return NULL; -+ } -+ const char *first_str = 
PyUnicode_AsUTF8(first_identifier); -+ if (!first_str) { -+ return NULL; -+ } -+ const char *second_str = PyUnicode_AsUTF8(second_identifier); -+ if (!second_str) { -+ return NULL; -+ } -+ Py_ssize_t len = strlen(first_str) + strlen(second_str) + 1; // +1 for the dot -+ -+ PyObject *str = PyBytes_FromStringAndSize(NULL, len); -+ if (!str) { -+ return NULL; -+ } -+ -+ char *s = PyBytes_AS_STRING(str); -+ if (!s) { -+ return NULL; -+ } -+ -+ strcpy(s, first_str); -+ s += strlen(first_str); -+ *s++ = '.'; -+ strcpy(s, second_str); -+ s += strlen(second_str); -+ *s = '\0'; -+ -+ PyObject *uni = -+ PyUnicode_DecodeUTF8(PyBytes_AS_STRING(str), PyBytes_GET_SIZE(str), NULL); -+ Py_DECREF(str); -+ if (!uni) { -+ return NULL; -+ } -+ PyUnicode_InternInPlace(&uni); -+ if (_PyArena_AddPyObject(p->arena, uni) < 0) { -+ Py_DECREF(uni); -+ return NULL; -+ } -+ -+ return _PyAST_Name(uni, Load, EXTRA_EXPR(first_name, second_name)); - } - - /* Counts the total number of dots in seq's tokens */ --int --_PyPegen_seq_count_dots(asdl_seq *seq) --{ -- int number_of_dots = 0; -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(seq); i < l; i++) { -- Token *current_expr = asdl_seq_GET_UNTYPED(seq, i); -- switch (current_expr->type) { -- case ELLIPSIS: -- number_of_dots += 3; -- break; -- case DOT: -- number_of_dots += 1; -- break; -- default: -- Py_UNREACHABLE(); -- } -+int _PyPegen_seq_count_dots(asdl_seq *seq) { -+ int number_of_dots = 0; -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(seq); i < l; i++) { -+ Token *current_expr = asdl_seq_GET_UNTYPED(seq, i); -+ switch (current_expr->type) { -+ case ELLIPSIS: -+ number_of_dots += 3; -+ break; -+ case DOT: -+ number_of_dots += 1; -+ break; -+ default: -+ Py_UNREACHABLE(); - } -+ } - -- return number_of_dots; -+ return number_of_dots; - } - - /* Creates an alias with '*' as the identifier name */ --alias_ty --_PyPegen_alias_for_star(Parser *p, int lineno, int col_offset, int end_lineno, -- int end_col_offset, PyArena *arena) { -- PyObject *str = PyUnicode_InternFromString("*"); -- if (!str) { -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, str) < 0) { -- Py_DECREF(str); -- return NULL; -- } -- return _PyAST_alias(str, NULL, lineno, col_offset, end_lineno, end_col_offset, arena); -+alias_ty _PyPegen_alias_for_star(Parser *p, int lineno, int col_offset, -+ int end_lineno, int end_col_offset, -+ PyArena *arena) { -+ PyObject *str = PyUnicode_InternFromString("*"); -+ if (!str) { -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, str) < 0) { -+ Py_DECREF(str); -+ return NULL; -+ } -+ return _PyAST_alias(str, NULL, lineno, col_offset, end_lineno, end_col_offset, -+ arena); - } - - /* Creates a new asdl_seq* with the identifiers of all the names in seq */ --asdl_identifier_seq * --_PyPegen_map_names_to_ids(Parser *p, asdl_expr_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- assert(len > 0); -- -- asdl_identifier_seq *new_seq = _Py_asdl_identifier_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- expr_ty e = asdl_seq_GET(seq, i); -- asdl_seq_SET(new_seq, i, e->v.Name.id); -- } -- return new_seq; -+asdl_identifier_seq *_PyPegen_map_names_to_ids(Parser *p, asdl_expr_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ assert(len > 0); -+ -+ asdl_identifier_seq *new_seq = _Py_asdl_identifier_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ expr_ty e = asdl_seq_GET(seq, i); -+ asdl_seq_SET(new_seq, i, e->v.Name.id); -+ } -+ return new_seq; - } 
- - /* Constructs a CmpopExprPair */ --CmpopExprPair * --_PyPegen_cmpop_expr_pair(Parser *p, cmpop_ty cmpop, expr_ty expr) --{ -- assert(expr != NULL); -- CmpopExprPair *a = _PyArena_Malloc(p->arena, sizeof(CmpopExprPair)); -- if (!a) { -- return NULL; -- } -- a->cmpop = cmpop; -- a->expr = expr; -- return a; -+CmpopExprPair *_PyPegen_cmpop_expr_pair(Parser *p, cmpop_ty cmpop, -+ expr_ty expr) { -+ assert(expr != NULL); -+ CmpopExprPair *a = _PyArena_Malloc(p->arena, sizeof(CmpopExprPair)); -+ if (!a) { -+ return NULL; -+ } -+ a->cmpop = cmpop; -+ a->expr = expr; -+ return a; - } - --asdl_int_seq * --_PyPegen_get_cmpops(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- assert(len > 0); -+asdl_int_seq *_PyPegen_get_cmpops(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ assert(len > 0); - -- asdl_int_seq *new_seq = _Py_asdl_int_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- CmpopExprPair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->cmpop); -- } -- return new_seq; -+ asdl_int_seq *new_seq = _Py_asdl_int_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ CmpopExprPair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, i, pair->cmpop); -+ } -+ return new_seq; - } - --asdl_expr_seq * --_PyPegen_get_exprs(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- assert(len > 0); -+asdl_expr_seq *_PyPegen_get_exprs(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ assert(len > 0); - -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- CmpopExprPair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->expr); -- } -- return new_seq; -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ CmpopExprPair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, i, pair->expr); -+ } -+ return new_seq; - } - --/* Creates an asdl_seq* where all the elements have been changed to have ctx as context */ --static asdl_expr_seq * --_set_seq_context(Parser *p, asdl_expr_seq *seq, expr_context_ty ctx) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- if (len == 0) { -- return NULL; -- } -+/* Creates an asdl_seq* where all the elements have been changed to have ctx as -+ * context */ -+static asdl_expr_seq *_set_seq_context(Parser *p, asdl_expr_seq *seq, -+ expr_context_ty ctx) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ if (len == 0) { -+ return NULL; -+ } - -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- expr_ty e = asdl_seq_GET(seq, i); -- asdl_seq_SET(new_seq, i, _PyPegen_set_expr_context(p, e, ctx)); -- } -- return new_seq; -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ expr_ty e = asdl_seq_GET(seq, i); -+ asdl_seq_SET(new_seq, i, _PyPegen_set_expr_context(p, e, ctx)); -+ } -+ return new_seq; - } - --static expr_ty --_set_name_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_Name(e->v.Name.id, ctx, EXTRA_EXPR(e, e)); -+static expr_ty _set_name_context(Parser *p, expr_ty e, expr_context_ty ctx) { -+ return _PyAST_Name(e->v.Name.id, ctx, 
EXTRA_EXPR(e, e)); - } - --static expr_ty --_set_tuple_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_Tuple( -- _set_seq_context(p, e->v.Tuple.elts, ctx), -- ctx, -- EXTRA_EXPR(e, e)); -+static expr_ty _set_tuple_context(Parser *p, expr_ty e, expr_context_ty ctx) { -+ return _PyAST_Tuple(_set_seq_context(p, e->v.Tuple.elts, ctx), ctx, -+ EXTRA_EXPR(e, e)); - } - --static expr_ty --_set_list_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_List( -- _set_seq_context(p, e->v.List.elts, ctx), -- ctx, -- EXTRA_EXPR(e, e)); -+static expr_ty _set_list_context(Parser *p, expr_ty e, expr_context_ty ctx) { -+ return _PyAST_List(_set_seq_context(p, e->v.List.elts, ctx), ctx, -+ EXTRA_EXPR(e, e)); - } - --static expr_ty --_set_subscript_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_Subscript(e->v.Subscript.value, e->v.Subscript.slice, -- ctx, EXTRA_EXPR(e, e)); -+static expr_ty _set_subscript_context(Parser *p, expr_ty e, -+ expr_context_ty ctx) { -+ return _PyAST_Subscript(e->v.Subscript.value, e->v.Subscript.slice, ctx, -+ EXTRA_EXPR(e, e)); - } - --static expr_ty --_set_attribute_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_Attribute(e->v.Attribute.value, e->v.Attribute.attr, -- ctx, EXTRA_EXPR(e, e)); -+static expr_ty _set_attribute_context(Parser *p, expr_ty e, -+ expr_context_ty ctx) { -+ return _PyAST_Attribute(e->v.Attribute.value, e->v.Attribute.attr, ctx, -+ EXTRA_EXPR(e, e)); - } - --static expr_ty --_set_starred_context(Parser *p, expr_ty e, expr_context_ty ctx) --{ -- return _PyAST_Starred(_PyPegen_set_expr_context(p, e->v.Starred.value, ctx), -- ctx, EXTRA_EXPR(e, e)); -+static expr_ty _set_starred_context(Parser *p, expr_ty e, expr_context_ty ctx) { -+ return _PyAST_Starred(_PyPegen_set_expr_context(p, e->v.Starred.value, ctx), -+ ctx, EXTRA_EXPR(e, e)); - } - - /* Creates an `expr_ty` equivalent to `expr` but with `ctx` as context */ --expr_ty --_PyPegen_set_expr_context(Parser *p, expr_ty expr, expr_context_ty ctx) --{ -- assert(expr != NULL); -- -- expr_ty new = NULL; -- switch (expr->kind) { -- case Name_kind: -- new = _set_name_context(p, expr, ctx); -- break; -- case Tuple_kind: -- new = _set_tuple_context(p, expr, ctx); -- break; -- case List_kind: -- new = _set_list_context(p, expr, ctx); -- break; -- case Subscript_kind: -- new = _set_subscript_context(p, expr, ctx); -- break; -- case Attribute_kind: -- new = _set_attribute_context(p, expr, ctx); -- break; -- case Starred_kind: -- new = _set_starred_context(p, expr, ctx); -- break; -- default: -- new = expr; -- } -- return new; --} -- --/* Constructs a KeyValuePair that is used when parsing a dict's key value pairs */ --KeyValuePair * --_PyPegen_key_value_pair(Parser *p, expr_ty key, expr_ty value) --{ -- KeyValuePair *a = _PyArena_Malloc(p->arena, sizeof(KeyValuePair)); -- if (!a) { -- return NULL; -- } -- a->key = key; -- a->value = value; -- return a; -+expr_ty _PyPegen_set_expr_context(Parser *p, expr_ty expr, -+ expr_context_ty ctx) { -+ assert(expr != NULL); -+ -+ expr_ty new = NULL; -+ switch (expr->kind) { -+ case Name_kind: -+ new = _set_name_context(p, expr, ctx); -+ break; -+ case Tuple_kind: -+ new = _set_tuple_context(p, expr, ctx); -+ break; -+ case List_kind: -+ new = _set_list_context(p, expr, ctx); -+ break; -+ case Subscript_kind: -+ new = _set_subscript_context(p, expr, ctx); -+ break; -+ case Attribute_kind: -+ new = _set_attribute_context(p, expr, ctx); -+ break; -+ case Starred_kind: -+ new = 
_set_starred_context(p, expr, ctx); -+ break; -+ default: -+ new = expr; -+ } -+ return new; -+} -+ -+/* Constructs a KeyValuePair that is used when parsing a dict's key value pairs -+ */ -+KeyValuePair *_PyPegen_key_value_pair(Parser *p, expr_ty key, expr_ty value) { -+ KeyValuePair *a = _PyArena_Malloc(p->arena, sizeof(KeyValuePair)); -+ if (!a) { -+ return NULL; -+ } -+ a->key = key; -+ a->value = value; -+ return a; - } - - /* Extracts all keys from an asdl_seq* of KeyValuePair*'s */ --asdl_expr_seq * --_PyPegen_get_keys(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- KeyValuePair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->key); -- } -- return new_seq; -+asdl_expr_seq *_PyPegen_get_keys(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ KeyValuePair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, i, pair->key); -+ } -+ return new_seq; - } - - /* Extracts all values from an asdl_seq* of KeyValuePair*'s */ --asdl_expr_seq * --_PyPegen_get_values(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- KeyValuePair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->value); -- } -- return new_seq; --} -- --/* Constructs a KeyPatternPair that is used when parsing mapping & class patterns */ --KeyPatternPair * --_PyPegen_key_pattern_pair(Parser *p, expr_ty key, pattern_ty pattern) --{ -- KeyPatternPair *a = _PyArena_Malloc(p->arena, sizeof(KeyPatternPair)); -- if (!a) { -- return NULL; -- } -- a->key = key; -- a->pattern = pattern; -- return a; -+asdl_expr_seq *_PyPegen_get_values(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ KeyValuePair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, i, pair->value); -+ } -+ return new_seq; -+} -+ -+/* Constructs a KeyPatternPair that is used when parsing mapping & class -+ * patterns */ -+KeyPatternPair *_PyPegen_key_pattern_pair(Parser *p, expr_ty key, -+ pattern_ty pattern) { -+ KeyPatternPair *a = _PyArena_Malloc(p->arena, sizeof(KeyPatternPair)); -+ if (!a) { -+ return NULL; -+ } -+ a->key = key; -+ a->pattern = pattern; -+ return a; - } - - /* Extracts all keys from an asdl_seq* of KeyPatternPair*'s */ --asdl_expr_seq * --_PyPegen_get_pattern_keys(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- KeyPatternPair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->key); -- } -- return new_seq; -+asdl_expr_seq *_PyPegen_get_pattern_keys(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ KeyPatternPair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, 
i, pair->key); -+ } -+ return new_seq; - } - - /* Extracts all patterns from an asdl_seq* of KeyPatternPair*'s */ --asdl_pattern_seq * --_PyPegen_get_patterns(Parser *p, asdl_seq *seq) --{ -- Py_ssize_t len = asdl_seq_LEN(seq); -- asdl_pattern_seq *new_seq = _Py_asdl_pattern_seq_new(len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- KeyPatternPair *pair = asdl_seq_GET_UNTYPED(seq, i); -- asdl_seq_SET(new_seq, i, pair->pattern); -- } -- return new_seq; -+asdl_pattern_seq *_PyPegen_get_patterns(Parser *p, asdl_seq *seq) { -+ Py_ssize_t len = asdl_seq_LEN(seq); -+ asdl_pattern_seq *new_seq = _Py_asdl_pattern_seq_new(len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ KeyPatternPair *pair = asdl_seq_GET_UNTYPED(seq, i); -+ asdl_seq_SET(new_seq, i, pair->pattern); -+ } -+ return new_seq; - } - - /* Constructs a NameDefaultPair */ --NameDefaultPair * --_PyPegen_name_default_pair(Parser *p, arg_ty arg, expr_ty value, Token *tc) --{ -- NameDefaultPair *a = _PyArena_Malloc(p->arena, sizeof(NameDefaultPair)); -- if (!a) { -- return NULL; -- } -- a->arg = _PyPegen_add_type_comment_to_arg(p, arg, tc); -- a->value = value; -- return a; -+NameDefaultPair *_PyPegen_name_default_pair(Parser *p, arg_ty arg, -+ expr_ty value, Token *tc) { -+ NameDefaultPair *a = _PyArena_Malloc(p->arena, sizeof(NameDefaultPair)); -+ if (!a) { -+ return NULL; -+ } -+ a->arg = _PyPegen_add_type_comment_to_arg(p, arg, tc); -+ a->value = value; -+ return a; - } - - /* Constructs a SlashWithDefault */ --SlashWithDefault * --_PyPegen_slash_with_default(Parser *p, asdl_arg_seq *plain_names, asdl_seq *names_with_defaults) --{ -- SlashWithDefault *a = _PyArena_Malloc(p->arena, sizeof(SlashWithDefault)); -- if (!a) { -- return NULL; -- } -- a->plain_names = plain_names; -- a->names_with_defaults = names_with_defaults; -- return a; -+SlashWithDefault *_PyPegen_slash_with_default(Parser *p, -+ asdl_arg_seq *plain_names, -+ asdl_seq *names_with_defaults) { -+ SlashWithDefault *a = _PyArena_Malloc(p->arena, sizeof(SlashWithDefault)); -+ if (!a) { -+ return NULL; -+ } -+ a->plain_names = plain_names; -+ a->names_with_defaults = names_with_defaults; -+ return a; - } - - /* Constructs a StarEtc */ --StarEtc * --_PyPegen_star_etc(Parser *p, arg_ty vararg, asdl_seq *kwonlyargs, arg_ty kwarg) --{ -- StarEtc *a = _PyArena_Malloc(p->arena, sizeof(StarEtc)); -- if (!a) { -- return NULL; -- } -- a->vararg = vararg; -- a->kwonlyargs = kwonlyargs; -- a->kwarg = kwarg; -- return a; --} -- --asdl_seq * --_PyPegen_join_sequences(Parser *p, asdl_seq *a, asdl_seq *b) --{ -- Py_ssize_t first_len = asdl_seq_LEN(a); -- Py_ssize_t second_len = asdl_seq_LEN(b); -- asdl_seq *new_seq = (asdl_seq*)_Py_asdl_generic_seq_new(first_len + second_len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- -- int k = 0; -- for (Py_ssize_t i = 0; i < first_len; i++) { -- asdl_seq_SET_UNTYPED(new_seq, k++, asdl_seq_GET_UNTYPED(a, i)); -- } -- for (Py_ssize_t i = 0; i < second_len; i++) { -- asdl_seq_SET_UNTYPED(new_seq, k++, asdl_seq_GET_UNTYPED(b, i)); -- } -- -- return new_seq; --} -- --static asdl_arg_seq* --_get_names(Parser *p, asdl_seq *names_with_defaults) --{ -- Py_ssize_t len = asdl_seq_LEN(names_with_defaults); -- asdl_arg_seq *seq = _Py_asdl_arg_seq_new(len, p->arena); -- if (!seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- NameDefaultPair *pair = asdl_seq_GET_UNTYPED(names_with_defaults, i); -- asdl_seq_SET(seq, i, pair->arg); -- } -- 
return seq; --} -- --static asdl_expr_seq * --_get_defaults(Parser *p, asdl_seq *names_with_defaults) --{ -- Py_ssize_t len = asdl_seq_LEN(names_with_defaults); -- asdl_expr_seq *seq = _Py_asdl_expr_seq_new(len, p->arena); -- if (!seq) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < len; i++) { -- NameDefaultPair *pair = asdl_seq_GET_UNTYPED(names_with_defaults, i); -- asdl_seq_SET(seq, i, pair->value); -- } -- return seq; --} -- --static int --_make_posonlyargs(Parser *p, -- asdl_arg_seq *slash_without_default, -- SlashWithDefault *slash_with_default, -- asdl_arg_seq **posonlyargs) { -- if (slash_without_default != NULL) { -- *posonlyargs = slash_without_default; -- } -- else if (slash_with_default != NULL) { -- asdl_arg_seq *slash_with_default_names = -- _get_names(p, slash_with_default->names_with_defaults); -- if (!slash_with_default_names) { -- return -1; -- } -- *posonlyargs = (asdl_arg_seq*)_PyPegen_join_sequences( -- p, -- (asdl_seq*)slash_with_default->plain_names, -- (asdl_seq*)slash_with_default_names); -- } -- else { -- *posonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -- } -- return *posonlyargs == NULL ? -1 : 0; --} -- --static int --_make_posargs(Parser *p, -- asdl_arg_seq *plain_names, -- asdl_seq *names_with_default, -- asdl_arg_seq **posargs) { -- if (plain_names != NULL && names_with_default != NULL) { -- asdl_arg_seq *names_with_default_names = _get_names(p, names_with_default); -- if (!names_with_default_names) { -- return -1; -- } -- *posargs = (asdl_arg_seq*)_PyPegen_join_sequences( -- p,(asdl_seq*)plain_names, (asdl_seq*)names_with_default_names); -- } -- else if (plain_names == NULL && names_with_default != NULL) { -- *posargs = _get_names(p, names_with_default); -- } -- else if (plain_names != NULL && names_with_default == NULL) { -- *posargs = plain_names; -- } -- else { -- *posargs = _Py_asdl_arg_seq_new(0, p->arena); -- } -- return *posargs == NULL ? -1 : 0; --} -- --static int --_make_posdefaults(Parser *p, -- SlashWithDefault *slash_with_default, -- asdl_seq *names_with_default, -- asdl_expr_seq **posdefaults) { -- if (slash_with_default != NULL && names_with_default != NULL) { -- asdl_expr_seq *slash_with_default_values = -- _get_defaults(p, slash_with_default->names_with_defaults); -- if (!slash_with_default_values) { -- return -1; -- } -- asdl_expr_seq *names_with_default_values = _get_defaults(p, names_with_default); -- if (!names_with_default_values) { -- return -1; -- } -- *posdefaults = (asdl_expr_seq*)_PyPegen_join_sequences( -- p, -- (asdl_seq*)slash_with_default_values, -- (asdl_seq*)names_with_default_values); -- } -- else if (slash_with_default == NULL && names_with_default != NULL) { -- *posdefaults = _get_defaults(p, names_with_default); -- } -- else if (slash_with_default != NULL && names_with_default == NULL) { -- *posdefaults = _get_defaults(p, slash_with_default->names_with_defaults); -- } -- else { -- *posdefaults = _Py_asdl_expr_seq_new(0, p->arena); -- } -- return *posdefaults == NULL ? 
-1 : 0; --} -- --static int --_make_kwargs(Parser *p, StarEtc *star_etc, -- asdl_arg_seq **kwonlyargs, -- asdl_expr_seq **kwdefaults) { -- if (star_etc != NULL && star_etc->kwonlyargs != NULL) { -- *kwonlyargs = _get_names(p, star_etc->kwonlyargs); -- } -- else { -- *kwonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -- } -- -- if (*kwonlyargs == NULL) { -- return -1; -- } -- -- if (star_etc != NULL && star_etc->kwonlyargs != NULL) { -- *kwdefaults = _get_defaults(p, star_etc->kwonlyargs); -- } -- else { -- *kwdefaults = _Py_asdl_expr_seq_new(0, p->arena); -- } -- -- if (*kwdefaults == NULL) { -- return -1; -- } -- -- return 0; --} -- --/* Constructs an arguments_ty object out of all the parsed constructs in the parameters rule */ --arguments_ty --_PyPegen_make_arguments(Parser *p, asdl_arg_seq *slash_without_default, -- SlashWithDefault *slash_with_default, asdl_arg_seq *plain_names, -- asdl_seq *names_with_default, StarEtc *star_etc) --{ -- asdl_arg_seq *posonlyargs; -- if (_make_posonlyargs(p, slash_without_default, slash_with_default, &posonlyargs) == -1) { -- return NULL; -- } -- -- asdl_arg_seq *posargs; -- if (_make_posargs(p, plain_names, names_with_default, &posargs) == -1) { -- return NULL; -- } -- -- asdl_expr_seq *posdefaults; -- if (_make_posdefaults(p,slash_with_default, names_with_default, &posdefaults) == -1) { -- return NULL; -- } -- -- arg_ty vararg = NULL; -- if (star_etc != NULL && star_etc->vararg != NULL) { -- vararg = star_etc->vararg; -- } -- -- asdl_arg_seq *kwonlyargs; -- asdl_expr_seq *kwdefaults; -- if (_make_kwargs(p, star_etc, &kwonlyargs, &kwdefaults) == -1) { -- return NULL; -- } -- -- arg_ty kwarg = NULL; -- if (star_etc != NULL && star_etc->kwarg != NULL) { -- kwarg = star_etc->kwarg; -- } -- -- return _PyAST_arguments(posonlyargs, posargs, vararg, kwonlyargs, -- kwdefaults, kwarg, posdefaults, p->arena); --} -- -- --/* Constructs an empty arguments_ty object, that gets used when a function accepts no -- * arguments. 
*/ --arguments_ty --_PyPegen_empty_arguments(Parser *p) --{ -- asdl_arg_seq *posonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -- if (!posonlyargs) { -- return NULL; -- } -- asdl_arg_seq *posargs = _Py_asdl_arg_seq_new(0, p->arena); -- if (!posargs) { -- return NULL; -- } -- asdl_expr_seq *posdefaults = _Py_asdl_expr_seq_new(0, p->arena); -- if (!posdefaults) { -- return NULL; -- } -- asdl_arg_seq *kwonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -- if (!kwonlyargs) { -- return NULL; -- } -- asdl_expr_seq *kwdefaults = _Py_asdl_expr_seq_new(0, p->arena); -- if (!kwdefaults) { -- return NULL; -- } -- -- return _PyAST_arguments(posonlyargs, posargs, NULL, kwonlyargs, -- kwdefaults, NULL, posdefaults, p->arena); -+StarEtc *_PyPegen_star_etc(Parser *p, arg_ty vararg, asdl_seq *kwonlyargs, -+ arg_ty kwarg) { -+ StarEtc *a = _PyArena_Malloc(p->arena, sizeof(StarEtc)); -+ if (!a) { -+ return NULL; -+ } -+ a->vararg = vararg; -+ a->kwonlyargs = kwonlyargs; -+ a->kwarg = kwarg; -+ return a; -+} -+ -+asdl_seq *_PyPegen_join_sequences(Parser *p, asdl_seq *a, asdl_seq *b) { -+ Py_ssize_t first_len = asdl_seq_LEN(a); -+ Py_ssize_t second_len = asdl_seq_LEN(b); -+ asdl_seq *new_seq = -+ (asdl_seq *)_Py_asdl_generic_seq_new(first_len + second_len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ -+ int k = 0; -+ for (Py_ssize_t i = 0; i < first_len; i++) { -+ asdl_seq_SET_UNTYPED(new_seq, k++, asdl_seq_GET_UNTYPED(a, i)); -+ } -+ for (Py_ssize_t i = 0; i < second_len; i++) { -+ asdl_seq_SET_UNTYPED(new_seq, k++, asdl_seq_GET_UNTYPED(b, i)); -+ } -+ -+ return new_seq; -+} -+ -+static asdl_arg_seq *_get_names(Parser *p, asdl_seq *names_with_defaults) { -+ Py_ssize_t len = asdl_seq_LEN(names_with_defaults); -+ asdl_arg_seq *seq = _Py_asdl_arg_seq_new(len, p->arena); -+ if (!seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ NameDefaultPair *pair = asdl_seq_GET_UNTYPED(names_with_defaults, i); -+ asdl_seq_SET(seq, i, pair->arg); -+ } -+ return seq; -+} -+ -+static asdl_expr_seq *_get_defaults(Parser *p, asdl_seq *names_with_defaults) { -+ Py_ssize_t len = asdl_seq_LEN(names_with_defaults); -+ asdl_expr_seq *seq = _Py_asdl_expr_seq_new(len, p->arena); -+ if (!seq) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < len; i++) { -+ NameDefaultPair *pair = asdl_seq_GET_UNTYPED(names_with_defaults, i); -+ asdl_seq_SET(seq, i, pair->value); -+ } -+ return seq; -+} -+ -+static int _make_posonlyargs(Parser *p, asdl_arg_seq *slash_without_default, -+ SlashWithDefault *slash_with_default, -+ asdl_arg_seq **posonlyargs) { -+ if (slash_without_default != NULL) { -+ *posonlyargs = slash_without_default; -+ } else if (slash_with_default != NULL) { -+ asdl_arg_seq *slash_with_default_names = -+ _get_names(p, slash_with_default->names_with_defaults); -+ if (!slash_with_default_names) { -+ return -1; -+ } -+ *posonlyargs = (asdl_arg_seq *)_PyPegen_join_sequences( -+ p, (asdl_seq *)slash_with_default->plain_names, -+ (asdl_seq *)slash_with_default_names); -+ } else { -+ *posonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -+ } -+ return *posonlyargs == NULL ? 
-1 : 0; -+} -+ -+static int _make_posargs(Parser *p, asdl_arg_seq *plain_names, -+ asdl_seq *names_with_default, asdl_arg_seq **posargs) { -+ if (plain_names != NULL && names_with_default != NULL) { -+ asdl_arg_seq *names_with_default_names = _get_names(p, names_with_default); -+ if (!names_with_default_names) { -+ return -1; -+ } -+ *posargs = (asdl_arg_seq *)_PyPegen_join_sequences( -+ p, (asdl_seq *)plain_names, (asdl_seq *)names_with_default_names); -+ } else if (plain_names == NULL && names_with_default != NULL) { -+ *posargs = _get_names(p, names_with_default); -+ } else if (plain_names != NULL && names_with_default == NULL) { -+ *posargs = plain_names; -+ } else { -+ *posargs = _Py_asdl_arg_seq_new(0, p->arena); -+ } -+ return *posargs == NULL ? -1 : 0; -+} -+ -+static int _make_posdefaults(Parser *p, SlashWithDefault *slash_with_default, -+ asdl_seq *names_with_default, -+ asdl_expr_seq **posdefaults) { -+ if (slash_with_default != NULL && names_with_default != NULL) { -+ asdl_expr_seq *slash_with_default_values = -+ _get_defaults(p, slash_with_default->names_with_defaults); -+ if (!slash_with_default_values) { -+ return -1; -+ } -+ asdl_expr_seq *names_with_default_values = -+ _get_defaults(p, names_with_default); -+ if (!names_with_default_values) { -+ return -1; -+ } -+ *posdefaults = (asdl_expr_seq *)_PyPegen_join_sequences( -+ p, (asdl_seq *)slash_with_default_values, -+ (asdl_seq *)names_with_default_values); -+ } else if (slash_with_default == NULL && names_with_default != NULL) { -+ *posdefaults = _get_defaults(p, names_with_default); -+ } else if (slash_with_default != NULL && names_with_default == NULL) { -+ *posdefaults = _get_defaults(p, slash_with_default->names_with_defaults); -+ } else { -+ *posdefaults = _Py_asdl_expr_seq_new(0, p->arena); -+ } -+ return *posdefaults == NULL ? 
-1 : 0; -+} -+ -+static int _make_kwargs(Parser *p, StarEtc *star_etc, asdl_arg_seq **kwonlyargs, -+ asdl_expr_seq **kwdefaults) { -+ if (star_etc != NULL && star_etc->kwonlyargs != NULL) { -+ *kwonlyargs = _get_names(p, star_etc->kwonlyargs); -+ } else { -+ *kwonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -+ } -+ -+ if (*kwonlyargs == NULL) { -+ return -1; -+ } -+ -+ if (star_etc != NULL && star_etc->kwonlyargs != NULL) { -+ *kwdefaults = _get_defaults(p, star_etc->kwonlyargs); -+ } else { -+ *kwdefaults = _Py_asdl_expr_seq_new(0, p->arena); -+ } -+ -+ if (*kwdefaults == NULL) { -+ return -1; -+ } -+ -+ return 0; -+} -+ -+/* Constructs an arguments_ty object out of all the parsed constructs in the -+ * parameters rule */ -+arguments_ty _PyPegen_make_arguments(Parser *p, -+ asdl_arg_seq *slash_without_default, -+ SlashWithDefault *slash_with_default, -+ asdl_arg_seq *plain_names, -+ asdl_seq *names_with_default, -+ StarEtc *star_etc) { -+ asdl_arg_seq *posonlyargs; -+ if (_make_posonlyargs(p, slash_without_default, slash_with_default, -+ &posonlyargs) == -1) { -+ return NULL; -+ } -+ -+ asdl_arg_seq *posargs; -+ if (_make_posargs(p, plain_names, names_with_default, &posargs) == -1) { -+ return NULL; -+ } -+ -+ asdl_expr_seq *posdefaults; -+ if (_make_posdefaults(p, slash_with_default, names_with_default, -+ &posdefaults) == -1) { -+ return NULL; -+ } -+ -+ arg_ty vararg = NULL; -+ if (star_etc != NULL && star_etc->vararg != NULL) { -+ vararg = star_etc->vararg; -+ } -+ -+ asdl_arg_seq *kwonlyargs; -+ asdl_expr_seq *kwdefaults; -+ if (_make_kwargs(p, star_etc, &kwonlyargs, &kwdefaults) == -1) { -+ return NULL; -+ } -+ -+ arg_ty kwarg = NULL; -+ if (star_etc != NULL && star_etc->kwarg != NULL) { -+ kwarg = star_etc->kwarg; -+ } -+ -+ return _PyAST_arguments(posonlyargs, posargs, vararg, kwonlyargs, kwdefaults, -+ kwarg, posdefaults, p->arena); -+} -+ -+/* Constructs an empty arguments_ty object, that gets used when a function -+ * accepts no arguments. 
*/ -+arguments_ty _PyPegen_empty_arguments(Parser *p) { -+ asdl_arg_seq *posonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -+ if (!posonlyargs) { -+ return NULL; -+ } -+ asdl_arg_seq *posargs = _Py_asdl_arg_seq_new(0, p->arena); -+ if (!posargs) { -+ return NULL; -+ } -+ asdl_expr_seq *posdefaults = _Py_asdl_expr_seq_new(0, p->arena); -+ if (!posdefaults) { -+ return NULL; -+ } -+ asdl_arg_seq *kwonlyargs = _Py_asdl_arg_seq_new(0, p->arena); -+ if (!kwonlyargs) { -+ return NULL; -+ } -+ asdl_expr_seq *kwdefaults = _Py_asdl_expr_seq_new(0, p->arena); -+ if (!kwdefaults) { -+ return NULL; -+ } -+ -+ return _PyAST_arguments(posonlyargs, posargs, NULL, kwonlyargs, kwdefaults, -+ NULL, posdefaults, p->arena); - } - - /* Encapsulates the value of an operator_ty into an AugOperator struct */ --AugOperator * --_PyPegen_augoperator(Parser *p, operator_ty kind) --{ -- AugOperator *a = _PyArena_Malloc(p->arena, sizeof(AugOperator)); -- if (!a) { -- return NULL; -- } -- a->kind = kind; -- return a; -+AugOperator *_PyPegen_augoperator(Parser *p, operator_ty kind) { -+ AugOperator *a = _PyArena_Malloc(p->arena, sizeof(AugOperator)); -+ if (!a) { -+ return NULL; -+ } -+ a->kind = kind; -+ return a; - } - - /* Construct a FunctionDef equivalent to function_def, but with decorators */ --stmt_ty --_PyPegen_function_def_decorators(Parser *p, asdl_expr_seq *decorators, stmt_ty function_def) --{ -- assert(function_def != NULL); -- if (function_def->kind == AsyncFunctionDef_kind) { -- return _PyAST_AsyncFunctionDef( -- function_def->v.AsyncFunctionDef.name, -- function_def->v.AsyncFunctionDef.args, -- function_def->v.AsyncFunctionDef.body, decorators, -- function_def->v.AsyncFunctionDef.returns, -- function_def->v.AsyncFunctionDef.type_comment, -- function_def->v.AsyncFunctionDef.type_params, -- function_def->lineno, function_def->col_offset, -- function_def->end_lineno, function_def->end_col_offset, p->arena); -- } -- -- return _PyAST_FunctionDef( -- function_def->v.FunctionDef.name, -- function_def->v.FunctionDef.args, -- function_def->v.FunctionDef.body, decorators, -- function_def->v.FunctionDef.returns, -- function_def->v.FunctionDef.type_comment, -- function_def->v.FunctionDef.type_params, -- function_def->lineno, function_def->col_offset, -- function_def->end_lineno, function_def->end_col_offset, p->arena); -+stmt_ty _PyPegen_function_def_decorators(Parser *p, asdl_expr_seq *decorators, -+ stmt_ty function_def) { -+ assert(function_def != NULL); -+ if (function_def->kind == AsyncFunctionDef_kind) { -+ return _PyAST_AsyncFunctionDef( -+ function_def->v.AsyncFunctionDef.name, -+ function_def->v.AsyncFunctionDef.args, -+ function_def->v.AsyncFunctionDef.body, decorators, -+ function_def->v.AsyncFunctionDef.returns, -+ function_def->v.AsyncFunctionDef.type_comment, -+ function_def->v.AsyncFunctionDef.type_params, function_def->lineno, -+ function_def->col_offset, function_def->end_lineno, -+ function_def->end_col_offset, p->arena); -+ } -+ -+ return _PyAST_FunctionDef( -+ function_def->v.FunctionDef.name, function_def->v.FunctionDef.args, -+ function_def->v.FunctionDef.body, decorators, -+ function_def->v.FunctionDef.returns, -+ function_def->v.FunctionDef.type_comment, -+ function_def->v.FunctionDef.type_params, function_def->lineno, -+ function_def->col_offset, function_def->end_lineno, -+ function_def->end_col_offset, p->arena); - } - - /* Construct a ClassDef equivalent to class_def, but with decorators */ --stmt_ty --_PyPegen_class_def_decorators(Parser *p, asdl_expr_seq *decorators, stmt_ty class_def) 
--{ -- assert(class_def != NULL); -- return _PyAST_ClassDef( -- class_def->v.ClassDef.name, -- class_def->v.ClassDef.bases, class_def->v.ClassDef.keywords, -- class_def->v.ClassDef.body, decorators, -- class_def->v.ClassDef.type_params, -- class_def->lineno, class_def->col_offset, class_def->end_lineno, -- class_def->end_col_offset, p->arena); -+stmt_ty _PyPegen_class_def_decorators(Parser *p, asdl_expr_seq *decorators, -+ stmt_ty class_def) { -+ assert(class_def != NULL); -+ return _PyAST_ClassDef( -+ class_def->v.ClassDef.name, class_def->v.ClassDef.bases, -+ class_def->v.ClassDef.keywords, class_def->v.ClassDef.body, decorators, -+ class_def->v.ClassDef.type_params, class_def->lineno, -+ class_def->col_offset, class_def->end_lineno, class_def->end_col_offset, -+ p->arena); - } - - /* Construct a KeywordOrStarred */ --KeywordOrStarred * --_PyPegen_keyword_or_starred(Parser *p, void *element, int is_keyword) --{ -- KeywordOrStarred *a = _PyArena_Malloc(p->arena, sizeof(KeywordOrStarred)); -- if (!a) { -- return NULL; -- } -- a->element = element; -- a->is_keyword = is_keyword; -- return a; --} -- --/* Get the number of starred expressions in an asdl_seq* of KeywordOrStarred*s */ --static int --_seq_number_of_starred_exprs(asdl_seq *seq) --{ -- int n = 0; -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(seq); i < l; i++) { -- KeywordOrStarred *k = asdl_seq_GET_UNTYPED(seq, i); -- if (!k->is_keyword) { -- n++; -- } -- } -- return n; -+KeywordOrStarred *_PyPegen_keyword_or_starred(Parser *p, void *element, -+ int is_keyword) { -+ KeywordOrStarred *a = _PyArena_Malloc(p->arena, sizeof(KeywordOrStarred)); -+ if (!a) { -+ return NULL; -+ } -+ a->element = element; -+ a->is_keyword = is_keyword; -+ return a; -+} -+ -+/* Get the number of starred expressions in an asdl_seq* of KeywordOrStarred*s -+ */ -+static int _seq_number_of_starred_exprs(asdl_seq *seq) { -+ int n = 0; -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(seq); i < l; i++) { -+ KeywordOrStarred *k = asdl_seq_GET_UNTYPED(seq, i); -+ if (!k->is_keyword) { -+ n++; -+ } -+ } -+ return n; - } - - /* Extract the starred expressions of an asdl_seq* of KeywordOrStarred*s */ --asdl_expr_seq * --_PyPegen_seq_extract_starred_exprs(Parser *p, asdl_seq *kwargs) --{ -- int new_len = _seq_number_of_starred_exprs(kwargs); -- if (new_len == 0) { -- return NULL; -- } -- asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(new_len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- -- int idx = 0; -- for (Py_ssize_t i = 0, len = asdl_seq_LEN(kwargs); i < len; i++) { -- KeywordOrStarred *k = asdl_seq_GET_UNTYPED(kwargs, i); -- if (!k->is_keyword) { -- asdl_seq_SET(new_seq, idx++, k->element); -- } -- } -- return new_seq; -+asdl_expr_seq *_PyPegen_seq_extract_starred_exprs(Parser *p, asdl_seq *kwargs) { -+ int new_len = _seq_number_of_starred_exprs(kwargs); -+ if (new_len == 0) { -+ return NULL; -+ } -+ asdl_expr_seq *new_seq = _Py_asdl_expr_seq_new(new_len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ -+ int idx = 0; -+ for (Py_ssize_t i = 0, len = asdl_seq_LEN(kwargs); i < len; i++) { -+ KeywordOrStarred *k = asdl_seq_GET_UNTYPED(kwargs, i); -+ if (!k->is_keyword) { -+ asdl_seq_SET(new_seq, idx++, k->element); -+ } -+ } -+ return new_seq; - } - - /* Return a new asdl_seq* with only the keywords in kwargs */ --asdl_keyword_seq* --_PyPegen_seq_delete_starred_exprs(Parser *p, asdl_seq *kwargs) --{ -- Py_ssize_t len = asdl_seq_LEN(kwargs); -- Py_ssize_t new_len = len - _seq_number_of_starred_exprs(kwargs); -- if (new_len == 0) { -- return NULL; -- } -- 
asdl_keyword_seq *new_seq = _Py_asdl_keyword_seq_new(new_len, p->arena); -- if (!new_seq) { -- return NULL; -- } -- -- int idx = 0; -- for (Py_ssize_t i = 0; i < len; i++) { -- KeywordOrStarred *k = asdl_seq_GET_UNTYPED(kwargs, i); -- if (k->is_keyword) { -- asdl_seq_SET(new_seq, idx++, k->element); -- } -- } -- return new_seq; --} -- --expr_ty --_PyPegen_ensure_imaginary(Parser *p, expr_ty exp) --{ -- if (exp->kind != Constant_kind || !PyComplex_CheckExact(exp->v.Constant.value)) { -- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(exp, "imaginary number required in complex literal"); -- return NULL; -- } -- return exp; --} -- --expr_ty --_PyPegen_ensure_real(Parser *p, expr_ty exp) --{ -- if (exp->kind != Constant_kind || PyComplex_CheckExact(exp->v.Constant.value)) { -- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(exp, "real number required in complex literal"); -- return NULL; -- } -- return exp; --} -- --mod_ty --_PyPegen_make_module(Parser *p, asdl_stmt_seq *a) { -- asdl_type_ignore_seq *type_ignores = NULL; -- Py_ssize_t num = p->type_ignore_comments.num_items; -- if (num > 0) { -- // Turn the raw (comment, lineno) pairs into TypeIgnore objects in the arena -- type_ignores = _Py_asdl_type_ignore_seq_new(num, p->arena); -- if (type_ignores == NULL) { -- return NULL; -- } -- for (int i = 0; i < num; i++) { -- PyObject *tag = _PyPegen_new_type_comment(p, p->type_ignore_comments.items[i].comment); -- if (tag == NULL) { -- return NULL; -- } -- type_ignore_ty ti = _PyAST_TypeIgnore(p->type_ignore_comments.items[i].lineno, -- tag, p->arena); -- if (ti == NULL) { -- return NULL; -- } -- asdl_seq_SET(type_ignores, i, ti); -- } -- } -- return _PyAST_Module(a, type_ignores, p->arena); --} -- --PyObject * --_PyPegen_new_type_comment(Parser *p, const char *s) --{ -- PyObject *res = PyUnicode_DecodeUTF8(s, strlen(s), NULL); -- if (res == NULL) { -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, res) < 0) { -- Py_DECREF(res); -- return NULL; -- } -- return res; --} -- --arg_ty --_PyPegen_add_type_comment_to_arg(Parser *p, arg_ty a, Token *tc) --{ -- if (tc == NULL) { -- return a; -- } -- const char *bytes = PyBytes_AsString(tc->bytes); -- if (bytes == NULL) { -- return NULL; -- } -- PyObject *tco = _PyPegen_new_type_comment(p, bytes); -- if (tco == NULL) { -- return NULL; -- } -- return _PyAST_arg(a->arg, a->annotation, tco, -- a->lineno, a->col_offset, a->end_lineno, a->end_col_offset, -- p->arena); -+asdl_keyword_seq *_PyPegen_seq_delete_starred_exprs(Parser *p, -+ asdl_seq *kwargs) { -+ Py_ssize_t len = asdl_seq_LEN(kwargs); -+ Py_ssize_t new_len = len - _seq_number_of_starred_exprs(kwargs); -+ if (new_len == 0) { -+ return NULL; -+ } -+ asdl_keyword_seq *new_seq = _Py_asdl_keyword_seq_new(new_len, p->arena); -+ if (!new_seq) { -+ return NULL; -+ } -+ -+ int idx = 0; -+ for (Py_ssize_t i = 0; i < len; i++) { -+ KeywordOrStarred *k = asdl_seq_GET_UNTYPED(kwargs, i); -+ if (k->is_keyword) { -+ asdl_seq_SET(new_seq, idx++, k->element); -+ } -+ } -+ return new_seq; -+} -+ -+expr_ty _PyPegen_ensure_imaginary(Parser *p, expr_ty exp) { -+ if (exp->kind != Constant_kind || -+ !PyComplex_CheckExact(exp->v.Constant.value)) { -+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION( -+ exp, "imaginary number required in complex literal"); -+ return NULL; -+ } -+ return exp; -+} -+ -+expr_ty _PyPegen_ensure_real(Parser *p, expr_ty exp) { -+ if (exp->kind != Constant_kind || -+ PyComplex_CheckExact(exp->v.Constant.value)) { -+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION( -+ exp, "real number required in complex literal"); -+ return NULL; -+ } -+ return 
exp; -+} -+ -+mod_ty _PyPegen_make_module(Parser *p, asdl_stmt_seq *a) { -+ asdl_type_ignore_seq *type_ignores = NULL; -+ Py_ssize_t num = p->type_ignore_comments.num_items; -+ if (num > 0) { -+ // Turn the raw (comment, lineno) pairs into TypeIgnore objects in the arena -+ type_ignores = _Py_asdl_type_ignore_seq_new(num, p->arena); -+ if (type_ignores == NULL) { -+ return NULL; -+ } -+ for (int i = 0; i < num; i++) { -+ PyObject *tag = _PyPegen_new_type_comment( -+ p, p->type_ignore_comments.items[i].comment); -+ if (tag == NULL) { -+ return NULL; -+ } -+ type_ignore_ty ti = _PyAST_TypeIgnore( -+ p->type_ignore_comments.items[i].lineno, tag, p->arena); -+ if (ti == NULL) { -+ return NULL; -+ } -+ asdl_seq_SET(type_ignores, i, ti); -+ } -+ } -+ return _PyAST_Module(a, type_ignores, p->arena); -+} -+ -+PyObject *_PyPegen_new_type_comment(Parser *p, const char *s) { -+ PyObject *res = PyUnicode_DecodeUTF8(s, strlen(s), NULL); -+ if (res == NULL) { -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, res) < 0) { -+ Py_DECREF(res); -+ return NULL; -+ } -+ return res; -+} -+ -+arg_ty _PyPegen_add_type_comment_to_arg(Parser *p, arg_ty a, Token *tc) { -+ if (tc == NULL) { -+ return a; -+ } -+ const char *bytes = PyBytes_AsString(tc->bytes); -+ if (bytes == NULL) { -+ return NULL; -+ } -+ PyObject *tco = _PyPegen_new_type_comment(p, bytes); -+ if (tco == NULL) { -+ return NULL; -+ } -+ return _PyAST_arg(a->arg, a->annotation, tco, a->lineno, a->col_offset, -+ a->end_lineno, a->end_col_offset, p->arena); - } - - /* Checks if the NOTEQUAL token is valid given the current parser flags - 0 indicates success and nonzero indicates failure (an exception may be set) */ --int --_PyPegen_check_barry_as_flufl(Parser *p, Token* t) { -- assert(t->bytes != NULL); -- assert(t->type == NOTEQUAL); -- -- const char* tok_str = PyBytes_AS_STRING(t->bytes); -- if (p->flags & PyPARSE_BARRY_AS_BDFL && strcmp(tok_str, "<>") != 0) { -- RAISE_SYNTAX_ERROR("with Barry as BDFL, use '<>' instead of '!='"); -- return -1; -- } -- if (!(p->flags & PyPARSE_BARRY_AS_BDFL)) { -- return strcmp(tok_str, "!="); -- } -+int _PyPegen_check_barry_as_flufl(Parser *p, Token *t) { -+ assert(t->bytes != NULL); -+ assert(t->type == NOTEQUAL); -+ -+ const char *tok_str = PyBytes_AS_STRING(t->bytes); -+ if (p->flags & PyPARSE_BARRY_AS_BDFL && strcmp(tok_str, "<>") != 0) { -+ RAISE_SYNTAX_ERROR("with Barry as BDFL, use '<>' instead of '!='"); -+ return -1; -+ } -+ if (!(p->flags & PyPARSE_BARRY_AS_BDFL)) { -+ return strcmp(tok_str, "!="); -+ } -+ return 0; -+} -+ -+int _PyPegen_check_legacy_stmt(Parser *p, expr_ty name) { -+ if (name->kind != Name_kind) { - return 0; --} -- --int --_PyPegen_check_legacy_stmt(Parser *p, expr_ty name) { -- if (name->kind != Name_kind) { -- return 0; -- } -- const char* candidates[2] = {"print", "exec"}; -- for (int i=0; i<2; i++) { -- if (PyUnicode_CompareWithASCIIString(name->v.Name.id, candidates[i]) == 0) { -- return 1; -- } -+ } -+ const char *candidates[2] = {"print", "exec"}; -+ for (int i = 0; i < 2; i++) { -+ if (PyUnicode_CompareWithASCIIString(name->v.Name.id, candidates[i]) == 0) { -+ return 1; - } -- return 0; -+ } -+ return 0; - } - - static ResultTokenWithMetadata * --result_token_with_metadata(Parser *p, void *result, PyObject *metadata) --{ -- ResultTokenWithMetadata *res = _PyArena_Malloc(p->arena, sizeof(ResultTokenWithMetadata)); -- if (res == NULL) { -- return NULL; -- } -- res->metadata = metadata; -- res->result = result; -- return res; -+result_token_with_metadata(Parser *p, void 
*result, PyObject *metadata) { -+ ResultTokenWithMetadata *res = -+ _PyArena_Malloc(p->arena, sizeof(ResultTokenWithMetadata)); -+ if (res == NULL) { -+ return NULL; -+ } -+ res->metadata = metadata; -+ res->result = result; -+ return res; - } - - ResultTokenWithMetadata * --_PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) --{ -- if (conv_token->lineno != conv->lineno || conv_token->end_col_offset != conv->col_offset) { -- return RAISE_SYNTAX_ERROR_KNOWN_RANGE( -- conv_token, conv, -- "f-string: conversion type must come right after the exclamanation mark" -- ); -- } -- return result_token_with_metadata(p, conv, conv_token->metadata); -+_PyPegen_check_fstring_conversion(Parser *p, Token *conv_token, expr_ty conv) { -+ if (conv_token->lineno != conv->lineno || -+ conv_token->end_col_offset != conv->col_offset) { -+ return RAISE_SYNTAX_ERROR_KNOWN_RANGE(conv_token, conv, -+ "f-string: conversion type must come " -+ "right after the exclamanation mark"); -+ } -+ return result_token_with_metadata(p, conv, conv_token->metadata); - } - -+static asdl_expr_seq * -+unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions); - ResultTokenWithMetadata * --_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, -- int end_lineno, int end_col_offset, PyArena *arena) --{ -- if (!spec) { -- return NULL; -- } -- -- // This is needed to keep compatibility with 3.11, where an empty format spec is parsed -- // as an *empty* JoinedStr node, instead of having an empty constant in it. -- if (asdl_seq_LEN(spec) == 1) { -- expr_ty e = asdl_seq_GET(spec, 0); -- if (e->kind == Constant_kind -- && PyUnicode_Check(e->v.Constant.value) -- && PyUnicode_GetLength(e->v.Constant.value) == 0) { -- spec = _Py_asdl_expr_seq_new(0, arena); -- } -- } -- -- expr_ty res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, end_col_offset, p->arena); -- if (!res) { -- return NULL; -- } -- return result_token_with_metadata(p, res, colon->metadata); --} -- --const char * --_PyPegen_get_expr_name(expr_ty e) --{ -- assert(e != NULL); -- switch (e->kind) { -- case Attribute_kind: -- return "attribute"; -- case Subscript_kind: -- return "subscript"; -- case Starred_kind: -- return "starred"; -- case Name_kind: -- return "name"; -- case List_kind: -- return "list"; -- case Tuple_kind: -- return "tuple"; -- case Lambda_kind: -- return "lambda"; -- case Call_kind: -- return "function call"; -- case BoolOp_kind: -- case BinOp_kind: -- case UnaryOp_kind: -- return "expression"; -- case GeneratorExp_kind: -- return "generator expression"; -- case Yield_kind: -- case YieldFrom_kind: -- return "yield expression"; -- case Await_kind: -- return "await expression"; -- case ListComp_kind: -- return "list comprehension"; -- case SetComp_kind: -- return "set comprehension"; -- case DictComp_kind: -- return "dict comprehension"; -- case Dict_kind: -- return "dict literal"; -- case Set_kind: -- return "set display"; -- case JoinedStr_kind: -- case FormattedValue_kind: -- return "f-string expression"; -- case Constant_kind: { -- PyObject *value = e->v.Constant.value; -- if (value == Py_None) { -- return "None"; -- } -- if (value == Py_False) { -- return "False"; -- } -- if (value == Py_True) { -- return "True"; -- } -- if (value == Py_Ellipsis) { -- return "ellipsis"; -- } -- return "literal"; -- } -- case Compare_kind: -- return "comparison"; -- case IfExp_kind: -- return "conditional expression"; -- case NamedExpr_kind: -- return "named expression"; -- 
default: -- PyErr_Format(PyExc_SystemError, -- "unexpected expression in assignment %d (line %d)", -- e->kind, e->lineno); -- return NULL; -- } --} -- --expr_ty --_PyPegen_get_last_comprehension_item(comprehension_ty comprehension) { -- if (comprehension->ifs == NULL || asdl_seq_LEN(comprehension->ifs) == 0) { -- return comprehension->iter; -- } -- return PyPegen_last_item(comprehension->ifs, expr_ty); -+_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, -+ int lineno, int col_offset, int end_lineno, -+ int end_col_offset, PyArena *arena) { -+ if (!spec) { -+ return NULL; -+ } -+ -+ // This is needed to keep compatibility with 3.11, where an empty format spec -+ // is parsed as an *empty* JoinedStr node, instead of having an empty constant -+ // in it. -+ if (asdl_seq_LEN(spec) == 1) { -+ expr_ty e = asdl_seq_GET(spec, 0); -+ if (e->kind == Constant_kind && PyUnicode_Check(e->v.Constant.value) && -+ PyUnicode_GetLength(e->v.Constant.value) == 0) { -+ spec = _Py_asdl_expr_seq_new(0, arena); -+ } -+ } -+ expr_ty res; -+ Py_ssize_t n = asdl_seq_LEN(spec); -+ if (n == 0 || (n == 1 && asdl_seq_GET(spec, 0)->kind == Constant_kind)) { -+ res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, end_col_offset, -+ p->arena); -+ } else { -+ res = _PyPegen_concatenate_strings(p, spec, lineno, col_offset, end_lineno, -+ end_col_offset, arena); -+ } -+ if (!res) { -+ return NULL; -+ } -+ return result_token_with_metadata(p, res, colon->metadata); -+} -+ -+const char *_PyPegen_get_expr_name(expr_ty e) { -+ assert(e != NULL); -+ switch (e->kind) { -+ case Attribute_kind: -+ return "attribute"; -+ case Subscript_kind: -+ return "subscript"; -+ case Starred_kind: -+ return "starred"; -+ case Name_kind: -+ return "name"; -+ case List_kind: -+ return "list"; -+ case Tuple_kind: -+ return "tuple"; -+ case Lambda_kind: -+ return "lambda"; -+ case Call_kind: -+ return "function call"; -+ case BoolOp_kind: -+ case BinOp_kind: -+ case UnaryOp_kind: -+ return "expression"; -+ case GeneratorExp_kind: -+ return "generator expression"; -+ case Yield_kind: -+ case YieldFrom_kind: -+ return "yield expression"; -+ case Await_kind: -+ return "await expression"; -+ case ListComp_kind: -+ return "list comprehension"; -+ case SetComp_kind: -+ return "set comprehension"; -+ case DictComp_kind: -+ return "dict comprehension"; -+ case Dict_kind: -+ return "dict literal"; -+ case Set_kind: -+ return "set display"; -+ case JoinedStr_kind: -+ case FormattedValue_kind: -+ return "f-string expression"; -+ case Constant_kind: { -+ PyObject *value = e->v.Constant.value; -+ if (value == Py_None) { -+ return "None"; -+ } -+ if (value == Py_False) { -+ return "False"; -+ } -+ if (value == Py_True) { -+ return "True"; -+ } -+ if (value == Py_Ellipsis) { -+ return "ellipsis"; -+ } -+ return "literal"; -+ } -+ case Compare_kind: -+ return "comparison"; -+ case IfExp_kind: -+ return "conditional expression"; -+ case NamedExpr_kind: -+ return "named expression"; -+ default: -+ PyErr_Format(PyExc_SystemError, -+ "unexpected expression in assignment %d (line %d)", e->kind, -+ e->lineno); -+ return NULL; -+ } -+} -+ -+expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension) { -+ if (comprehension->ifs == NULL || asdl_seq_LEN(comprehension->ifs) == 0) { -+ return comprehension->iter; -+ } -+ return PyPegen_last_item(comprehension->ifs, expr_ty); - } - - expr_ty _PyPegen_collect_call_seqs(Parser *p, asdl_expr_seq *a, asdl_seq *b, -- int lineno, int col_offset, int end_lineno, -- int 
end_col_offset, PyArena *arena) { -- Py_ssize_t args_len = asdl_seq_LEN(a); -- Py_ssize_t total_len = args_len; -+ int lineno, int col_offset, int end_lineno, -+ int end_col_offset, PyArena *arena) { -+ Py_ssize_t args_len = asdl_seq_LEN(a); -+ Py_ssize_t total_len = args_len; - -- if (b == NULL) { -- return _PyAST_Call(_PyPegen_dummy_name(p), a, NULL, lineno, col_offset, -- end_lineno, end_col_offset, arena); -+ if (b == NULL) { -+ return _PyAST_Call(_PyPegen_dummy_name(p), a, NULL, lineno, col_offset, -+ end_lineno, end_col_offset, arena); -+ } - -- } -+ asdl_expr_seq *starreds = _PyPegen_seq_extract_starred_exprs(p, b); -+ asdl_keyword_seq *keywords = _PyPegen_seq_delete_starred_exprs(p, b); - -- asdl_expr_seq *starreds = _PyPegen_seq_extract_starred_exprs(p, b); -- asdl_keyword_seq *keywords = _PyPegen_seq_delete_starred_exprs(p, b); -+ if (starreds) { -+ total_len += asdl_seq_LEN(starreds); -+ } - -- if (starreds) { -- total_len += asdl_seq_LEN(starreds); -- } -+ asdl_expr_seq *args = _Py_asdl_expr_seq_new(total_len, arena); - -- asdl_expr_seq *args = _Py_asdl_expr_seq_new(total_len, arena); -- -- Py_ssize_t i = 0; -- for (i = 0; i < args_len; i++) { -- asdl_seq_SET(args, i, asdl_seq_GET(a, i)); -- } -- for (; i < total_len; i++) { -- asdl_seq_SET(args, i, asdl_seq_GET(starreds, i - args_len)); -- } -+ Py_ssize_t i = 0; -+ for (i = 0; i < args_len; i++) { -+ asdl_seq_SET(args, i, asdl_seq_GET(a, i)); -+ } -+ for (; i < total_len; i++) { -+ asdl_seq_SET(args, i, asdl_seq_GET(starreds, i - args_len)); -+ } - -- return _PyAST_Call(_PyPegen_dummy_name(p), args, keywords, lineno, -- col_offset, end_lineno, end_col_offset, arena); -+ return _PyAST_Call(_PyPegen_dummy_name(p), args, keywords, lineno, col_offset, -+ end_lineno, end_col_offset, arena); - } - - // AST Error reporting helpers - --expr_ty --_PyPegen_get_invalid_target(expr_ty e, TARGETS_TYPE targets_type) --{ -- if (e == NULL) { -- return NULL; -- } -- --#define VISIT_CONTAINER(CONTAINER, TYPE) do { \ -- Py_ssize_t len = asdl_seq_LEN((CONTAINER)->v.TYPE.elts);\ -- for (Py_ssize_t i = 0; i < len; i++) {\ -- expr_ty other = asdl_seq_GET((CONTAINER)->v.TYPE.elts, i);\ -- expr_ty child = _PyPegen_get_invalid_target(other, targets_type);\ -- if (child != NULL) {\ -- return child;\ -- }\ -- }\ -- } while (0) -- -- // We only need to visit List and Tuple nodes recursively as those -- // are the only ones that can contain valid names in targets when -- // they are parsed as expressions. Any other kind of expression -- // that is a container (like Sets or Dicts) is directly invalid and -- // we don't need to visit it recursively. -- -- switch (e->kind) { -- case List_kind: -- VISIT_CONTAINER(e, List); -- return NULL; -- case Tuple_kind: -- VISIT_CONTAINER(e, Tuple); -- return NULL; -- case Starred_kind: -- if (targets_type == DEL_TARGETS) { -- return e; -- } -- return _PyPegen_get_invalid_target(e->v.Starred.value, targets_type); -- case Compare_kind: -- // This is needed, because the `a in b` in `for a in b` gets parsed -- // as a comparison, and so we need to search the left side of the comparison -- // for invalid targets. 
-- if (targets_type == FOR_TARGETS) { -- cmpop_ty cmpop = (cmpop_ty) asdl_seq_GET(e->v.Compare.ops, 0); -- if (cmpop == In) { -- return _PyPegen_get_invalid_target(e->v.Compare.left, targets_type); -- } -- return NULL; -- } -- return e; -- case Name_kind: -- case Subscript_kind: -- case Attribute_kind: -- return NULL; -- default: -- return e; -- } -+expr_ty _PyPegen_get_invalid_target(expr_ty e, TARGETS_TYPE targets_type) { -+ if (e == NULL) { -+ return NULL; -+ } -+ -+#define VISIT_CONTAINER(CONTAINER, TYPE) \ -+ do { \ -+ Py_ssize_t len = asdl_seq_LEN((CONTAINER)->v.TYPE.elts); \ -+ for (Py_ssize_t i = 0; i < len; i++) { \ -+ expr_ty other = asdl_seq_GET((CONTAINER)->v.TYPE.elts, i); \ -+ expr_ty child = _PyPegen_get_invalid_target(other, targets_type); \ -+ if (child != NULL) { \ -+ return child; \ -+ } \ -+ } \ -+ } while (0) -+ -+ // We only need to visit List and Tuple nodes recursively as those -+ // are the only ones that can contain valid names in targets when -+ // they are parsed as expressions. Any other kind of expression -+ // that is a container (like Sets or Dicts) is directly invalid and -+ // we don't need to visit it recursively. -+ -+ switch (e->kind) { -+ case List_kind: -+ VISIT_CONTAINER(e, List); -+ return NULL; -+ case Tuple_kind: -+ VISIT_CONTAINER(e, Tuple); -+ return NULL; -+ case Starred_kind: -+ if (targets_type == DEL_TARGETS) { -+ return e; -+ } -+ return _PyPegen_get_invalid_target(e->v.Starred.value, targets_type); -+ case Compare_kind: -+ // This is needed, because the `a in b` in `for a in b` gets parsed -+ // as a comparison, and so we need to search the left side of the comparison -+ // for invalid targets. -+ if (targets_type == FOR_TARGETS) { -+ cmpop_ty cmpop = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, 0); -+ if (cmpop == In) { -+ return _PyPegen_get_invalid_target(e->v.Compare.left, targets_type); -+ } -+ return NULL; -+ } -+ return e; -+ case Name_kind: -+ case Subscript_kind: -+ case Attribute_kind: -+ return NULL; -+ default: -+ return e; -+ } - } - - void *_PyPegen_arguments_parsing_error(Parser *p, expr_ty e) { -- int kwarg_unpacking = 0; -- for (Py_ssize_t i = 0, l = asdl_seq_LEN(e->v.Call.keywords); i < l; i++) { -- keyword_ty keyword = asdl_seq_GET(e->v.Call.keywords, i); -- if (!keyword->arg) { -- kwarg_unpacking = 1; -- } -- } -- -- const char *msg = NULL; -- if (kwarg_unpacking) { -- msg = "positional argument follows keyword argument unpacking"; -- } else { -- msg = "positional argument follows keyword argument"; -- } -- -- return RAISE_SYNTAX_ERROR(msg); --} -- --void * --_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq *comprehensions) --{ -- /* The rule that calls this function is 'args for_if_clauses'. -- For the input f(L, x for x in y), L and x are in args and -- the for is parsed as a for_if_clause. 
We have to check if -- len <= 1, so that input like dict((a, b) for a, b in x) -- gets successfully parsed and then we pass the last -- argument (x in the above example) as the location of the -- error */ -- Py_ssize_t len = asdl_seq_LEN(args->v.Call.args); -- if (len <= 1) { -- return NULL; -- } -- -- comprehension_ty last_comprehension = PyPegen_last_item(comprehensions, comprehension_ty); -- -- return RAISE_SYNTAX_ERROR_KNOWN_RANGE( -- (expr_ty) asdl_seq_GET(args->v.Call.args, len - 1), -- _PyPegen_get_last_comprehension_item(last_comprehension), -- "Generator expression must be parenthesized" -- ); -+ int kwarg_unpacking = 0; -+ for (Py_ssize_t i = 0, l = asdl_seq_LEN(e->v.Call.keywords); i < l; i++) { -+ keyword_ty keyword = asdl_seq_GET(e->v.Call.keywords, i); -+ if (!keyword->arg) { -+ kwarg_unpacking = 1; -+ } -+ } -+ -+ const char *msg = NULL; -+ if (kwarg_unpacking) { -+ msg = "positional argument follows keyword argument unpacking"; -+ } else { -+ msg = "positional argument follows keyword argument"; -+ } -+ -+ return RAISE_SYNTAX_ERROR(msg); -+} -+ -+void *_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, -+ asdl_comprehension_seq *comprehensions) { -+ /* The rule that calls this function is 'args for_if_clauses'. -+ For the input f(L, x for x in y), L and x are in args and -+ the for is parsed as a for_if_clause. We have to check if -+ len <= 1, so that input like dict((a, b) for a, b in x) -+ gets successfully parsed and then we pass the last -+ argument (x in the above example) as the location of the -+ error */ -+ Py_ssize_t len = asdl_seq_LEN(args->v.Call.args); -+ if (len <= 1) { -+ return NULL; -+ } -+ -+ comprehension_ty last_comprehension = -+ PyPegen_last_item(comprehensions, comprehension_ty); -+ -+ return RAISE_SYNTAX_ERROR_KNOWN_RANGE( -+ (expr_ty)asdl_seq_GET(args->v.Call.args, len - 1), -+ _PyPegen_get_last_comprehension_item(last_comprehension), -+ "Generator expression must be parenthesized"); - } - - // Fstring stuff - --static expr_ty --_PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant, Token* token) { -- assert(PyUnicode_CheckExact(constant->v.Constant.value)); -- -- const char* bstr = PyUnicode_AsUTF8(constant->v.Constant.value); -- if (bstr == NULL) { -- return NULL; -- } -- -- size_t len; -- if (strcmp(bstr, "{{") == 0 || strcmp(bstr, "}}") == 0) { -- len = 1; -- } else { -- len = strlen(bstr); -- } -- -- is_raw = is_raw || strchr(bstr, '\\') == NULL; -- PyObject *str = _PyPegen_decode_string(p, is_raw, bstr, len, token); -- if (str == NULL) { -- _Pypegen_raise_decode_error(p); -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, str) < 0) { -- Py_DECREF(str); -- return NULL; -- } -- return _PyAST_Constant(str, NULL, constant->lineno, constant->col_offset, -- constant->end_lineno, constant->end_col_offset, -- p->arena); -+static expr_ty _PyPegen_decode_fstring_part(Parser *p, int is_raw, -+ expr_ty constant, Token *token) { -+ assert(PyUnicode_CheckExact(constant->v.Constant.value)); -+ -+ const char *bstr = PyUnicode_AsUTF8(constant->v.Constant.value); -+ if (bstr == NULL) { -+ return NULL; -+ } -+ -+ size_t len; -+ if (strcmp(bstr, "{{") == 0 || strcmp(bstr, "}}") == 0) { -+ len = 1; -+ } else { -+ len = strlen(bstr); -+ } -+ -+ is_raw = is_raw || strchr(bstr, '\\') == NULL; -+ PyObject *str = _PyPegen_decode_string(p, is_raw, bstr, len, token); -+ if (str == NULL) { -+ _Pypegen_raise_decode_error(p); -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, str) < 0) { -+ Py_DECREF(str); -+ return NULL; -+ } -+ return 
_PyAST_Constant(str, NULL, constant->lineno, constant->col_offset, -+ constant->end_lineno, constant->end_col_offset, -+ p->arena); - } - - static asdl_expr_seq * --unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) --{ -- /* The parser might put multiple f-string values into an individual -- * JoinedStr node at the top level due to stuff like f-string debugging -- * expressions. This function flattens those and promotes them to the -- * upper level. Only simplifies AST, but the compiler already takes care -- * of the regular output, so this is not necessary if you are not going -- * to expose the output AST to Python level. */ -- -- Py_ssize_t i, req_size, raw_size; -- -- req_size = raw_size = asdl_seq_LEN(raw_expressions); -- expr_ty expr; -- for (i = 0; i < raw_size; i++) { -- expr = asdl_seq_GET(raw_expressions, i); -- if (expr->kind == JoinedStr_kind) { -- req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1; -- } -- } -- -- asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena); -- -- Py_ssize_t raw_index, req_index = 0; -- for (raw_index = 0; raw_index < raw_size; raw_index++) { -- expr = asdl_seq_GET(raw_expressions, raw_index); -- if (expr->kind == JoinedStr_kind) { -- asdl_expr_seq *values = expr->v.JoinedStr.values; -- for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) { -- asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n)); -- req_index++; -- } -- } else { -- asdl_seq_SET(expressions, req_index, expr); -- req_index++; -- } -- } -- return expressions; --} -- --expr_ty --_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) { -- asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); -- Py_ssize_t n_items = asdl_seq_LEN(expr); -- -- const char* quote_str = PyBytes_AsString(a->bytes); -- if (quote_str == NULL) { -- return NULL; -- } -- int is_raw = strpbrk(quote_str, "rR") != NULL; -- -- asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena); -- if (seq == NULL) { -- return NULL; -- } -- -- Py_ssize_t index = 0; -- for (Py_ssize_t i = 0; i < n_items; i++) { -- expr_ty item = asdl_seq_GET(expr, i); -- if (item->kind == Constant_kind) { -- item = _PyPegen_decode_fstring_part(p, is_raw, item, b); -- if (item == NULL) { -- return NULL; -- } -- -- /* Tokenizer emits string parts even when the underlying string -- might become an empty value (e.g. FSTRING_MIDDLE with the value \\n) -- so we need to check for them and simplify it here. 
*/ -- if (PyUnicode_CheckExact(item->v.Constant.value) -- && PyUnicode_GET_LENGTH(item->v.Constant.value) == 0) { -- continue; -- } -- } -- asdl_seq_SET(seq, index++, item); -- } -- -- asdl_expr_seq *resized_exprs; -- if (index != n_items) { -- resized_exprs = _Py_asdl_expr_seq_new(index, p->arena); -- if (resized_exprs == NULL) { -- return NULL; -- } -- for (Py_ssize_t i = 0; i < index; i++) { -- asdl_seq_SET(resized_exprs, i, asdl_seq_GET(seq, i)); -- } -- } -- else { -- resized_exprs = seq; -- } -- -- return _PyAST_JoinedStr(resized_exprs, a->lineno, a->col_offset, -- b->end_lineno, b->end_col_offset, -- p->arena); --} -- --expr_ty _PyPegen_decoded_constant_from_token(Parser* p, Token* tok) { -- Py_ssize_t bsize; -- char* bstr; -- if (PyBytes_AsStringAndSize(tok->bytes, &bstr, &bsize) == -1) { -- return NULL; -- } -- PyObject* str = _PyPegen_decode_string(p, 0, bstr, bsize, tok); -- if (str == NULL) { -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, str) < 0) { -- Py_DECREF(str); -- return NULL; -- } -- return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset, -- tok->end_lineno, tok->end_col_offset, -- p->arena); --} -- --expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok) { -- char* bstr = PyBytes_AsString(tok->bytes); -- if (bstr == NULL) { -- return NULL; -- } -- PyObject* str = PyUnicode_FromString(bstr); -- if (str == NULL) { -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, str) < 0) { -- Py_DECREF(str); -- return NULL; -- } -- return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset, -- tok->end_lineno, tok->end_col_offset, -- p->arena); --} -- --expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok) { -- char* the_str = PyBytes_AsString(tok->bytes); -- if (the_str == NULL) { -- return NULL; -- } -- PyObject *s = _PyPegen_parse_string(p, tok); -- if (s == NULL) { -- _Pypegen_raise_decode_error(p); -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, s) < 0) { -- Py_DECREF(s); -- return NULL; -- } -- PyObject *kind = NULL; -- if (the_str && the_str[0] == 'u') { -- kind = _PyPegen_new_identifier(p, "u"); -- if (kind == NULL) { -- return NULL; -- } -- } -- return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, tok->end_col_offset, p->arena); --} -- --expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ResultTokenWithMetadata *conversion, -- ResultTokenWithMetadata *format, Token *closing_brace, int lineno, int col_offset, -- int end_lineno, int end_col_offset, PyArena *arena) { -- int conversion_val = -1; -- if (conversion != NULL) { -- expr_ty conversion_expr = (expr_ty) conversion->result; -- assert(conversion_expr->kind == Name_kind); -- Py_UCS4 first = PyUnicode_READ_CHAR(conversion_expr->v.Name.id, 0); -- -- if (PyUnicode_GET_LENGTH(conversion_expr->v.Name.id) > 1 || -- !(first == 's' || first == 'r' || first == 'a')) { -- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr, -- "f-string: invalid conversion character %R: expected 's', 'r', or 'a'", -- conversion_expr->v.Name.id); -- return NULL; -- } -- -- conversion_val = Py_SAFE_DOWNCAST(first, Py_UCS4, int); -- } -- else if (debug && !format) { -- /* If no conversion is specified, use !r for debug expressions */ -- conversion_val = (int)'r'; -- } -- -- expr_ty formatted_value = _PyAST_FormattedValue( -- expression, conversion_val, format ? 
(expr_ty) format->result : NULL, -- lineno, col_offset, end_lineno, -- end_col_offset, arena -- ); -- -- if (debug) { -- /* Find the non whitespace token after the "=" */ -- int debug_end_line, debug_end_offset; -- PyObject *debug_metadata; -- -- if (conversion) { -- debug_end_line = ((expr_ty) conversion->result)->lineno; -- debug_end_offset = ((expr_ty) conversion->result)->col_offset; -- debug_metadata = conversion->metadata; -- } -- else if (format) { -- debug_end_line = ((expr_ty) format->result)->lineno; -- debug_end_offset = ((expr_ty) format->result)->col_offset + 1; -- debug_metadata = format->metadata; -- } -- else { -- debug_end_line = end_lineno; -- debug_end_offset = end_col_offset; -- debug_metadata = closing_brace->metadata; -- } -- -- expr_ty debug_text = _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, debug_end_line, -- debug_end_offset - 1, p->arena); -- if (!debug_text) { -- return NULL; -- } -- -- asdl_expr_seq *values = _Py_asdl_expr_seq_new(2, arena); -- asdl_seq_SET(values, 0, debug_text); -- asdl_seq_SET(values, 1, formatted_value); -- return _PyAST_JoinedStr(values, lineno, col_offset, debug_end_line, debug_end_offset, p->arena); -- } -- else { -- return formatted_value; -- } --} -- --expr_ty --_PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, -- int lineno, int col_offset, int end_lineno, -- int end_col_offset, PyArena *arena) --{ -- Py_ssize_t len = asdl_seq_LEN(strings); -- assert(len > 0); -- -- int f_string_found = 0; -- int unicode_string_found = 0; -- int bytes_found = 0; -- -- Py_ssize_t i = 0; -- Py_ssize_t n_flattened_elements = 0; -+unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) { -+ /* The parser might put multiple f-string values into an individual -+ * JoinedStr node at the top level due to stuff like f-string debugging -+ * expressions. This function flattens those and promotes them to the -+ * upper level. Only simplifies AST, but the compiler already takes care -+ * of the regular output, so this is not necessary if you are not going -+ * to expose the output AST to Python level. 
*/ -+ -+ Py_ssize_t i, req_size, raw_size; -+ -+ req_size = raw_size = asdl_seq_LEN(raw_expressions); -+ expr_ty expr; -+ for (i = 0; i < raw_size; i++) { -+ expr = asdl_seq_GET(raw_expressions, i); -+ if (expr->kind == JoinedStr_kind) { -+ req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1; -+ } -+ } -+ -+ asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena); -+ -+ Py_ssize_t raw_index, req_index = 0; -+ for (raw_index = 0; raw_index < raw_size; raw_index++) { -+ expr = asdl_seq_GET(raw_expressions, raw_index); -+ if (expr->kind == JoinedStr_kind) { -+ asdl_expr_seq *values = expr->v.JoinedStr.values; -+ for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) { -+ asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n)); -+ req_index++; -+ } -+ } else { -+ asdl_seq_SET(expressions, req_index, expr); -+ req_index++; -+ } -+ } -+ return expressions; -+} -+ -+expr_ty _PyPegen_joined_str(Parser *p, Token *a, asdl_expr_seq *raw_expressions, -+ Token *b) { -+ -+ asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); -+ Py_ssize_t n_items = asdl_seq_LEN(expr); -+ -+ const char *quote_str = PyBytes_AsString(a->bytes); -+ if (quote_str == NULL) { -+ return NULL; -+ } -+ int is_raw = strpbrk(quote_str, "rR") != NULL; -+ -+ asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena); -+ if (seq == NULL) { -+ return NULL; -+ } -+ -+ Py_ssize_t index = 0; -+ for (Py_ssize_t i = 0; i < n_items; i++) { -+ expr_ty item = asdl_seq_GET(expr, i); -+ if (item->kind == Constant_kind) { -+ item = _PyPegen_decode_fstring_part(p, is_raw, item, b); -+ if (item == NULL) { -+ return NULL; -+ } -+ -+ /* Tokenizer emits string parts even when the underlying string -+ might become an empty value (e.g. FSTRING_MIDDLE with the value \\n) -+ so we need to check for them and simplify it here. 
*/ -+ if (PyUnicode_CheckExact(item->v.Constant.value) && -+ PyUnicode_GET_LENGTH(item->v.Constant.value) == 0) { -+ continue; -+ } -+ } -+ asdl_seq_SET(seq, index++, item); -+ } -+ -+ asdl_expr_seq *resized_exprs; -+ if (index != n_items) { -+ resized_exprs = _Py_asdl_expr_seq_new(index, p->arena); -+ if (resized_exprs == NULL) { -+ return NULL; -+ } -+ for (Py_ssize_t i = 0; i < index; i++) { -+ asdl_seq_SET(resized_exprs, i, asdl_seq_GET(seq, i)); -+ } -+ } else { -+ resized_exprs = seq; -+ } -+ -+ return _PyAST_JoinedStr(resized_exprs, a->lineno, a->col_offset, -+ b->end_lineno, b->end_col_offset, p->arena); -+} -+ -+expr_ty _PyPegen_decoded_constant_from_token(Parser *p, Token *tok) { -+ Py_ssize_t bsize; -+ char *bstr; -+ if (PyBytes_AsStringAndSize(tok->bytes, &bstr, &bsize) == -1) { -+ return NULL; -+ } -+ PyObject *str = _PyPegen_decode_string(p, 0, bstr, bsize, tok); -+ if (str == NULL) { -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, str) < 0) { -+ Py_DECREF(str); -+ return NULL; -+ } -+ return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset, -+ tok->end_lineno, tok->end_col_offset, p->arena); -+} -+ -+expr_ty _PyPegen_constant_from_token(Parser *p, Token *tok) { -+ char *bstr = PyBytes_AsString(tok->bytes); -+ if (bstr == NULL) { -+ return NULL; -+ } -+ PyObject *str = PyUnicode_FromString(bstr); -+ if (str == NULL) { -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, str) < 0) { -+ Py_DECREF(str); -+ return NULL; -+ } -+ return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset, -+ tok->end_lineno, tok->end_col_offset, p->arena); -+} -+ -+expr_ty _PyPegen_constant_from_string(Parser *p, Token *tok) { -+ char *the_str = PyBytes_AsString(tok->bytes); -+ if (the_str == NULL) { -+ return NULL; -+ } -+ PyObject *s = _PyPegen_parse_string(p, tok); -+ if (s == NULL) { -+ _Pypegen_raise_decode_error(p); -+ return NULL; -+ } -+ if (_PyArena_AddPyObject(p->arena, s) < 0) { -+ Py_DECREF(s); -+ return NULL; -+ } -+ PyObject *kind = NULL; -+ if (the_str && the_str[0] == 'u') { -+ kind = _PyPegen_new_identifier(p, "u"); -+ if (kind == NULL) { -+ return NULL; -+ } -+ } -+ return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, -+ tok->end_col_offset, p->arena); -+} -+ -+expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, -+ ResultTokenWithMetadata *conversion, -+ ResultTokenWithMetadata *format, -+ Token *closing_brace, int lineno, -+ int col_offset, int end_lineno, -+ int end_col_offset, PyArena *arena) { -+ int conversion_val = -1; -+ if (conversion != NULL) { -+ expr_ty conversion_expr = (expr_ty)conversion->result; -+ assert(conversion_expr->kind == Name_kind); -+ Py_UCS4 first = PyUnicode_READ_CHAR(conversion_expr->v.Name.id, 0); -+ -+ if (PyUnicode_GET_LENGTH(conversion_expr->v.Name.id) > 1 || -+ !(first == 's' || first == 'r' || first == 'a')) { -+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION( -+ conversion_expr, -+ "f-string: invalid conversion character %R: expected 's', 'r', or " -+ "'a'", -+ conversion_expr->v.Name.id); -+ return NULL; -+ } -+ -+ conversion_val = Py_SAFE_DOWNCAST(first, Py_UCS4, int); -+ } else if (debug && !format) { -+ /* If no conversion is specified, use !r for debug expressions */ -+ conversion_val = (int)'r'; -+ } -+ -+ expr_ty formatted_value = _PyAST_FormattedValue( -+ expression, conversion_val, format ? 
(expr_ty)format->result : NULL, -+ lineno, col_offset, end_lineno, end_col_offset, arena); -+ -+ if (debug) { -+ /* Find the non whitespace token after the "=" */ -+ int debug_end_line, debug_end_offset; -+ PyObject *debug_metadata; -+ -+ if (conversion) { -+ debug_end_line = ((expr_ty)conversion->result)->lineno; -+ debug_end_offset = ((expr_ty)conversion->result)->col_offset; -+ debug_metadata = conversion->metadata; -+ } else if (format) { -+ debug_end_line = ((expr_ty)format->result)->lineno; -+ debug_end_offset = ((expr_ty)format->result)->col_offset + 1; -+ debug_metadata = format->metadata; -+ } else { -+ debug_end_line = end_lineno; -+ debug_end_offset = end_col_offset; -+ debug_metadata = closing_brace->metadata; -+ } -+ expr_ty debug_text = -+ _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, -+ debug_end_line, debug_end_offset - 1, p->arena); -+ if (!debug_text) { -+ return NULL; -+ } -+ -+ asdl_expr_seq *values = _Py_asdl_expr_seq_new(2, arena); -+ asdl_seq_SET(values, 0, debug_text); -+ asdl_seq_SET(values, 1, formatted_value); -+ return _PyAST_JoinedStr(values, lineno, col_offset, debug_end_line, -+ debug_end_offset, p->arena); -+ } else { -+ return formatted_value; -+ } -+} -+ -+expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, -+ int lineno, int col_offset, int end_lineno, -+ int end_col_offset, PyArena *arena) { -+ Py_ssize_t len = asdl_seq_LEN(strings); -+ assert(len > 0); -+ -+ int f_string_found = 0; -+ int unicode_string_found = 0; -+ int bytes_found = 0; -+ -+ Py_ssize_t i = 0; -+ Py_ssize_t n_flattened_elements = 0; -+ for (i = 0; i < len; i++) { -+ expr_ty elem = asdl_seq_GET(strings, i); -+ switch (elem->kind) { -+ case Constant_kind: -+ if (PyBytes_CheckExact(elem->v.Constant.value)) { -+ bytes_found = 1; -+ } else { -+ unicode_string_found = 1; -+ } -+ n_flattened_elements++; -+ break; -+ case JoinedStr_kind: -+ n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values); -+ f_string_found = 1; -+ break; -+ default: -+ n_flattened_elements++; -+ f_string_found = 1; -+ break; -+ } -+ } -+ -+ if ((unicode_string_found || f_string_found) && bytes_found) { -+ RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals"); -+ return NULL; -+ } -+ -+ if (bytes_found) { -+ PyObject *res = PyBytes_FromString(""); -+ -+ /* Bytes literals never get a kind, but just for consistency -+ since they are represented as Constant nodes, we'll mirror -+ the same behavior as unicode strings for determining the -+ kind. 
*/ -+ PyObject *kind = asdl_seq_GET(strings, 0)->v.Constant.kind; - for (i = 0; i < len; i++) { -- expr_ty elem = asdl_seq_GET(strings, i); -- if (elem->kind == Constant_kind) { -- if (PyBytes_CheckExact(elem->v.Constant.value)) { -- bytes_found = 1; -- } else { -- unicode_string_found = 1; -+ expr_ty elem = asdl_seq_GET(strings, i); -+ PyBytes_Concat(&res, elem->v.Constant.value); -+ } -+ if (!res || _PyArena_AddPyObject(arena, res) < 0) { -+ Py_XDECREF(res); -+ return NULL; -+ } -+ return _PyAST_Constant(res, kind, lineno, col_offset, end_lineno, -+ end_col_offset, p->arena); -+ } -+ -+ if (!f_string_found && len == 1) { -+ return asdl_seq_GET(strings, 0); -+ } -+ -+ asdl_expr_seq *flattened = -+ _Py_asdl_expr_seq_new(n_flattened_elements, p->arena); -+ if (flattened == NULL) { -+ return NULL; -+ } -+ -+ /* build flattened list */ -+ Py_ssize_t current_pos = 0; -+ Py_ssize_t j = 0; -+ for (i = 0; i < len; i++) { -+ expr_ty elem = asdl_seq_GET(strings, i); -+ switch (elem->kind) { -+ case JoinedStr_kind: -+ for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) { -+ expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j); -+ if (subvalue == NULL) { -+ return NULL; -+ } -+ asdl_seq_SET(flattened, current_pos++, subvalue); -+ } -+ break; -+ default: -+ asdl_seq_SET(flattened, current_pos++, elem); -+ break; -+ } -+ } -+ -+ /* calculate folded element count */ -+ Py_ssize_t n_elements = 0; -+ int prev_is_constant = 0; -+ for (i = 0; i < n_flattened_elements; i++) { -+ expr_ty elem = asdl_seq_GET(flattened, i); -+ -+ /* The concatenation of a FormattedValue and an empty Contant should -+ lead to the FormattedValue itself. Thus, we will not take any empty -+ constants into account, just as in `_PyPegen_joined_str` */ -+ if (f_string_found && elem->kind == Constant_kind && -+ PyUnicode_CheckExact(elem->v.Constant.value) && -+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) -+ continue; -+ -+ if (!prev_is_constant || elem->kind != Constant_kind) { -+ n_elements++; -+ } -+ prev_is_constant = elem->kind == Constant_kind; -+ } -+ -+ asdl_expr_seq *values = _Py_asdl_expr_seq_new(n_elements, p->arena); -+ if (values == NULL) { -+ return NULL; -+ } -+ -+ /* build folded list */ -+ _PyUnicodeWriter writer; -+ current_pos = 0; -+ for (i = 0; i < n_flattened_elements; i++) { -+ expr_ty elem = asdl_seq_GET(flattened, i); -+ -+ /* if the current elem and the following are constants, -+ fold them and all consequent constants */ -+ if (elem->kind == Constant_kind) { -+ if (i + 1 < n_flattened_elements && -+ asdl_seq_GET(flattened, i + 1)->kind == Constant_kind) { -+ expr_ty first_elem = elem; -+ -+ /* When a string is getting concatenated, the kind of the string -+ is determined by the first string in the concatenation -+ sequence. 
-+ -+ u"abc" "def" -> u"abcdef" -+ "abc" u"abc" -> "abcabc" */ -+ PyObject *kind = elem->v.Constant.kind; -+ -+ _PyUnicodeWriter_Init(&writer); -+ expr_ty last_elem = elem; -+ for (j = i; j < n_flattened_elements; j++) { -+ expr_ty current_elem = asdl_seq_GET(flattened, j); -+ if (current_elem->kind == Constant_kind) { -+ if (_PyUnicodeWriter_WriteStr(&writer, -+ current_elem->v.Constant.value)) { -+ _PyUnicodeWriter_Dealloc(&writer); -+ return NULL; - } -- n_flattened_elements++; -- } else { -- n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values); -- f_string_found = 1; -+ last_elem = current_elem; -+ } else { -+ break; -+ } - } -- } -- -- if ((unicode_string_found || f_string_found) && bytes_found) { -- RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals"); -- return NULL; -- } -+ i = j - 1; - -- if (bytes_found) { -- PyObject* res = PyBytes_FromString(""); -- -- /* Bytes literals never get a kind, but just for consistency -- since they are represented as Constant nodes, we'll mirror -- the same behavior as unicode strings for determining the -- kind. */ -- PyObject* kind = asdl_seq_GET(strings, 0)->v.Constant.kind; -- for (i = 0; i < len; i++) { -- expr_ty elem = asdl_seq_GET(strings, i); -- PyBytes_Concat(&res, elem->v.Constant.value); -+ PyObject *concat_str = _PyUnicodeWriter_Finish(&writer); -+ if (concat_str == NULL) { -+ _PyUnicodeWriter_Dealloc(&writer); -+ return NULL; - } -- if (!res || _PyArena_AddPyObject(arena, res) < 0) { -- Py_XDECREF(res); -- return NULL; -+ if (_PyArena_AddPyObject(p->arena, concat_str) < 0) { -+ Py_DECREF(concat_str); -+ return NULL; - } -- return _PyAST_Constant(res, kind, lineno, col_offset, end_lineno, end_col_offset, p->arena); -- } -- -- if (!f_string_found && len == 1) { -- return asdl_seq_GET(strings, 0); -- } -- -- asdl_expr_seq* flattened = _Py_asdl_expr_seq_new(n_flattened_elements, p->arena); -- if (flattened == NULL) { -- return NULL; -- } -- -- /* build flattened list */ -- Py_ssize_t current_pos = 0; -- Py_ssize_t j = 0; -- for (i = 0; i < len; i++) { -- expr_ty elem = asdl_seq_GET(strings, i); -- if (elem->kind == Constant_kind) { -- asdl_seq_SET(flattened, current_pos++, elem); -- } else { -- for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) { -- expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j); -- if (subvalue == NULL) { -- return NULL; -- } -- asdl_seq_SET(flattened, current_pos++, subvalue); -- } -+ elem = _PyAST_Constant(concat_str, kind, first_elem->lineno, -+ first_elem->col_offset, last_elem->end_lineno, -+ last_elem->end_col_offset, p->arena); -+ if (elem == NULL) { -+ return NULL; - } -- } -- -- /* calculate folded element count */ -- Py_ssize_t n_elements = 0; -- int prev_is_constant = 0; -- for (i = 0; i < n_flattened_elements; i++) { -- expr_ty elem = asdl_seq_GET(flattened, i); -- -- /* The concatenation of a FormattedValue and an empty Contant should -- lead to the FormattedValue itself. 
Thus, we will not take any empty -- constants into account, just as in `_PyPegen_joined_str` */ -- if (f_string_found && elem->kind == Constant_kind && -- PyUnicode_CheckExact(elem->v.Constant.value) && -- PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) -- continue; -- -- if (!prev_is_constant || elem->kind != Constant_kind) { -- n_elements++; -- } -- prev_is_constant = elem->kind == Constant_kind; -- } -+ } - -- asdl_expr_seq* values = _Py_asdl_expr_seq_new(n_elements, p->arena); -- if (values == NULL) { -- return NULL; -+ /* Drop all empty contanst strings */ -+ if (f_string_found && PyUnicode_CheckExact(elem->v.Constant.value) && -+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) { -+ continue; -+ } - } - -- /* build folded list */ -- _PyUnicodeWriter writer; -- current_pos = 0; -- for (i = 0; i < n_flattened_elements; i++) { -- expr_ty elem = asdl_seq_GET(flattened, i); -- -- /* if the current elem and the following are constants, -- fold them and all consequent constants */ -- if (elem->kind == Constant_kind) { -- if (i + 1 < n_flattened_elements && -- asdl_seq_GET(flattened, i + 1)->kind == Constant_kind) { -- expr_ty first_elem = elem; -- -- /* When a string is getting concatenated, the kind of the string -- is determined by the first string in the concatenation -- sequence. -- -- u"abc" "def" -> u"abcdef" -- "abc" u"abc" -> "abcabc" */ -- PyObject *kind = elem->v.Constant.kind; -- -- _PyUnicodeWriter_Init(&writer); -- expr_ty last_elem = elem; -- for (j = i; j < n_flattened_elements; j++) { -- expr_ty current_elem = asdl_seq_GET(flattened, j); -- if (current_elem->kind == Constant_kind) { -- if (_PyUnicodeWriter_WriteStr( -- &writer, current_elem->v.Constant.value)) { -- _PyUnicodeWriter_Dealloc(&writer); -- return NULL; -- } -- last_elem = current_elem; -- } else { -- break; -- } -- } -- i = j - 1; -- -- PyObject *concat_str = _PyUnicodeWriter_Finish(&writer); -- if (concat_str == NULL) { -- _PyUnicodeWriter_Dealloc(&writer); -- return NULL; -- } -- if (_PyArena_AddPyObject(p->arena, concat_str) < 0) { -- Py_DECREF(concat_str); -- return NULL; -- } -- elem = _PyAST_Constant(concat_str, kind, first_elem->lineno, -- first_elem->col_offset, -- last_elem->end_lineno, -- last_elem->end_col_offset, p->arena); -- if (elem == NULL) { -- return NULL; -- } -- } -- -- /* Drop all empty contanst strings */ -- if (f_string_found && -- PyUnicode_CheckExact(elem->v.Constant.value) && -- PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) { -- continue; -- } -- } -- -- asdl_seq_SET(values, current_pos++, elem); -- } -+ asdl_seq_SET(values, current_pos++, elem); -+ } - -- if (!f_string_found) { -- assert(n_elements == 1); -- expr_ty elem = asdl_seq_GET(values, 0); -- assert(elem->kind == Constant_kind); -- return elem; -- } -+ if (!f_string_found) { -+ assert(n_elements == 1); -+ expr_ty elem = asdl_seq_GET(values, 0); -+ assert(elem->kind == Constant_kind); -+ return elem; -+ } - -- assert(current_pos == n_elements); -- return _PyAST_JoinedStr(values, lineno, col_offset, end_lineno, end_col_offset, p->arena); -+ assert(current_pos == n_elements); -+ return _PyAST_JoinedStr(values, lineno, col_offset, end_lineno, -+ end_col_offset, p->arena); - } -diff --git a/Parser/myreadline.c b/Parser/myreadline.c -index 7074aba74b7..2890ff83f3f 100644 ---- a/Parser/myreadline.c -+++ b/Parser/myreadline.c -@@ -386,9 +386,14 @@ - } - } - -- _PyOS_ReadlineTState = tstate; - Py_BEGIN_ALLOW_THREADS -+ -+ // GH-123321: We need to acquire the lock before setting -+ // _PyOS_ReadlineTState and after the release 
of the GIL, otherwise -+ // the variable may be nullified by a different thread or a deadlock -+ // may occur if the GIL is taken in any sub-function. - PyThread_acquire_lock(_PyOS_ReadlineLock, 1); -+ _PyOS_ReadlineTState = tstate; - - /* This is needed to handle the unlikely case that the - * interpreter is in interactive mode *and* stdin/out are not -@@ -412,11 +417,13 @@ - else { - rv = (*PyOS_ReadlineFunctionPointer)(sys_stdin, sys_stdout, prompt); - } -- Py_END_ALLOW_THREADS - -+ // gh-123321: Must set the variable and then release the lock before -+ // taking the GIL. Otherwise a deadlock or segfault may occur. -+ _PyOS_ReadlineTState = NULL; - PyThread_release_lock(_PyOS_ReadlineLock); - -- _PyOS_ReadlineTState = NULL; -+ Py_END_ALLOW_THREADS - - if (rv == NULL) - return NULL; -diff --git a/Parser/pegen.c b/Parser/pegen.c -index 5460fbb2ffe..8c36c6779b9 100644 ---- a/Parser/pegen.c -+++ b/Parser/pegen.c -@@ -394,7 +394,7 @@ - - for (Memo *m = t->memo; m != NULL; m = m->next) { - if (m->type == type) { --#if defined(PY_DEBUG) -+#if defined(Py_DEBUG) - if (0 <= type && type < NSTATISTICS) { - long count = m->mark - p->mark; - // A memoized negative result counts for one. -diff --git a/Parser/string_parser.c b/Parser/string_parser.c -index 65c320c2173..164f715e153 100644 ---- a/Parser/string_parser.c -+++ b/Parser/string_parser.c -@@ -226,9 +226,14 @@ - PyErr_BadInternalCall(); - return NULL; - } -+ - /* Skip the leading quote char. */ - s++; - len = strlen(s); -+ // gh-120155: 's' contains at least the trailing quote, -+ // so the code '--len' below is safe. -+ assert(len >= 1); -+ - if (len > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, "string to parse is too long"); - return NULL; -diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c -index 04ba04428fe..9e0dee8cc38 100644 ---- a/Parser/tokenizer.c -+++ b/Parser/tokenizer.c -@@ -3,53 +3,48 @@ - - #define PY_SSIZE_T_CLEAN - #include "Python.h" --#include "pycore_call.h" // _PyObject_CallNoArgs() -+#include "pycore_call.h" // _PyObject_CallNoArgs() - --#include - #include -+#include - --#include "tokenizer.h" - #include "errcode.h" -+#include "tokenizer.h" - - /* Alternate tab spacing */ - #define ALTTABSIZE 1 - --#define is_potential_identifier_start(c) (\ -- (c >= 'a' && c <= 'z')\ -- || (c >= 'A' && c <= 'Z')\ -- || c == '_'\ -- || (c >= 128)) -- --#define is_potential_identifier_char(c) (\ -- (c >= 'a' && c <= 'z')\ -- || (c >= 'A' && c <= 'Z')\ -- || (c >= '0' && c <= '9')\ -- || c == '_'\ -- || (c >= 128)) -+#define is_potential_identifier_start(c) \ -+ ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || (c >= 128)) - -+#define is_potential_identifier_char(c) \ -+ ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || \ -+ (c >= '0' && c <= '9') || c == '_' || (c >= 128)) - - /* Don't ever change this -- it would break the portability of Python code */ - #define TABSIZE 8 - --#define MAKE_TOKEN(token_type) token_setup(tok, token, token_type, p_start, p_end) --#define MAKE_TYPE_COMMENT_TOKEN(token_type, col_offset, end_col_offset) (\ -- type_comment_token_setup(tok, token, token_type, col_offset, end_col_offset, p_start, p_end)) --#define ADVANCE_LINENO() \ -- tok->lineno++; \ -- tok->col_offset = 0; -+#define MAKE_TOKEN(token_type) \ -+ token_setup(tok, token, token_type, p_start, p_end) -+#define MAKE_TYPE_COMMENT_TOKEN(token_type, col_offset, end_col_offset) \ -+ (type_comment_token_setup(tok, token, token_type, col_offset, \ -+ end_col_offset, p_start, p_end)) -+#define ADVANCE_LINENO() \ -+ tok->lineno++; \ 
-+ tok->col_offset = 0; - - #define INSIDE_FSTRING(tok) (tok->tok_mode_stack_index > 0) - #define INSIDE_FSTRING_EXPR(tok) (tok->curly_bracket_expr_start_depth >= 0) - #ifdef Py_DEBUG --static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) { -- assert(tok->tok_mode_stack_index >= 0); -- assert(tok->tok_mode_stack_index < MAXFSTRINGLEVEL); -- return &(tok->tok_mode_stack[tok->tok_mode_stack_index]); -+static inline tokenizer_mode *TOK_GET_MODE(struct tok_state *tok) { -+ assert(tok->tok_mode_stack_index >= 0); -+ assert(tok->tok_mode_stack_index < MAXFSTRINGLEVEL); -+ return &(tok->tok_mode_stack[tok->tok_mode_stack_index]); - } --static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) { -- assert(tok->tok_mode_stack_index >= 0); -- assert(tok->tok_mode_stack_index + 1 < MAXFSTRINGLEVEL); -- return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]); -+static inline tokenizer_mode *TOK_NEXT_MODE(struct tok_state *tok) { -+ assert(tok->tok_mode_stack_index >= 0); -+ assert(tok->tok_mode_stack_index + 1 < MAXFSTRINGLEVEL); -+ return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]); - } - #else - #define TOK_GET_MODE(tok) (&(tok->tok_mode_stack[tok->tok_mode_stack_index])) -@@ -64,171 +59,164 @@ - - /* Spaces in this constant are treated as "zero or more spaces or tabs" when - tokenizing. */ --static const char* type_comment_prefix = "# type: "; -+static const char *type_comment_prefix = "# type: "; - - /* Create and initialize a new tok_state structure */ - --static struct tok_state * --tok_new(void) --{ -- struct tok_state *tok = (struct tok_state *)PyMem_Malloc( -- sizeof(struct tok_state)); -- if (tok == NULL) -- return NULL; -- tok->buf = tok->cur = tok->inp = NULL; -- tok->fp_interactive = 0; -- tok->interactive_src_start = NULL; -- tok->interactive_src_end = NULL; -- tok->start = NULL; -- tok->end = NULL; -- tok->done = E_OK; -- tok->fp = NULL; -- tok->input = NULL; -- tok->tabsize = TABSIZE; -- tok->indent = 0; -- tok->indstack[0] = 0; -- tok->atbol = 1; -- tok->pendin = 0; -- tok->prompt = tok->nextprompt = NULL; -- tok->lineno = 0; -- tok->starting_col_offset = -1; -- tok->col_offset = -1; -- tok->level = 0; -- tok->altindstack[0] = 0; -- tok->decoding_state = STATE_INIT; -- tok->decoding_erred = 0; -- tok->enc = NULL; -- tok->encoding = NULL; -- tok->cont_line = 0; -- tok->filename = NULL; -- tok->decoding_readline = NULL; -- tok->decoding_buffer = NULL; -- tok->readline = NULL; -- tok->type_comments = 0; -- tok->async_hacks = 0; -- tok->async_def = 0; -- tok->async_def_indent = 0; -- tok->async_def_nl = 0; -- tok->interactive_underflow = IUNDERFLOW_NORMAL; -- tok->str = NULL; -- tok->report_warnings = 1; -- tok->tok_extra_tokens = 0; -- tok->comment_newline = 0; -- tok->implicit_newline = 0; -- tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0, .f_string_debug=0}; -- tok->tok_mode_stack_index = 0; -+static struct tok_state *tok_new(void) { -+ struct tok_state *tok = -+ (struct tok_state *)PyMem_Calloc(1, sizeof(struct tok_state)); -+ if (tok == NULL) -+ return NULL; -+ tok->buf = tok->cur = tok->inp = NULL; -+ tok->fp_interactive = 0; -+ tok->interactive_src_start = NULL; -+ tok->interactive_src_end = NULL; -+ tok->start = NULL; -+ tok->end = NULL; -+ tok->done = E_OK; -+ tok->fp = NULL; -+ tok->input = NULL; -+ tok->tabsize = TABSIZE; -+ tok->indent = 0; -+ tok->indstack[0] = 0; -+ tok->atbol = 1; -+ tok->pendin = 0; -+ tok->prompt = tok->nextprompt = NULL; -+ tok->lineno = 0; -+ 
tok->starting_col_offset = -1; -+ tok->col_offset = -1; -+ tok->level = 0; -+ tok->altindstack[0] = 0; -+ tok->decoding_state = STATE_INIT; -+ tok->decoding_erred = 0; -+ tok->enc = NULL; -+ tok->encoding = NULL; -+ tok->cont_line = 0; -+ tok->filename = NULL; -+ tok->decoding_readline = NULL; -+ tok->decoding_buffer = NULL; -+ tok->readline = NULL; -+ tok->type_comments = 0; -+ tok->async_hacks = 0; -+ tok->async_def = 0; -+ tok->async_def_indent = 0; -+ tok->async_def_nl = 0; -+ tok->interactive_underflow = IUNDERFLOW_NORMAL; -+ tok->str = NULL; -+ tok->report_warnings = 1; -+ tok->tok_extra_tokens = 0; -+ tok->comment_newline = 0; -+ tok->implicit_newline = 0; -+ tok->tok_mode_stack[0] = (tokenizer_mode){.kind = TOK_REGULAR_MODE, -+ .f_string_quote = '\0', -+ .f_string_quote_size = 0, -+ .f_string_debug = 0}; -+ tok->tok_mode_stack_index = 0; - #ifdef Py_DEBUG -- tok->debug = _Py_GetConfig()->parser_debug; -+ tok->debug = _Py_GetConfig()->parser_debug; - #endif -- return tok; -+ return tok; - } - --static char * --new_string(const char *s, Py_ssize_t len, struct tok_state *tok) --{ -- char* result = (char *)PyMem_Malloc(len + 1); -- if (!result) { -- tok->done = E_NOMEM; -- return NULL; -- } -- memcpy(result, s, len); -- result[len] = '\0'; -- return result; -+static char *new_string(const char *s, Py_ssize_t len, struct tok_state *tok) { -+ char *result = (char *)PyMem_Malloc(len + 1); -+ if (!result) { -+ tok->done = E_NOMEM; -+ return NULL; -+ } -+ memcpy(result, s, len); -+ result[len] = '\0'; -+ return result; - } - --static char * --error_ret(struct tok_state *tok) /* XXX */ -+static char *error_ret(struct tok_state *tok) /* XXX */ - { -- tok->decoding_erred = 1; -- if ((tok->fp != NULL || tok->readline != NULL) && tok->buf != NULL) {/* see _PyTokenizer_Free */ -- PyMem_Free(tok->buf); -- } -- tok->buf = tok->cur = tok->inp = NULL; -- tok->start = NULL; -- tok->end = NULL; -- tok->done = E_DECODE; -- return NULL; /* as if it were EOF */ --} -- -- --static const char * --get_normal_name(const char *s) /* for utf-8 and latin-1 */ -+ tok->decoding_erred = 1; -+ if ((tok->fp != NULL || tok->readline != NULL) && -+ tok->buf != NULL) { /* see _PyTokenizer_Free */ -+ PyMem_Free(tok->buf); -+ } -+ tok->buf = tok->cur = tok->inp = NULL; -+ tok->start = NULL; -+ tok->end = NULL; -+ tok->done = E_DECODE; -+ return NULL; /* as if it were EOF */ -+} -+ -+static const char *get_normal_name(const char *s) /* for utf-8 and latin-1 */ - { -- char buf[13]; -- int i; -- for (i = 0; i < 12; i++) { -- int c = s[i]; -- if (c == '\0') -- break; -- else if (c == '_') -- buf[i] = '-'; -- else -- buf[i] = tolower(c); -- } -- buf[i] = '\0'; -- if (strcmp(buf, "utf-8") == 0 || -- strncmp(buf, "utf-8-", 6) == 0) -- return "utf-8"; -- else if (strcmp(buf, "latin-1") == 0 || -- strcmp(buf, "iso-8859-1") == 0 || -- strcmp(buf, "iso-latin-1") == 0 || -- strncmp(buf, "latin-1-", 8) == 0 || -- strncmp(buf, "iso-8859-1-", 11) == 0 || -- strncmp(buf, "iso-latin-1-", 12) == 0) -- return "iso-8859-1"; -+ char buf[13]; -+ int i; -+ for (i = 0; i < 12; i++) { -+ int c = s[i]; -+ if (c == '\0') -+ break; -+ else if (c == '_') -+ buf[i] = '-'; - else -- return s; -+ buf[i] = tolower(c); -+ } -+ buf[i] = '\0'; -+ if (strcmp(buf, "utf-8") == 0 || strncmp(buf, "utf-8-", 6) == 0) -+ return "utf-8"; -+ else if (strcmp(buf, "latin-1") == 0 || strcmp(buf, "iso-8859-1") == 0 || -+ strcmp(buf, "iso-latin-1") == 0 || -+ strncmp(buf, "latin-1-", 8) == 0 || -+ strncmp(buf, "iso-8859-1-", 11) == 0 || -+ strncmp(buf, "iso-latin-1-", 12) 
== 0) -+ return "iso-8859-1"; -+ else -+ return s; - } - - /* Return the coding spec in S, or NULL if none is found. */ - --static int --get_coding_spec(const char *s, char **spec, Py_ssize_t size, struct tok_state *tok) --{ -- Py_ssize_t i; -- *spec = NULL; -- /* Coding spec must be in a comment, and that comment must be -- * the only statement on the source code line. */ -- for (i = 0; i < size - 6; i++) { -- if (s[i] == '#') -- break; -- if (s[i] != ' ' && s[i] != '\t' && s[i] != '\014') -- return 1; -- } -- for (; i < size - 6; i++) { /* XXX inefficient search */ -- const char* t = s + i; -- if (memcmp(t, "coding", 6) == 0) { -- const char* begin = NULL; -- t += 6; -- if (t[0] != ':' && t[0] != '=') -- continue; -- do { -- t++; -- } while (t[0] == ' ' || t[0] == '\t'); -- -- begin = t; -- while (Py_ISALNUM(t[0]) || -- t[0] == '-' || t[0] == '_' || t[0] == '.') -- t++; -- -- if (begin < t) { -- char* r = new_string(begin, t - begin, tok); -- const char* q; -- if (!r) -- return 0; -- q = get_normal_name(r); -- if (r != q) { -- PyMem_Free(r); -- r = new_string(q, strlen(q), tok); -- if (!r) -- return 0; -- } -- *spec = r; -- break; -- } -+static int get_coding_spec(const char *s, char **spec, Py_ssize_t size, -+ struct tok_state *tok) { -+ Py_ssize_t i; -+ *spec = NULL; -+ /* Coding spec must be in a comment, and that comment must be -+ * the only statement on the source code line. */ -+ for (i = 0; i < size - 6; i++) { -+ if (s[i] == '#') -+ break; -+ if (s[i] != ' ' && s[i] != '\t' && s[i] != '\014') -+ return 1; -+ } -+ for (; i < size - 6; i++) { /* XXX inefficient search */ -+ const char *t = s + i; -+ if (memcmp(t, "coding", 6) == 0) { -+ const char *begin = NULL; -+ t += 6; -+ if (t[0] != ':' && t[0] != '=') -+ continue; -+ do { -+ t++; -+ } while (t[0] == ' ' || t[0] == '\t'); -+ -+ begin = t; -+ while (Py_ISALNUM(t[0]) || t[0] == '-' || t[0] == '_' || t[0] == '.') -+ t++; -+ -+ if (begin < t) { -+ char *r = new_string(begin, t - begin, tok); -+ const char *q; -+ if (!r) -+ return 0; -+ q = get_normal_name(r); -+ if (r != q) { -+ PyMem_Free(r); -+ r = new_string(q, strlen(q), tok); -+ if (!r) -+ return 0; - } -+ *spec = r; -+ break; -+ } - } -- return 1; -+ } -+ return 1; - } - - /* Check whether the line contains a coding spec. If it does, -@@ -237,299 +225,288 @@ - Return 1 on success, 0 on failure. */ - - static int --check_coding_spec(const char* line, Py_ssize_t size, struct tok_state *tok, -- int set_readline(struct tok_state *, const char *)) --{ -- char *cs; -- if (tok->cont_line) { -- /* It's a continuation line, so it can't be a coding spec. */ -- tok->decoding_state = STATE_NORMAL; -- return 1; -- } -- if (!get_coding_spec(line, &cs, size, tok)) { -- return 0; -- } -- if (!cs) { -- Py_ssize_t i; -- for (i = 0; i < size; i++) { -- if (line[i] == '#' || line[i] == '\n' || line[i] == '\r') -- break; -- if (line[i] != ' ' && line[i] != '\t' && line[i] != '\014') { -- /* Stop checking coding spec after a line containing -- * anything except a comment. */ -- tok->decoding_state = STATE_NORMAL; -- break; -- } -- } -- return 1; -- } -+check_coding_spec(const char *line, Py_ssize_t size, struct tok_state *tok, -+ int set_readline(struct tok_state *, const char *)) { -+ char *cs; -+ if (tok->cont_line) { -+ /* It's a continuation line, so it can't be a coding spec. 
*/ - tok->decoding_state = STATE_NORMAL; -- if (tok->encoding == NULL) { -- assert(tok->decoding_readline == NULL); -- if (strcmp(cs, "utf-8") != 0 && !set_readline(tok, cs)) { -- error_ret(tok); -- PyErr_Format(PyExc_SyntaxError, "encoding problem: %s", cs); -- PyMem_Free(cs); -- return 0; -- } -- tok->encoding = cs; -- } else { /* then, compare cs with BOM */ -- if (strcmp(tok->encoding, cs) != 0) { -- error_ret(tok); -- PyErr_Format(PyExc_SyntaxError, -- "encoding problem: %s with BOM", cs); -- PyMem_Free(cs); -- return 0; -- } -- PyMem_Free(cs); -+ return 1; -+ } -+ if (!get_coding_spec(line, &cs, size, tok)) { -+ return 0; -+ } -+ if (!cs) { -+ Py_ssize_t i; -+ for (i = 0; i < size; i++) { -+ if (line[i] == '#' || line[i] == '\n' || line[i] == '\r') -+ break; -+ if (line[i] != ' ' && line[i] != '\t' && line[i] != '\014') { -+ /* Stop checking coding spec after a line containing -+ * anything except a comment. */ -+ tok->decoding_state = STATE_NORMAL; -+ break; -+ } - } - return 1; -+ } -+ tok->decoding_state = STATE_NORMAL; -+ if (tok->encoding == NULL) { -+ assert(tok->decoding_readline == NULL); -+ if (strcmp(cs, "utf-8") != 0 && !set_readline(tok, cs)) { -+ error_ret(tok); -+ PyErr_Format(PyExc_SyntaxError, "encoding problem: %s", cs); -+ PyMem_Free(cs); -+ return 0; -+ } -+ tok->encoding = cs; -+ } else { /* then, compare cs with BOM */ -+ if (strcmp(tok->encoding, cs) != 0) { -+ error_ret(tok); -+ PyErr_Format(PyExc_SyntaxError, "encoding problem: %s with BOM", cs); -+ PyMem_Free(cs); -+ return 0; -+ } -+ PyMem_Free(cs); -+ } -+ return 1; - } - - /* See whether the file starts with a BOM. If it does, - invoke the set_readline function with the new encoding. - Return 1 on success, 0 on failure. */ - --static int --check_bom(int get_char(struct tok_state *), -- void unget_char(int, struct tok_state *), -- int set_readline(struct tok_state *, const char *), -- struct tok_state *tok) --{ -- int ch1, ch2, ch3; -- ch1 = get_char(tok); -- tok->decoding_state = STATE_SEEK_CODING; -- if (ch1 == EOF) { -- return 1; -- } else if (ch1 == 0xEF) { -- ch2 = get_char(tok); -- if (ch2 != 0xBB) { -- unget_char(ch2, tok); -- unget_char(ch1, tok); -- return 1; -- } -- ch3 = get_char(tok); -- if (ch3 != 0xBF) { -- unget_char(ch3, tok); -- unget_char(ch2, tok); -- unget_char(ch1, tok); -- return 1; -- } -- } else { -- unget_char(ch1, tok); -- return 1; -- } -- if (tok->encoding != NULL) -- PyMem_Free(tok->encoding); -- tok->encoding = new_string("utf-8", 5, tok); -- if (!tok->encoding) -- return 0; -- /* No need to set_readline: input is already utf-8 */ -+static int check_bom(int get_char(struct tok_state *), -+ void unget_char(int, struct tok_state *), -+ int set_readline(struct tok_state *, const char *), -+ struct tok_state *tok) { -+ int ch1, ch2, ch3; -+ ch1 = get_char(tok); -+ tok->decoding_state = STATE_SEEK_CODING; -+ if (ch1 == EOF) { -+ return 1; -+ } else if (ch1 == 0xEF) { -+ ch2 = get_char(tok); -+ if (ch2 != 0xBB) { -+ unget_char(ch2, tok); -+ unget_char(ch1, tok); -+ return 1; -+ } -+ ch3 = get_char(tok); -+ if (ch3 != 0xBF) { -+ unget_char(ch3, tok); -+ unget_char(ch2, tok); -+ unget_char(ch1, tok); -+ return 1; -+ } -+ } else { -+ unget_char(ch1, tok); - return 1; -+ } -+ if (tok->encoding != NULL) -+ PyMem_Free(tok->encoding); -+ tok->encoding = new_string("utf-8", 5, tok); -+ if (!tok->encoding) -+ return 0; -+ /* No need to set_readline: input is already utf-8 */ -+ return 1; - } - --static int --tok_concatenate_interactive_new_line(struct tok_state *tok, const char *line) { -- 
assert(tok->fp_interactive); -- -- if (!line) { -- return 0; -- } -- -- Py_ssize_t current_size = tok->interactive_src_end - tok->interactive_src_start; -- Py_ssize_t line_size = strlen(line); -- char last_char = line[line_size > 0 ? line_size - 1 : line_size]; -- if (last_char != '\n') { -- line_size += 1; -- } -- char* new_str = tok->interactive_src_start; -+static int tok_concatenate_interactive_new_line(struct tok_state *tok, -+ const char *line) { -+ assert(tok->fp_interactive); - -- new_str = PyMem_Realloc(new_str, current_size + line_size + 1); -- if (!new_str) { -- if (tok->interactive_src_start) { -- PyMem_Free(tok->interactive_src_start); -- } -- tok->interactive_src_start = NULL; -- tok->interactive_src_end = NULL; -- tok->done = E_NOMEM; -- return -1; -- } -- strcpy(new_str + current_size, line); -- tok->implicit_newline = 0; -- if (last_char != '\n') { -- /* Last line does not end in \n, fake one */ -- new_str[current_size + line_size - 1] = '\n'; -- new_str[current_size + line_size] = '\0'; -- tok->implicit_newline = 1; -- } -- tok->interactive_src_start = new_str; -- tok->interactive_src_end = new_str + current_size + line_size; -+ if (!line) { - return 0; -+ } -+ -+ Py_ssize_t current_size = -+ tok->interactive_src_end - tok->interactive_src_start; -+ Py_ssize_t line_size = strlen(line); -+ char last_char = line[line_size > 0 ? line_size - 1 : line_size]; -+ if (last_char != '\n') { -+ line_size += 1; -+ } -+ char *new_str = tok->interactive_src_start; -+ -+ new_str = PyMem_Realloc(new_str, current_size + line_size + 1); -+ if (!new_str) { -+ if (tok->interactive_src_start) { -+ PyMem_Free(tok->interactive_src_start); -+ } -+ tok->interactive_src_start = NULL; -+ tok->interactive_src_end = NULL; -+ tok->done = E_NOMEM; -+ return -1; -+ } -+ strcpy(new_str + current_size, line); -+ tok->implicit_newline = 0; -+ if (last_char != '\n') { -+ /* Last line does not end in \n, fake one */ -+ new_str[current_size + line_size - 1] = '\n'; -+ new_str[current_size + line_size] = '\0'; -+ tok->implicit_newline = 1; -+ } -+ tok->interactive_src_start = new_str; -+ tok->interactive_src_end = new_str + current_size + line_size; -+ return 0; - } - - /* Traverse and remember all f-string buffers, in order to be able to restore - them after reallocating tok->buf */ --static void --remember_fstring_buffers(struct tok_state *tok) --{ -- int index; -- tokenizer_mode *mode; -+static void remember_fstring_buffers(struct tok_state *tok) { -+ int index; -+ tokenizer_mode *mode; - -- for (index = tok->tok_mode_stack_index; index >= 0; --index) { -- mode = &(tok->tok_mode_stack[index]); -- mode->f_string_start_offset = mode->f_string_start - tok->buf; -- mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf; -- } -+ for (index = tok->tok_mode_stack_index; index >= 0; --index) { -+ mode = &(tok->tok_mode_stack[index]); -+ mode->f_string_start_offset = mode->f_string_start - tok->buf; -+ mode->f_string_multi_line_start_offset = -+ mode->f_string_multi_line_start - tok->buf; -+ } - } - - /* Traverse and restore all f-string buffers after reallocating tok->buf */ --static void --restore_fstring_buffers(struct tok_state *tok) --{ -- int index; -- tokenizer_mode *mode; -+static void restore_fstring_buffers(struct tok_state *tok) { -+ int index; -+ tokenizer_mode *mode; - -- for (index = tok->tok_mode_stack_index; index >= 0; --index) { -- mode = &(tok->tok_mode_stack[index]); -- mode->f_string_start = tok->buf + mode->f_string_start_offset; -- mode->f_string_multi_line_start = 
tok->buf + mode->f_string_multi_line_start_offset; -- } -+ for (index = tok->tok_mode_stack_index; index >= 0; --index) { -+ mode = &(tok->tok_mode_stack[index]); -+ mode->f_string_start = tok->buf + mode->f_string_start_offset; -+ mode->f_string_multi_line_start = -+ tok->buf + mode->f_string_multi_line_start_offset; -+ } - } - --static int --set_fstring_expr(struct tok_state* tok, struct token *token, char c) { -- assert(token != NULL); -- assert(c == '}' || c == ':' || c == '!'); -- tokenizer_mode *tok_mode = TOK_GET_MODE(tok); -+static int set_fstring_expr(struct tok_state *tok, struct token *token, -+ char c) { -+ assert(token != NULL); -+ assert(c == '}' || c == ':' || c == '!'); -+ tokenizer_mode *tok_mode = TOK_GET_MODE(tok); - -- if (!tok_mode->f_string_debug || token->metadata) { -- return 0; -- } -+ if (!tok_mode->f_string_debug || token->metadata) { -+ return 0; -+ } - -- PyObject *res = NULL; -+ PyObject *res = NULL; - -- // Check if there is a # character in the expression -- int hash_detected = 0; -- for (Py_ssize_t i = 0; i < tok_mode->last_expr_size - tok_mode->last_expr_end; i++) { -- if (tok_mode->last_expr_buffer[i] == '#') { -- hash_detected = 1; -- break; -- } -+ // Check if there is a # character in the expression -+ int hash_detected = 0; -+ for (Py_ssize_t i = 0; i < tok_mode->last_expr_size - tok_mode->last_expr_end; -+ i++) { -+ if (tok_mode->last_expr_buffer[i] == '#') { -+ hash_detected = 1; -+ break; - } -+ } - -- if (hash_detected) { -- Py_ssize_t input_length = tok_mode->last_expr_size - tok_mode->last_expr_end; -- char *result = (char *)PyObject_Malloc((input_length + 1) * sizeof(char)); -- if (!result) { -- return -1; -- } -- -- Py_ssize_t i = 0; -- Py_ssize_t j = 0; -- -- for (i = 0, j = 0; i < input_length; i++) { -- if (tok_mode->last_expr_buffer[i] == '#') { -- // Skip characters until newline or end of string -- while (tok_mode->last_expr_buffer[i] != '\0' && i < input_length) { -- if (tok_mode->last_expr_buffer[i] == '\n') { -- result[j++] = tok_mode->last_expr_buffer[i]; -- break; -- } -- i++; -- } -- } else { -- result[j++] = tok_mode->last_expr_buffer[i]; -- } -- } -- -- result[j] = '\0'; // Null-terminate the result string -- res = PyUnicode_DecodeUTF8(result, j, NULL); -- PyObject_Free(result); -- } else { -- res = PyUnicode_DecodeUTF8( -- tok_mode->last_expr_buffer, -- tok_mode->last_expr_size - tok_mode->last_expr_end, -- NULL -- ); -- -+ if (hash_detected) { -+ Py_ssize_t input_length = -+ tok_mode->last_expr_size - tok_mode->last_expr_end; -+ char *result = (char *)PyObject_Malloc((input_length + 1) * sizeof(char)); -+ if (!result) { -+ return -1; - } - -+ Py_ssize_t i = 0; -+ Py_ssize_t j = 0; - -- if (!res) { -- return -1; -+ for (i = 0, j = 0; i < input_length; i++) { -+ if (tok_mode->last_expr_buffer[i] == '#') { -+ // Skip characters until newline or end of string -+ while (tok_mode->last_expr_buffer[i] != '\0' && i < input_length) { -+ if (tok_mode->last_expr_buffer[i] == '\n') { -+ result[j++] = tok_mode->last_expr_buffer[i]; -+ break; -+ } -+ i++; -+ } -+ } else { -+ result[j++] = tok_mode->last_expr_buffer[i]; -+ } - } -- token->metadata = res; -- return 0; --} - --static int --update_fstring_expr(struct tok_state *tok, char cur) --{ -- assert(tok->cur != NULL); -+ result[j] = '\0'; // Null-terminate the result string -+ res = PyUnicode_DecodeUTF8(result, j, NULL); -+ PyObject_Free(result); -+ } else { -+ res = PyUnicode_DecodeUTF8( -+ tok_mode->last_expr_buffer, -+ tok_mode->last_expr_size - tok_mode->last_expr_end, NULL); -+ } - 
-- Py_ssize_t size = strlen(tok->cur); -- tokenizer_mode *tok_mode = TOK_GET_MODE(tok); -- -- switch (cur) { -- case 0: -- if (!tok_mode->last_expr_buffer || tok_mode->last_expr_end >= 0) { -- return 1; -- } -- char *new_buffer = PyMem_Realloc( -- tok_mode->last_expr_buffer, -- tok_mode->last_expr_size + size -- ); -- if (new_buffer == NULL) { -- PyMem_Free(tok_mode->last_expr_buffer); -- goto error; -- } -- tok_mode->last_expr_buffer = new_buffer; -- strncpy(tok_mode->last_expr_buffer + tok_mode->last_expr_size, tok->cur, size); -- tok_mode->last_expr_size += size; -- break; -- case '{': -- if (tok_mode->last_expr_buffer != NULL) { -- PyMem_Free(tok_mode->last_expr_buffer); -- } -- tok_mode->last_expr_buffer = PyMem_Malloc(size); -- if (tok_mode->last_expr_buffer == NULL) { -- goto error; -- } -- tok_mode->last_expr_size = size; -- tok_mode->last_expr_end = -1; -- strncpy(tok_mode->last_expr_buffer, tok->cur, size); -- break; -- case '}': -- case '!': -- case ':': -- if (tok_mode->last_expr_end == -1) { -- tok_mode->last_expr_end = strlen(tok->start); -- } -- break; -- default: -- Py_UNREACHABLE(); -- } -- return 1; -+ if (!res) { -+ return -1; -+ } -+ token->metadata = res; -+ return 0; -+} -+ -+static int update_fstring_expr(struct tok_state *tok, char cur) { -+ assert(tok->cur != NULL); -+ -+ Py_ssize_t size = strlen(tok->cur); -+ tokenizer_mode *tok_mode = TOK_GET_MODE(tok); -+ -+ switch (cur) { -+ case 0: -+ if (!tok_mode->last_expr_buffer || tok_mode->last_expr_end >= 0) { -+ return 1; -+ } -+ char *new_buffer = PyMem_Realloc(tok_mode->last_expr_buffer, -+ tok_mode->last_expr_size + size); -+ if (new_buffer == NULL) { -+ PyMem_Free(tok_mode->last_expr_buffer); -+ goto error; -+ } -+ tok_mode->last_expr_buffer = new_buffer; -+ strncpy(tok_mode->last_expr_buffer + tok_mode->last_expr_size, tok->cur, -+ size); -+ tok_mode->last_expr_size += size; -+ break; -+ case '{': -+ if (tok_mode->last_expr_buffer != NULL) { -+ PyMem_Free(tok_mode->last_expr_buffer); -+ } -+ tok_mode->last_expr_buffer = PyMem_Malloc(size); -+ if (tok_mode->last_expr_buffer == NULL) { -+ goto error; -+ } -+ tok_mode->last_expr_size = size; -+ tok_mode->last_expr_end = -1; -+ strncpy(tok_mode->last_expr_buffer, tok->cur, size); -+ break; -+ case '}': -+ case '!': -+ case ':': -+ if (tok_mode->last_expr_end == -1) { -+ tok_mode->last_expr_end = strlen(tok->start); -+ } -+ break; -+ default: -+ Py_UNREACHABLE(); -+ } -+ return 1; - error: -- tok->done = E_NOMEM; -- return 0; -+ tok->done = E_NOMEM; -+ return 0; - } - --static void --free_fstring_expressions(struct tok_state *tok) --{ -- int index; -- tokenizer_mode *mode; -- -- for (index = tok->tok_mode_stack_index; index >= 0; --index) { -- mode = &(tok->tok_mode_stack[index]); -- if (mode->last_expr_buffer != NULL) { -- PyMem_Free(mode->last_expr_buffer); -- mode->last_expr_buffer = NULL; -- mode->last_expr_size = 0; -- mode->last_expr_end = -1; -- } -+static void free_fstring_expressions(struct tok_state *tok) { -+ int index; -+ tokenizer_mode *mode; -+ -+ for (index = tok->tok_mode_stack_index; index >= 0; --index) { -+ mode = &(tok->tok_mode_stack[index]); -+ if (mode->last_expr_buffer != NULL) { -+ PyMem_Free(mode->last_expr_buffer); -+ mode->last_expr_buffer = NULL; -+ mode->last_expr_size = 0; -+ mode->last_expr_end = -1; -+ mode->in_format_spec = 0; - } -+ } - } - - /* Read a line of text from TOK into S, using the stream in TOK. 
-@@ -539,88 +516,85 @@ - 1) NULL: need to call tok->decoding_readline to get a new line - 2) PyUnicodeObject *: decoding_feof has called tok->decoding_readline and - stored the result in tok->decoding_buffer -- 3) PyByteArrayObject *: previous call to tok_readline_recode did not have enough room -- (in the s buffer) to copy entire contents of the line read -- by tok->decoding_readline. tok->decoding_buffer has the overflow. -- In this case, tok_readline_recode is called in a loop (with an expanded buffer) -- until the buffer ends with a '\n' (or until the end of the file is -- reached): see tok_nextc and its calls to tok_reserve_buf. -+ 3) PyByteArrayObject *: previous call to tok_readline_recode did not have -+ enough room (in the s buffer) to copy entire contents of the line read by -+ tok->decoding_readline. tok->decoding_buffer has the overflow. In this case, -+ tok_readline_recode is called in a loop (with an expanded buffer) until the -+ buffer ends with a '\n' (or until the end of the file is reached): see -+ tok_nextc and its calls to tok_reserve_buf. - */ - --static int --tok_reserve_buf(struct tok_state *tok, Py_ssize_t size) --{ -- Py_ssize_t cur = tok->cur - tok->buf; -- Py_ssize_t oldsize = tok->inp - tok->buf; -- Py_ssize_t newsize = oldsize + Py_MAX(size, oldsize >> 1); -- if (newsize > tok->end - tok->buf) { -- char *newbuf = tok->buf; -- Py_ssize_t start = tok->start == NULL ? -1 : tok->start - tok->buf; -- Py_ssize_t line_start = tok->start == NULL ? -1 : tok->line_start - tok->buf; -- Py_ssize_t multi_line_start = tok->multi_line_start - tok->buf; -- remember_fstring_buffers(tok); -- newbuf = (char *)PyMem_Realloc(newbuf, newsize); -- if (newbuf == NULL) { -- tok->done = E_NOMEM; -- return 0; -- } -- tok->buf = newbuf; -- tok->cur = tok->buf + cur; -- tok->inp = tok->buf + oldsize; -- tok->end = tok->buf + newsize; -- tok->start = start < 0 ? NULL : tok->buf + start; -- tok->line_start = line_start < 0 ? NULL : tok->buf + line_start; -- tok->multi_line_start = multi_line_start < 0 ? NULL : tok->buf + multi_line_start; -- restore_fstring_buffers(tok); -- } -- return 1; --} -- --static inline int --contains_null_bytes(const char* str, size_t size) { -- return memchr(str, 0, size) != NULL; --} -- --static int --tok_readline_recode(struct tok_state *tok) { -- PyObject *line; -- const char *buf; -- Py_ssize_t buflen; -- line = tok->decoding_buffer; -+static int tok_reserve_buf(struct tok_state *tok, Py_ssize_t size) { -+ Py_ssize_t cur = tok->cur - tok->buf; -+ Py_ssize_t oldsize = tok->inp - tok->buf; -+ Py_ssize_t newsize = oldsize + Py_MAX(size, oldsize >> 1); -+ if (newsize > tok->end - tok->buf) { -+ char *newbuf = tok->buf; -+ Py_ssize_t start = tok->start == NULL ? -1 : tok->start - tok->buf; -+ Py_ssize_t line_start = -+ tok->start == NULL ? -1 : tok->line_start - tok->buf; -+ Py_ssize_t multi_line_start = tok->multi_line_start - tok->buf; -+ remember_fstring_buffers(tok); -+ newbuf = (char *)PyMem_Realloc(newbuf, newsize); -+ if (newbuf == NULL) { -+ tok->done = E_NOMEM; -+ return 0; -+ } -+ tok->buf = newbuf; -+ tok->cur = tok->buf + cur; -+ tok->inp = tok->buf + oldsize; -+ tok->end = tok->buf + newsize; -+ tok->start = start < 0 ? NULL : tok->buf + start; -+ tok->line_start = line_start < 0 ? NULL : tok->buf + line_start; -+ tok->multi_line_start = -+ multi_line_start < 0 ? 
NULL : tok->buf + multi_line_start; -+ restore_fstring_buffers(tok); -+ } -+ return 1; -+} -+ -+static inline int contains_null_bytes(const char *str, size_t size) { -+ return memchr(str, 0, size) != NULL; -+} -+ -+static int tok_readline_recode(struct tok_state *tok) { -+ PyObject *line; -+ const char *buf; -+ Py_ssize_t buflen; -+ line = tok->decoding_buffer; -+ if (line == NULL) { -+ line = PyObject_CallNoArgs(tok->decoding_readline); - if (line == NULL) { -- line = PyObject_CallNoArgs(tok->decoding_readline); -- if (line == NULL) { -- error_ret(tok); -- goto error; -- } -- } -- else { -- tok->decoding_buffer = NULL; -- } -- buf = PyUnicode_AsUTF8AndSize(line, &buflen); -- if (buf == NULL) { -- error_ret(tok); -- goto error; -- } -- // Make room for the null terminator *and* potentially -- // an extra newline character that we may need to artificially -- // add. -- size_t buffer_size = buflen + 2; -- if (!tok_reserve_buf(tok, buffer_size)) { -- goto error; -+ error_ret(tok); -+ goto error; - } -- memcpy(tok->inp, buf, buflen); -- tok->inp += buflen; -- *tok->inp = '\0'; -- if (tok->fp_interactive && -- tok_concatenate_interactive_new_line(tok, buf) == -1) { -- goto error; -- } -- Py_DECREF(line); -- return 1; -+ } else { -+ tok->decoding_buffer = NULL; -+ } -+ buf = PyUnicode_AsUTF8AndSize(line, &buflen); -+ if (buf == NULL) { -+ error_ret(tok); -+ goto error; -+ } -+ // Make room for the null terminator *and* potentially -+ // an extra newline character that we may need to artificially -+ // add. -+ size_t buffer_size = buflen + 2; -+ if (!tok_reserve_buf(tok, buffer_size)) { -+ goto error; -+ } -+ memcpy(tok->inp, buf, buflen); -+ tok->inp += buflen; -+ *tok->inp = '\0'; -+ if (tok->fp_interactive && -+ tok_concatenate_interactive_new_line(tok, buf) == -1) { -+ goto error; -+ } -+ Py_DECREF(line); -+ return 1; - error: -- Py_XDECREF(line); -- return 0; -+ Py_XDECREF(line); -+ return 0; - } - - /* Set the readline function for TOK to a StreamReader's -@@ -633,2383 +607,2293 @@ - - Return 1 on success, 0 on failure. */ - --static int --fp_setreadl(struct tok_state *tok, const char* enc) --{ -- PyObject *readline, *open, *stream; -- int fd; -- long pos; -- -- fd = fileno(tok->fp); -- /* Due to buffering the file offset for fd can be different from the file -- * position of tok->fp. If tok->fp was opened in text mode on Windows, -- * its file position counts CRLF as one char and can't be directly mapped -- * to the file offset for fd. Instead we step back one byte and read to -- * the end of line.*/ -- pos = ftell(tok->fp); -- if (pos == -1 || -- lseek(fd, (off_t)(pos > 0 ? pos - 1 : pos), SEEK_SET) == (off_t)-1) { -- PyErr_SetFromErrnoWithFilename(PyExc_OSError, NULL); -- return 0; -- } -+static int fp_setreadl(struct tok_state *tok, const char *enc) { -+ PyObject *readline, *open, *stream; -+ int fd; -+ long pos; -+ -+ fd = fileno(tok->fp); -+ /* Due to buffering the file offset for fd can be different from the file -+ * position of tok->fp. If tok->fp was opened in text mode on Windows, -+ * its file position counts CRLF as one char and can't be directly mapped -+ * to the file offset for fd. Instead we step back one byte and read to -+ * the end of line.*/ -+ pos = ftell(tok->fp); -+ if (pos == -1 || -+ lseek(fd, (off_t)(pos > 0 ? 
pos - 1 : pos), SEEK_SET) == (off_t)-1) { -+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, NULL); -+ return 0; -+ } - -- open = _PyImport_GetModuleAttrString("io", "open"); -- if (open == NULL) { -- return 0; -- } -- stream = PyObject_CallFunction(open, "isisOOO", -- fd, "r", -1, enc, Py_None, Py_None, Py_False); -- Py_DECREF(open); -- if (stream == NULL) { -- return 0; -- } -+ open = _PyImport_GetModuleAttrString("io", "open"); -+ if (open == NULL) { -+ return 0; -+ } -+ stream = PyObject_CallFunction(open, "isisOOO", fd, "r", -1, enc, Py_None, -+ Py_None, Py_False); -+ Py_DECREF(open); -+ if (stream == NULL) { -+ return 0; -+ } - -- readline = PyObject_GetAttr(stream, &_Py_ID(readline)); -- Py_DECREF(stream); -- if (readline == NULL) { -- return 0; -- } -- Py_XSETREF(tok->decoding_readline, readline); -+ readline = PyObject_GetAttr(stream, &_Py_ID(readline)); -+ Py_DECREF(stream); -+ if (readline == NULL) { -+ return 0; -+ } -+ Py_XSETREF(tok->decoding_readline, readline); - -- if (pos > 0) { -- PyObject *bufobj = _PyObject_CallNoArgs(readline); -- if (bufobj == NULL) { -- return 0; -- } -- Py_DECREF(bufobj); -+ if (pos > 0) { -+ PyObject *bufobj = _PyObject_CallNoArgs(readline); -+ if (bufobj == NULL) { -+ return 0; - } -+ Py_DECREF(bufobj); -+ } - -- return 1; -+ return 1; - } - - /* Fetch the next byte from TOK. */ - --static int fp_getc(struct tok_state *tok) { -- return getc(tok->fp); --} -+static int fp_getc(struct tok_state *tok) { return getc(tok->fp); } - - /* Unfetch the last byte back into TOK. */ - --static void fp_ungetc(int c, struct tok_state *tok) { -- ungetc(c, tok->fp); --} -+static void fp_ungetc(int c, struct tok_state *tok) { ungetc(c, tok->fp); } - - /* Check whether the characters at s start a valid - UTF-8 sequence. Return the number of characters forming - the sequence if yes, 0 if not. The special cases match - those in stringlib/codecs.h:utf8_decode. - */ --static int --valid_utf8(const unsigned char* s) --{ -- int expected = 0; -- int length; -- if (*s < 0x80) { -- /* single-byte code */ -- return 1; -- } -- else if (*s < 0xE0) { -- /* \xC2\x80-\xDF\xBF -- 0080-07FF */ -- if (*s < 0xC2) { -- /* invalid sequence -- \x80-\xBF -- continuation byte -- \xC0-\xC1 -- fake 0000-007F */ -- return 0; -- } -- expected = 1; -- } -- else if (*s < 0xF0) { -- /* \xE0\xA0\x80-\xEF\xBF\xBF -- 0800-FFFF */ -- if (*s == 0xE0 && *(s + 1) < 0xA0) { -- /* invalid sequence -- \xE0\x80\x80-\xE0\x9F\xBF -- fake 0000-0800 */ -- return 0; -- } -- else if (*s == 0xED && *(s + 1) >= 0xA0) { -- /* Decoding UTF-8 sequences in range \xED\xA0\x80-\xED\xBF\xBF -- will result in surrogates in range D800-DFFF. Surrogates are -- not valid UTF-8 so they are rejected. -- See https://www.unicode.org/versions/Unicode5.2.0/ch03.pdf -- (table 3-7) and http://www.rfc-editor.org/rfc/rfc3629.txt */ -- return 0; -- } -- expected = 2; -- } -- else if (*s < 0xF5) { -- /* \xF0\x90\x80\x80-\xF4\x8F\xBF\xBF -- 10000-10FFFF */ -- if (*(s + 1) < 0x90 ? 
*s == 0xF0 : *s == 0xF4) { -- /* invalid sequence -- one of: -- \xF0\x80\x80\x80-\xF0\x8F\xBF\xBF -- fake 0000-FFFF -- \xF4\x90\x80\x80- -- 110000- overflow */ -- return 0; -- } -- expected = 3; -- } -- else { -- /* invalid start byte */ -- return 0; -- } -- length = expected + 1; -- for (; expected; expected--) -- if (s[expected] < 0x80 || s[expected] >= 0xC0) -- return 0; -- return length; --} -- --static int --ensure_utf8(char *line, struct tok_state *tok) --{ -- int badchar = 0; -- unsigned char *c; -- int length; -- for (c = (unsigned char *)line; *c; c += length) { -- if (!(length = valid_utf8(c))) { -- badchar = *c; -- break; -- } -- } -- if (badchar) { -- PyErr_Format(PyExc_SyntaxError, -- "Non-UTF-8 code starting with '\\x%.2x' " -- "in file %U on line %i, " -- "but no encoding declared; " -- "see https://peps.python.org/pep-0263/ for details", -- badchar, tok->filename, tok->lineno); -- return 0; -- } -+static int valid_utf8(const unsigned char *s) { -+ int expected = 0; -+ int length; -+ if (*s < 0x80) { -+ /* single-byte code */ - return 1; -+ } else if (*s < 0xE0) { -+ /* \xC2\x80-\xDF\xBF -- 0080-07FF */ -+ if (*s < 0xC2) { -+ /* invalid sequence -+ \x80-\xBF -- continuation byte -+ \xC0-\xC1 -- fake 0000-007F */ -+ return 0; -+ } -+ expected = 1; -+ } else if (*s < 0xF0) { -+ /* \xE0\xA0\x80-\xEF\xBF\xBF -- 0800-FFFF */ -+ if (*s == 0xE0 && *(s + 1) < 0xA0) { -+ /* invalid sequence -+ \xE0\x80\x80-\xE0\x9F\xBF -- fake 0000-0800 */ -+ return 0; -+ } else if (*s == 0xED && *(s + 1) >= 0xA0) { -+ /* Decoding UTF-8 sequences in range \xED\xA0\x80-\xED\xBF\xBF -+ will result in surrogates in range D800-DFFF. Surrogates are -+ not valid UTF-8 so they are rejected. -+ See https://www.unicode.org/versions/Unicode5.2.0/ch03.pdf -+ (table 3-7) and http://www.rfc-editor.org/rfc/rfc3629.txt */ -+ return 0; -+ } -+ expected = 2; -+ } else if (*s < 0xF5) { -+ /* \xF0\x90\x80\x80-\xF4\x8F\xBF\xBF -- 10000-10FFFF */ -+ if (*(s + 1) < 0x90 ? *s == 0xF0 : *s == 0xF4) { -+ /* invalid sequence -- one of: -+ \xF0\x80\x80\x80-\xF0\x8F\xBF\xBF -- fake 0000-FFFF -+ \xF4\x90\x80\x80- -- 110000- overflow */ -+ return 0; -+ } -+ expected = 3; -+ } else { -+ /* invalid start byte */ -+ return 0; -+ } -+ length = expected + 1; -+ for (; expected; expected--) -+ if (s[expected] < 0x80 || s[expected] >= 0xC0) -+ return 0; -+ return length; -+} -+ -+static int ensure_utf8(char *line, struct tok_state *tok) { -+ int badchar = 0; -+ unsigned char *c; -+ int length; -+ for (c = (unsigned char *)line; *c; c += length) { -+ if (!(length = valid_utf8(c))) { -+ badchar = *c; -+ break; -+ } -+ } -+ if (badchar) { -+ PyErr_Format(PyExc_SyntaxError, -+ "Non-UTF-8 code starting with '\\x%.2x' " -+ "in file %U on line %i, " -+ "but no encoding declared; " -+ "see https://peps.python.org/pep-0263/ for details", -+ badchar, tok->filename, tok->lineno); -+ return 0; -+ } -+ return 1; - } - - /* Fetch a byte from TOK, using the string buffer. */ - --static int --buf_getc(struct tok_state *tok) { -- return Py_CHARMASK(*tok->str++); --} -+static int buf_getc(struct tok_state *tok) { return Py_CHARMASK(*tok->str++); } - - /* Unfetch a byte from TOK, using the string buffer. 
*/ - --static void --buf_ungetc(int c, struct tok_state *tok) { -- tok->str--; -- assert(Py_CHARMASK(*tok->str) == c); /* tok->cur may point to read-only segment */ -+static void buf_ungetc(int c, struct tok_state *tok) { -+ tok->str--; -+ assert(Py_CHARMASK(*tok->str) == -+ c); /* tok->cur may point to read-only segment */ - } - - /* Set the readline function for TOK to ENC. For the string-based - tokenizer, this means to just record the encoding. */ - --static int --buf_setreadl(struct tok_state *tok, const char* enc) { -- tok->enc = enc; -- return 1; -+static int buf_setreadl(struct tok_state *tok, const char *enc) { -+ tok->enc = enc; -+ return 1; - } - - /* Return a UTF-8 encoding Python string object from the - C byte string STR, which is encoded with ENC. */ - --static PyObject * --translate_into_utf8(const char* str, const char* enc) { -- PyObject *utf8; -- PyObject* buf = PyUnicode_Decode(str, strlen(str), enc, NULL); -- if (buf == NULL) -- return NULL; -- utf8 = PyUnicode_AsUTF8String(buf); -- Py_DECREF(buf); -- return utf8; --} -- -- --static char * --translate_newlines(const char *s, int exec_input, int preserve_crlf, -- struct tok_state *tok) { -- int skip_next_lf = 0; -- size_t needed_length = strlen(s) + 2, final_length; -- char *buf, *current; -- char c = '\0'; -- buf = PyMem_Malloc(needed_length); -- if (buf == NULL) { -- tok->done = E_NOMEM; -- return NULL; -- } -- for (current = buf; *s; s++, current++) { -- c = *s; -- if (skip_next_lf) { -- skip_next_lf = 0; -- if (c == '\n') { -- c = *++s; -- if (!c) -- break; -- } -- } -- if (!preserve_crlf && c == '\r') { -- skip_next_lf = 1; -- c = '\n'; -- } -- *current = c; -- } -- /* If this is exec input, add a newline to the end of the string if -- there isn't one already. */ -- if (exec_input && c != '\n' && c != '\0') { -- *current = '\n'; -- current++; -- } -- *current = '\0'; -- final_length = current - buf + 1; -- if (final_length < needed_length && final_length) { -- /* should never fail */ -- char* result = PyMem_Realloc(buf, final_length); -- if (result == NULL) { -- PyMem_Free(buf); -- } -- buf = result; -- } -- return buf; -+static PyObject *translate_into_utf8(const char *str, const char *enc) { -+ PyObject *utf8; -+ PyObject *buf = PyUnicode_Decode(str, strlen(str), enc, NULL); -+ if (buf == NULL) -+ return NULL; -+ utf8 = PyUnicode_AsUTF8String(buf); -+ Py_DECREF(buf); -+ return utf8; -+} -+ -+static char *translate_newlines(const char *s, int exec_input, -+ int preserve_crlf, struct tok_state *tok) { -+ int skip_next_lf = 0; -+ size_t needed_length = strlen(s) + 2, final_length; -+ char *buf, *current; -+ char c = '\0'; -+ buf = PyMem_Malloc(needed_length); -+ if (buf == NULL) { -+ tok->done = E_NOMEM; -+ return NULL; -+ } -+ for (current = buf; *s; s++, current++) { -+ c = *s; -+ if (skip_next_lf) { -+ skip_next_lf = 0; -+ if (c == '\n') { -+ c = *++s; -+ if (!c) -+ break; -+ } -+ } -+ if (!preserve_crlf && c == '\r') { -+ skip_next_lf = 1; -+ c = '\n'; -+ } -+ *current = c; -+ } -+ /* If this is exec input, add a newline to the end of the string if -+ there isn't one already. */ -+ if (exec_input && c != '\n' && c != '\0') { -+ *current = '\n'; -+ current++; -+ } -+ *current = '\0'; -+ final_length = current - buf + 1; -+ if (final_length < needed_length && final_length) { -+ /* should never fail */ -+ char *result = PyMem_Realloc(buf, final_length); -+ if (result == NULL) { -+ PyMem_Free(buf); -+ } -+ buf = result; -+ } -+ return buf; - } - - /* Decode a byte string STR for use as the buffer of TOK. 
- Look for encoding declarations inside STR, and record them - inside TOK. */ - --static char * --decode_str(const char *input, int single, struct tok_state *tok, int preserve_crlf) --{ -- PyObject* utf8 = NULL; -- char *str; -- const char *s; -- const char *newl[2] = {NULL, NULL}; -- int lineno = 0; -- tok->input = str = translate_newlines(input, single, preserve_crlf, tok); -- if (str == NULL) -- return NULL; -- tok->enc = NULL; -- tok->str = str; -- if (!check_bom(buf_getc, buf_ungetc, buf_setreadl, tok)) -- return error_ret(tok); -- str = tok->str; /* string after BOM if any */ -- assert(str); -- if (tok->enc != NULL) { -- utf8 = translate_into_utf8(str, tok->enc); -- if (utf8 == NULL) -- return error_ret(tok); -- str = PyBytes_AsString(utf8); -- } -- for (s = str;; s++) { -- if (*s == '\0') break; -- else if (*s == '\n') { -- assert(lineno < 2); -- newl[lineno] = s; -- lineno++; -- if (lineno == 2) break; -- } -+static char *decode_str(const char *input, int single, struct tok_state *tok, -+ int preserve_crlf) { -+ PyObject *utf8 = NULL; -+ char *str; -+ const char *s; -+ const char *newl[2] = {NULL, NULL}; -+ int lineno = 0; -+ tok->input = str = translate_newlines(input, single, preserve_crlf, tok); -+ if (str == NULL) -+ return NULL; -+ tok->enc = NULL; -+ tok->str = str; -+ if (!check_bom(buf_getc, buf_ungetc, buf_setreadl, tok)) -+ return error_ret(tok); -+ str = tok->str; /* string after BOM if any */ -+ assert(str); -+ if (tok->enc != NULL) { -+ utf8 = translate_into_utf8(str, tok->enc); -+ if (utf8 == NULL) -+ return error_ret(tok); -+ str = PyBytes_AsString(utf8); -+ } -+ for (s = str;; s++) { -+ if (*s == '\0') -+ break; -+ else if (*s == '\n') { -+ assert(lineno < 2); -+ newl[lineno] = s; -+ lineno++; -+ if (lineno == 2) -+ break; - } -- tok->enc = NULL; -- /* need to check line 1 and 2 separately since check_coding_spec -- assumes a single line as input */ -- if (newl[0]) { -- if (!check_coding_spec(str, newl[0] - str, tok, buf_setreadl)) { -- return NULL; -- } -- if (tok->enc == NULL && tok->decoding_state != STATE_NORMAL && newl[1]) { -- if (!check_coding_spec(newl[0]+1, newl[1] - newl[0], -- tok, buf_setreadl)) -- return NULL; -- } -+ } -+ tok->enc = NULL; -+ /* need to check line 1 and 2 separately since check_coding_spec -+ assumes a single line as input */ -+ if (newl[0]) { -+ if (!check_coding_spec(str, newl[0] - str, tok, buf_setreadl)) { -+ return NULL; - } -- if (tok->enc != NULL) { -- assert(utf8 == NULL); -- utf8 = translate_into_utf8(str, tok->enc); -- if (utf8 == NULL) -- return error_ret(tok); -- str = PyBytes_AS_STRING(utf8); -+ if (tok->enc == NULL && tok->decoding_state != STATE_NORMAL && newl[1]) { -+ if (!check_coding_spec(newl[0] + 1, newl[1] - newl[0], tok, buf_setreadl)) -+ return NULL; - } -- assert(tok->decoding_buffer == NULL); -- tok->decoding_buffer = utf8; /* CAUTION */ -- return str; -+ } -+ if (tok->enc != NULL) { -+ assert(utf8 == NULL); -+ utf8 = translate_into_utf8(str, tok->enc); -+ if (utf8 == NULL) -+ return error_ret(tok); -+ str = PyBytes_AS_STRING(utf8); -+ } -+ assert(tok->decoding_buffer == NULL); -+ tok->decoding_buffer = utf8; /* CAUTION */ -+ return str; - } - - /* Set up tokenizer for string */ - --struct tok_state * --_PyTokenizer_FromString(const char *str, int exec_input, int preserve_crlf) --{ -- struct tok_state *tok = tok_new(); -- char *decoded; -+struct tok_state *_PyTokenizer_FromString(const char *str, int exec_input, -+ int preserve_crlf) { -+ struct tok_state *tok = tok_new(); -+ char *decoded; - -- if (tok == NULL) 
-- return NULL; -- decoded = decode_str(str, exec_input, tok, preserve_crlf); -- if (decoded == NULL) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -+ if (tok == NULL) -+ return NULL; -+ decoded = decode_str(str, exec_input, tok, preserve_crlf); -+ if (decoded == NULL) { -+ _PyTokenizer_Free(tok); -+ return NULL; -+ } - -- tok->buf = tok->cur = tok->inp = decoded; -- tok->end = decoded; -- return tok; -+ tok->buf = tok->cur = tok->inp = decoded; -+ tok->end = decoded; -+ return tok; - } - --struct tok_state * --_PyTokenizer_FromReadline(PyObject* readline, const char* enc, -- int exec_input, int preserve_crlf) --{ -- struct tok_state *tok = tok_new(); -- if (tok == NULL) -- return NULL; -- if ((tok->buf = (char *)PyMem_Malloc(BUFSIZ)) == NULL) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -- tok->cur = tok->inp = tok->buf; -- tok->end = tok->buf + BUFSIZ; -- tok->fp = NULL; -- if (enc != NULL) { -- tok->encoding = new_string(enc, strlen(enc), tok); -- if (!tok->encoding) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -+struct tok_state *_PyTokenizer_FromReadline(PyObject *readline, const char *enc, -+ int exec_input, int preserve_crlf) { -+ struct tok_state *tok = tok_new(); -+ if (tok == NULL) -+ return NULL; -+ if ((tok->buf = (char *)PyMem_Malloc(BUFSIZ)) == NULL) { -+ _PyTokenizer_Free(tok); -+ return NULL; -+ } -+ tok->cur = tok->inp = tok->buf; -+ tok->end = tok->buf + BUFSIZ; -+ tok->fp = NULL; -+ if (enc != NULL) { -+ tok->encoding = new_string(enc, strlen(enc), tok); -+ if (!tok->encoding) { -+ _PyTokenizer_Free(tok); -+ return NULL; - } -- tok->decoding_state = STATE_NORMAL; -- Py_INCREF(readline); -- tok->readline = readline; -- return tok; -+ } -+ tok->decoding_state = STATE_NORMAL; -+ Py_INCREF(readline); -+ tok->readline = readline; -+ return tok; - } - - /* Set up tokenizer for UTF-8 string */ - --struct tok_state * --_PyTokenizer_FromUTF8(const char *str, int exec_input, int preserve_crlf) --{ -- struct tok_state *tok = tok_new(); -- char *translated; -- if (tok == NULL) -- return NULL; -- tok->input = translated = translate_newlines(str, exec_input, preserve_crlf, tok); -- if (translated == NULL) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -- tok->decoding_state = STATE_NORMAL; -- tok->enc = NULL; -- tok->str = translated; -- tok->encoding = new_string("utf-8", 5, tok); -- if (!tok->encoding) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -+struct tok_state *_PyTokenizer_FromUTF8(const char *str, int exec_input, -+ int preserve_crlf) { -+ struct tok_state *tok = tok_new(); -+ char *translated; -+ if (tok == NULL) -+ return NULL; -+ tok->input = translated = -+ translate_newlines(str, exec_input, preserve_crlf, tok); -+ if (translated == NULL) { -+ _PyTokenizer_Free(tok); -+ return NULL; -+ } -+ tok->decoding_state = STATE_NORMAL; -+ tok->enc = NULL; -+ tok->str = translated; -+ tok->encoding = new_string("utf-8", 5, tok); -+ if (!tok->encoding) { -+ _PyTokenizer_Free(tok); -+ return NULL; -+ } - -- tok->buf = tok->cur = tok->inp = translated; -- tok->end = translated; -- return tok; -+ tok->buf = tok->cur = tok->inp = translated; -+ tok->end = translated; -+ return tok; - } - - /* Set up tokenizer for file */ - --struct tok_state * --_PyTokenizer_FromFile(FILE *fp, const char* enc, -- const char *ps1, const char *ps2) --{ -- struct tok_state *tok = tok_new(); -- if (tok == NULL) -- return NULL; -- if ((tok->buf = (char *)PyMem_Malloc(BUFSIZ)) == NULL) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -- tok->cur = tok->inp = tok->buf; -- tok->end = 
tok->buf + BUFSIZ; -- tok->fp = fp; -- tok->prompt = ps1; -- tok->nextprompt = ps2; -- if (enc != NULL) { -- /* Must copy encoding declaration since it -- gets copied into the parse tree. */ -- tok->encoding = new_string(enc, strlen(enc), tok); -- if (!tok->encoding) { -- _PyTokenizer_Free(tok); -- return NULL; -- } -- tok->decoding_state = STATE_NORMAL; -+struct tok_state *_PyTokenizer_FromFile(FILE *fp, const char *enc, -+ const char *ps1, const char *ps2) { -+ struct tok_state *tok = tok_new(); -+ if (tok == NULL) -+ return NULL; -+ if ((tok->buf = (char *)PyMem_Malloc(BUFSIZ)) == NULL) { -+ _PyTokenizer_Free(tok); -+ return NULL; -+ } -+ tok->cur = tok->inp = tok->buf; -+ tok->end = tok->buf + BUFSIZ; -+ tok->fp = fp; -+ tok->prompt = ps1; -+ tok->nextprompt = ps2; -+ if (enc != NULL) { -+ /* Must copy encoding declaration since it -+ gets copied into the parse tree. */ -+ tok->encoding = new_string(enc, strlen(enc), tok); -+ if (!tok->encoding) { -+ _PyTokenizer_Free(tok); -+ return NULL; - } -- return tok; -+ tok->decoding_state = STATE_NORMAL; -+ } -+ return tok; - } - - /* Free a tok_state structure */ - --void --_PyTokenizer_Free(struct tok_state *tok) --{ -- if (tok->encoding != NULL) { -- PyMem_Free(tok->encoding); -- } -- Py_XDECREF(tok->decoding_readline); -- Py_XDECREF(tok->decoding_buffer); -- Py_XDECREF(tok->readline); -- Py_XDECREF(tok->filename); -- if ((tok->readline != NULL || tok->fp != NULL ) && tok->buf != NULL) { -- PyMem_Free(tok->buf); -+void _PyTokenizer_Free(struct tok_state *tok) { -+ if (tok->encoding != NULL) { -+ PyMem_Free(tok->encoding); -+ } -+ Py_XDECREF(tok->decoding_readline); -+ Py_XDECREF(tok->decoding_buffer); -+ Py_XDECREF(tok->readline); -+ Py_XDECREF(tok->filename); -+ if ((tok->readline != NULL || tok->fp != NULL) && tok->buf != NULL) { -+ PyMem_Free(tok->buf); -+ } -+ if (tok->input) { -+ PyMem_Free(tok->input); -+ } -+ if (tok->interactive_src_start != NULL) { -+ PyMem_Free(tok->interactive_src_start); -+ } -+ free_fstring_expressions(tok); -+ PyMem_Free(tok); -+} -+ -+void _PyToken_Free(struct token *token) { Py_XDECREF(token->metadata); } -+ -+void _PyToken_Init(struct token *token) { token->metadata = NULL; } -+ -+static int tok_readline_raw(struct tok_state *tok) { -+ do { -+ if (!tok_reserve_buf(tok, BUFSIZ)) { -+ return 0; -+ } -+ int n_chars = (int)(tok->end - tok->inp); -+ size_t line_size = 0; -+ char *line = _Py_UniversalNewlineFgetsWithSize(tok->inp, n_chars, tok->fp, -+ NULL, &line_size); -+ if (line == NULL) { -+ return 1; - } -- if (tok->input) { -- PyMem_Free(tok->input); -+ if (tok->fp_interactive && -+ tok_concatenate_interactive_new_line(tok, line) == -1) { -+ return 0; -+ } -+ tok->inp += line_size; -+ if (tok->inp == tok->buf) { -+ return 0; -+ } -+ } while (tok->inp[-1] != '\n'); -+ return 1; -+} -+ -+static int tok_readline_string(struct tok_state *tok) { -+ PyObject *line = NULL; -+ PyObject *raw_line = PyObject_CallNoArgs(tok->readline); -+ if (raw_line == NULL) { -+ if (PyErr_ExceptionMatches(PyExc_StopIteration)) { -+ PyErr_Clear(); -+ return 1; -+ } -+ error_ret(tok); -+ goto error; -+ } -+ if (tok->encoding != NULL) { -+ if (!PyBytes_Check(raw_line)) { -+ PyErr_Format(PyExc_TypeError, "readline() returned a non-bytes object"); -+ error_ret(tok); -+ goto error; -+ } -+ line = -+ PyUnicode_Decode(PyBytes_AS_STRING(raw_line), -+ PyBytes_GET_SIZE(raw_line), tok->encoding, "replace"); -+ Py_CLEAR(raw_line); -+ if (line == NULL) { -+ error_ret(tok); -+ goto error; -+ } -+ } else { -+ if (!PyUnicode_Check(raw_line)) { -+ 
PyErr_Format(PyExc_TypeError, "readline() returned a non-string object"); -+ error_ret(tok); -+ goto error; -+ } -+ line = raw_line; -+ raw_line = NULL; -+ } -+ Py_ssize_t buflen; -+ const char *buf = PyUnicode_AsUTF8AndSize(line, &buflen); -+ if (buf == NULL) { -+ error_ret(tok); -+ goto error; -+ } -+ -+ // Make room for the null terminator *and* potentially -+ // an extra newline character that we may need to artificially -+ // add. -+ size_t buffer_size = buflen + 2; -+ if (!tok_reserve_buf(tok, buffer_size)) { -+ goto error; -+ } -+ memcpy(tok->inp, buf, buflen); -+ tok->inp += buflen; -+ *tok->inp = '\0'; -+ -+ tok->line_start = tok->cur; -+ Py_DECREF(line); -+ return 1; -+error: -+ Py_XDECREF(raw_line); -+ Py_XDECREF(line); -+ return 0; -+} -+ -+static int tok_underflow_string(struct tok_state *tok) { -+ char *end = strchr(tok->inp, '\n'); -+ if (end != NULL) { -+ end++; -+ } else { -+ end = strchr(tok->inp, '\0'); -+ if (end == tok->inp) { -+ tok->done = E_EOF; -+ return 0; -+ } -+ } -+ if (tok->start == NULL) { -+ tok->buf = tok->cur; -+ } -+ tok->line_start = tok->cur; -+ ADVANCE_LINENO(); -+ tok->inp = end; -+ return 1; -+} -+ -+static int tok_underflow_interactive(struct tok_state *tok) { -+ if (tok->interactive_underflow == IUNDERFLOW_STOP) { -+ tok->done = E_INTERACT_STOP; -+ return 1; -+ } -+ char *newtok = PyOS_Readline(tok->fp ? tok->fp : stdin, stdout, tok->prompt); -+ if (newtok != NULL) { -+ char *translated = translate_newlines(newtok, 0, 0, tok); -+ PyMem_Free(newtok); -+ if (translated == NULL) { -+ return 0; - } -- if (tok->interactive_src_start != NULL) { -- PyMem_Free(tok->interactive_src_start); -+ newtok = translated; -+ } -+ if (tok->encoding && newtok && *newtok) { -+ /* Recode to UTF-8 */ -+ Py_ssize_t buflen; -+ const char *buf; -+ PyObject *u = translate_into_utf8(newtok, tok->encoding); -+ PyMem_Free(newtok); -+ if (u == NULL) { -+ tok->done = E_DECODE; -+ return 0; -+ } -+ buflen = PyBytes_GET_SIZE(u); -+ buf = PyBytes_AS_STRING(u); -+ newtok = PyMem_Malloc(buflen + 1); -+ if (newtok == NULL) { -+ Py_DECREF(u); -+ tok->done = E_NOMEM; -+ return 0; -+ } -+ strcpy(newtok, buf); -+ Py_DECREF(u); -+ } -+ if (tok->fp_interactive && -+ tok_concatenate_interactive_new_line(tok, newtok) == -1) { -+ PyMem_Free(newtok); -+ return 0; -+ } -+ if (tok->nextprompt != NULL) { -+ tok->prompt = tok->nextprompt; -+ } -+ if (newtok == NULL) { -+ tok->done = E_INTR; -+ } else if (*newtok == '\0') { -+ PyMem_Free(newtok); -+ tok->done = E_EOF; -+ } else if (tok->start != NULL) { -+ Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf; -+ remember_fstring_buffers(tok); -+ size_t size = strlen(newtok); -+ ADVANCE_LINENO(); -+ if (!tok_reserve_buf(tok, size + 1)) { -+ PyMem_Free(tok->buf); -+ tok->buf = NULL; -+ PyMem_Free(newtok); -+ return 0; -+ } -+ memcpy(tok->cur, newtok, size + 1); -+ PyMem_Free(newtok); -+ tok->inp += size; -+ tok->multi_line_start = tok->buf + cur_multi_line_start; -+ restore_fstring_buffers(tok); -+ } else { -+ remember_fstring_buffers(tok); -+ ADVANCE_LINENO(); -+ PyMem_Free(tok->buf); -+ tok->buf = newtok; -+ tok->cur = tok->buf; -+ tok->line_start = tok->buf; -+ tok->inp = strchr(tok->buf, '\0'); -+ tok->end = tok->inp + 1; -+ restore_fstring_buffers(tok); -+ } -+ if (tok->done != E_OK) { -+ if (tok->prompt != NULL) { -+ PySys_WriteStderr("\n"); - } -- free_fstring_expressions(tok); -- PyMem_Free(tok); --} -+ return 0; -+ } - --void --_PyToken_Free(struct token *token) { -- Py_XDECREF(token->metadata); --} -- --void 
--_PyToken_Init(struct token *token) { -- token->metadata = NULL; --} -- --static int --tok_readline_raw(struct tok_state *tok) --{ -- do { -- if (!tok_reserve_buf(tok, BUFSIZ)) { -- return 0; -- } -- int n_chars = (int)(tok->end - tok->inp); -- size_t line_size = 0; -- char *line = _Py_UniversalNewlineFgetsWithSize(tok->inp, n_chars, tok->fp, NULL, &line_size); -- if (line == NULL) { -- return 1; -- } -- if (tok->fp_interactive && -- tok_concatenate_interactive_new_line(tok, line) == -1) { -- return 0; -- } -- tok->inp += line_size; -- if (tok->inp == tok->buf) { -- return 0; -- } -- } while (tok->inp[-1] != '\n'); -- return 1; -+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { -+ return 0; -+ } -+ return 1; - } - --static int --tok_readline_string(struct tok_state* tok) { -- PyObject* line = NULL; -- PyObject* raw_line = PyObject_CallNoArgs(tok->readline); -- if (raw_line == NULL) { -- if (PyErr_ExceptionMatches(PyExc_StopIteration)) { -- PyErr_Clear(); -- return 1; -- } -- error_ret(tok); -- goto error; -- } -- if(tok->encoding != NULL) { -- if (!PyBytes_Check(raw_line)) { -- PyErr_Format(PyExc_TypeError, "readline() returned a non-bytes object"); -- error_ret(tok); -- goto error; -- } -- line = PyUnicode_Decode(PyBytes_AS_STRING(raw_line), PyBytes_GET_SIZE(raw_line), -- tok->encoding, "replace"); -- Py_CLEAR(raw_line); -- if (line == NULL) { -- error_ret(tok); -- goto error; -- } -- } else { -- if(!PyUnicode_Check(raw_line)) { -- PyErr_Format(PyExc_TypeError, "readline() returned a non-string object"); -- error_ret(tok); -- goto error; -- } -- line = raw_line; -- raw_line = NULL; -- } -- Py_ssize_t buflen; -- const char* buf = PyUnicode_AsUTF8AndSize(line, &buflen); -- if (buf == NULL) { -- error_ret(tok); -- goto error; -- } -- -- // Make room for the null terminator *and* potentially -- // an extra newline character that we may need to artificially -- // add. -- size_t buffer_size = buflen + 2; -- if (!tok_reserve_buf(tok, buffer_size)) { -- goto error; -- } -- memcpy(tok->inp, buf, buflen); -- tok->inp += buflen; -+static int tok_underflow_file(struct tok_state *tok) { -+ if (tok->start == NULL && !INSIDE_FSTRING(tok)) { -+ tok->cur = tok->inp = tok->buf; -+ } -+ if (tok->decoding_state == STATE_INIT) { -+ /* We have not yet determined the encoding. -+ If an encoding is found, use the file-pointer -+ reader functions from now on. */ -+ if (!check_bom(fp_getc, fp_ungetc, fp_setreadl, tok)) { -+ error_ret(tok); -+ return 0; -+ } -+ assert(tok->decoding_state != STATE_INIT); -+ } -+ /* Read until '\n' or EOF */ -+ if (tok->decoding_readline != NULL) { -+ /* We already have a codec associated with this input. */ -+ if (!tok_readline_recode(tok)) { -+ return 0; -+ } -+ } else { -+ /* We want a 'raw' read. 
*/ -+ if (!tok_readline_raw(tok)) { -+ return 0; -+ } -+ } -+ if (tok->inp == tok->cur) { -+ tok->done = E_EOF; -+ return 0; -+ } -+ tok->implicit_newline = 0; -+ if (tok->inp[-1] != '\n') { -+ assert(tok->inp + 1 < tok->end); -+ /* Last line does not end in \n, fake one */ -+ *tok->inp++ = '\n'; - *tok->inp = '\0'; -+ tok->implicit_newline = 1; -+ } - -- tok->line_start = tok->cur; -- Py_DECREF(line); -- return 1; --error: -- Py_XDECREF(raw_line); -- Py_XDECREF(line); -+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { - return 0; -+ } -+ -+ ADVANCE_LINENO(); -+ if (tok->decoding_state != STATE_NORMAL) { -+ if (tok->lineno > 2) { -+ tok->decoding_state = STATE_NORMAL; -+ } else if (!check_coding_spec(tok->cur, strlen(tok->cur), tok, -+ fp_setreadl)) { -+ return 0; -+ } -+ } -+ /* The default encoding is UTF-8, so make sure we don't have any -+ non-UTF-8 sequences in it. */ -+ if (!tok->encoding && !ensure_utf8(tok->cur, tok)) { -+ error_ret(tok); -+ return 0; -+ } -+ assert(tok->done == E_OK); -+ return tok->done == E_OK; - } - --static int --tok_underflow_string(struct tok_state *tok) { -- char *end = strchr(tok->inp, '\n'); -- if (end != NULL) { -- end++; -- } -- else { -- end = strchr(tok->inp, '\0'); -- if (end == tok->inp) { -- tok->done = E_EOF; -- return 0; -- } -- } -- if (tok->start == NULL) { -- tok->buf = tok->cur; -- } -- tok->line_start = tok->cur; -- ADVANCE_LINENO(); -- tok->inp = end; -- return 1; --} -- --static int --tok_underflow_interactive(struct tok_state *tok) { -- if (tok->interactive_underflow == IUNDERFLOW_STOP) { -- tok->done = E_INTERACT_STOP; -- return 1; -- } -- char *newtok = PyOS_Readline(tok->fp ? tok->fp : stdin, stdout, tok->prompt); -- if (newtok != NULL) { -- char *translated = translate_newlines(newtok, 0, 0, tok); -- PyMem_Free(newtok); -- if (translated == NULL) { -- return 0; -- } -- newtok = translated; -- } -- if (tok->encoding && newtok && *newtok) { -- /* Recode to UTF-8 */ -- Py_ssize_t buflen; -- const char* buf; -- PyObject *u = translate_into_utf8(newtok, tok->encoding); -- PyMem_Free(newtok); -- if (u == NULL) { -- tok->done = E_DECODE; -- return 0; -- } -- buflen = PyBytes_GET_SIZE(u); -- buf = PyBytes_AS_STRING(u); -- newtok = PyMem_Malloc(buflen+1); -- if (newtok == NULL) { -- Py_DECREF(u); -- tok->done = E_NOMEM; -- return 0; -- } -- strcpy(newtok, buf); -- Py_DECREF(u); -- } -- if (tok->fp_interactive && -- tok_concatenate_interactive_new_line(tok, newtok) == -1) { -- PyMem_Free(newtok); -- return 0; -- } -- if (tok->nextprompt != NULL) { -- tok->prompt = tok->nextprompt; -- } -- if (newtok == NULL) { -- tok->done = E_INTR; -- } -- else if (*newtok == '\0') { -- PyMem_Free(newtok); -- tok->done = E_EOF; -- } -- else if (tok->start != NULL) { -- Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf; -- remember_fstring_buffers(tok); -- size_t size = strlen(newtok); -- ADVANCE_LINENO(); -- if (!tok_reserve_buf(tok, size + 1)) { -- PyMem_Free(tok->buf); -- tok->buf = NULL; -- PyMem_Free(newtok); -- return 0; -- } -- memcpy(tok->cur, newtok, size + 1); -- PyMem_Free(newtok); -- tok->inp += size; -- tok->multi_line_start = tok->buf + cur_multi_line_start; -- restore_fstring_buffers(tok); -- } -- else { -- remember_fstring_buffers(tok); -- ADVANCE_LINENO(); -- PyMem_Free(tok->buf); -- tok->buf = newtok; -- tok->cur = tok->buf; -- tok->line_start = tok->buf; -- tok->inp = strchr(tok->buf, '\0'); -- tok->end = tok->inp + 1; -- restore_fstring_buffers(tok); -- } -- if (tok->done != E_OK) { -- if (tok->prompt != 
NULL) { -- PySys_WriteStderr("\n"); -- } -- return 0; -- } -- -- if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { -- return 0; -- } -- return 1; --} -- --static int --tok_underflow_file(struct tok_state *tok) { -- if (tok->start == NULL && !INSIDE_FSTRING(tok)) { -- tok->cur = tok->inp = tok->buf; -- } -- if (tok->decoding_state == STATE_INIT) { -- /* We have not yet determined the encoding. -- If an encoding is found, use the file-pointer -- reader functions from now on. */ -- if (!check_bom(fp_getc, fp_ungetc, fp_setreadl, tok)) { -- error_ret(tok); -- return 0; -- } -- assert(tok->decoding_state != STATE_INIT); -- } -- /* Read until '\n' or EOF */ -- if (tok->decoding_readline != NULL) { -- /* We already have a codec associated with this input. */ -- if (!tok_readline_recode(tok)) { -- return 0; -- } -- } -- else { -- /* We want a 'raw' read. */ -- if (!tok_readline_raw(tok)) { -- return 0; -- } -- } -- if (tok->inp == tok->cur) { -- tok->done = E_EOF; -- return 0; -- } -- tok->implicit_newline = 0; -- if (tok->inp[-1] != '\n') { -- assert(tok->inp + 1 < tok->end); -- /* Last line does not end in \n, fake one */ -- *tok->inp++ = '\n'; -- *tok->inp = '\0'; -- tok->implicit_newline = 1; -- } -- -- if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { -- return 0; -- } -- -- ADVANCE_LINENO(); -- if (tok->decoding_state != STATE_NORMAL) { -- if (tok->lineno > 2) { -- tok->decoding_state = STATE_NORMAL; -- } -- else if (!check_coding_spec(tok->cur, strlen(tok->cur), -- tok, fp_setreadl)) -- { -- return 0; -- } -- } -- /* The default encoding is UTF-8, so make sure we don't have any -- non-UTF-8 sequences in it. */ -- if (!tok->encoding && !ensure_utf8(tok->cur, tok)) { -- error_ret(tok); -- return 0; -- } -- assert(tok->done == E_OK); -- return tok->done == E_OK; --} -- --static int --tok_underflow_readline(struct tok_state* tok) { -- assert(tok->decoding_state == STATE_NORMAL); -- assert(tok->fp == NULL && tok->input == NULL && tok->decoding_readline == NULL); -- if (tok->start == NULL && !INSIDE_FSTRING(tok)) { -- tok->cur = tok->inp = tok->buf; -- } -- if (!tok_readline_string(tok)) { -- return 0; -- } -- if (tok->inp == tok->cur) { -- tok->done = E_EOF; -- return 0; -- } -- tok->implicit_newline = 0; -- if (tok->inp[-1] != '\n') { -- assert(tok->inp + 1 < tok->end); -- /* Last line does not end in \n, fake one */ -- *tok->inp++ = '\n'; -- *tok->inp = '\0'; -- tok->implicit_newline = 1; -- } -+static int tok_underflow_readline(struct tok_state *tok) { -+ assert(tok->decoding_state == STATE_NORMAL); -+ assert(tok->fp == NULL && tok->input == NULL && -+ tok->decoding_readline == NULL); -+ if (tok->start == NULL && !INSIDE_FSTRING(tok)) { -+ tok->cur = tok->inp = tok->buf; -+ } -+ if (!tok_readline_string(tok)) { -+ return 0; -+ } -+ if (tok->inp == tok->cur) { -+ tok->done = E_EOF; -+ return 0; -+ } -+ tok->implicit_newline = 0; -+ if (tok->inp[-1] != '\n') { -+ assert(tok->inp + 1 < tok->end); -+ /* Last line does not end in \n, fake one */ -+ *tok->inp++ = '\n'; -+ *tok->inp = '\0'; -+ tok->implicit_newline = 1; -+ } - -- if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { -- return 0; -- } -+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { -+ return 0; -+ } - -- ADVANCE_LINENO(); -- /* The default encoding is UTF-8, so make sure we don't have any -- non-UTF-8 sequences in it. 
*/ -- if (!tok->encoding && !ensure_utf8(tok->cur, tok)) { -- error_ret(tok); -- return 0; -- } -- assert(tok->done == E_OK); -- return tok->done == E_OK; -+ ADVANCE_LINENO(); -+ /* The default encoding is UTF-8, so make sure we don't have any -+ non-UTF-8 sequences in it. */ -+ if (!tok->encoding && !ensure_utf8(tok->cur, tok)) { -+ error_ret(tok); -+ return 0; -+ } -+ assert(tok->done == E_OK); -+ return tok->done == E_OK; - } - - #if defined(Py_DEBUG) --static void --print_escape(FILE *f, const char *s, Py_ssize_t size) --{ -- if (s == NULL) { -- fputs("NULL", f); -- return; -- } -- putc('"', f); -- while (size-- > 0) { -- unsigned char c = *s++; -- switch (c) { -- case '\n': fputs("\\n", f); break; -- case '\r': fputs("\\r", f); break; -- case '\t': fputs("\\t", f); break; -- case '\f': fputs("\\f", f); break; -- case '\'': fputs("\\'", f); break; -- case '"': fputs("\\\"", f); break; -- default: -- if (0x20 <= c && c <= 0x7f) -- putc(c, f); -- else -- fprintf(f, "\\x%02x", c); -- } -+static void print_escape(FILE *f, const char *s, Py_ssize_t size) { -+ if (s == NULL) { -+ fputs("NULL", f); -+ return; -+ } -+ putc('"', f); -+ while (size-- > 0) { -+ unsigned char c = *s++; -+ switch (c) { -+ case '\n': -+ fputs("\\n", f); -+ break; -+ case '\r': -+ fputs("\\r", f); -+ break; -+ case '\t': -+ fputs("\\t", f); -+ break; -+ case '\f': -+ fputs("\\f", f); -+ break; -+ case '\'': -+ fputs("\\'", f); -+ break; -+ case '"': -+ fputs("\\\"", f); -+ break; -+ default: -+ if (0x20 <= c && c <= 0x7f) -+ putc(c, f); -+ else -+ fprintf(f, "\\x%02x", c); - } -- putc('"', f); -+ } -+ putc('"', f); - } - #endif - - /* Get next char, updating state; error code goes into tok->done */ - --static int --tok_nextc(struct tok_state *tok) --{ -- int rc; -- for (;;) { -- if (tok->cur != tok->inp) { -- if ((unsigned int) tok->col_offset >= (unsigned int) INT_MAX) { -- tok->done = E_COLUMNOVERFLOW; -- return EOF; -- } -- tok->col_offset++; -- return Py_CHARMASK(*tok->cur++); /* Fast path */ -- } -- if (tok->done != E_OK) { -- return EOF; -- } -- if (tok->readline) { -- rc = tok_underflow_readline(tok); -- } -- else if (tok->fp == NULL) { -- rc = tok_underflow_string(tok); -- } -- else if (tok->prompt != NULL) { -- rc = tok_underflow_interactive(tok); -- } -- else { -- rc = tok_underflow_file(tok); -- } --#if defined(Py_DEBUG) -- if (tok->debug) { -- fprintf(stderr, "line[%d] = ", tok->lineno); -- print_escape(stderr, tok->cur, tok->inp - tok->cur); -- fprintf(stderr, " tok->done = %d\n", tok->done); -- } --#endif -- if (!rc) { -- tok->cur = tok->inp; -- return EOF; -- } -- tok->line_start = tok->cur; -- -- if (contains_null_bytes(tok->line_start, tok->inp - tok->line_start)) { -- syntaxerror(tok, "source code cannot contain null bytes"); -- tok->cur = tok->inp; -- return EOF; -- } -+static int tok_nextc(struct tok_state *tok) { -+ int rc; -+ for (;;) { -+ if (tok->cur != tok->inp) { -+ if ((unsigned int)tok->col_offset >= (unsigned int)INT_MAX) { -+ tok->done = E_COLUMNOVERFLOW; -+ return EOF; -+ } -+ tok->col_offset++; -+ return Py_CHARMASK(*tok->cur++); /* Fast path */ - } -- Py_UNREACHABLE(); --} -- --/* Back-up one character */ -- --static void --tok_backup(struct tok_state *tok, int c) --{ -- if (c != EOF) { -- if (--tok->cur < tok->buf) { -- Py_FatalError("tokenizer beginning of buffer"); -- } -- if ((int)(unsigned char)*tok->cur != Py_CHARMASK(c)) { -- Py_FatalError("tok_backup: wrong character"); -- } -- tok->col_offset--; -+ if (tok->done != E_OK) { -+ return EOF; -+ } -+ if (tok->readline) { -+ rc = 
tok_underflow_readline(tok); -+ } else if (tok->fp == NULL) { -+ rc = tok_underflow_string(tok); -+ } else if (tok->prompt != NULL) { -+ rc = tok_underflow_interactive(tok); -+ } else { -+ rc = tok_underflow_file(tok); - } --} -- --static int --_syntaxerror_range(struct tok_state *tok, const char *format, -- int col_offset, int end_col_offset, -- va_list vargs) --{ -- // In release builds, we don't want to overwrite a previous error, but in debug builds we -- // want to fail if we are not doing it so we can fix it. -- assert(tok->done != E_ERROR); -- if (tok->done == E_ERROR) { -- return ERRORTOKEN; -+#if defined(Py_DEBUG) -+ if (tok->debug) { -+ fprintf(stderr, "line[%d] = ", tok->lineno); -+ print_escape(stderr, tok->cur, tok->inp - tok->cur); -+ fprintf(stderr, " tok->done = %d\n", tok->done); - } -- PyObject *errmsg, *errtext, *args; -- errmsg = PyUnicode_FromFormatV(format, vargs); -- if (!errmsg) { -- goto error; -+#endif -+ if (!rc) { -+ tok->cur = tok->inp; -+ return EOF; - } -+ tok->line_start = tok->cur; - -- errtext = PyUnicode_DecodeUTF8(tok->line_start, tok->cur - tok->line_start, -- "replace"); -- if (!errtext) { -- goto error; -+ if (contains_null_bytes(tok->line_start, tok->inp - tok->line_start)) { -+ syntaxerror(tok, "source code cannot contain null bytes"); -+ tok->cur = tok->inp; -+ return EOF; - } -+ } -+ Py_UNREACHABLE(); -+} - -- if (col_offset == -1) { -- col_offset = (int)PyUnicode_GET_LENGTH(errtext); -- } -- if (end_col_offset == -1) { -- end_col_offset = col_offset; -- } -+/* Back-up one character */ - -- Py_ssize_t line_len = strcspn(tok->line_start, "\n"); -- if (line_len != tok->cur - tok->line_start) { -- Py_DECREF(errtext); -- errtext = PyUnicode_DecodeUTF8(tok->line_start, line_len, -- "replace"); -+static void tok_backup(struct tok_state *tok, int c) { -+ if (c != EOF) { -+ if (--tok->cur < tok->buf) { -+ Py_FatalError("tokenizer beginning of buffer"); - } -- if (!errtext) { -- goto error; -+ if ((int)(unsigned char)*tok->cur != Py_CHARMASK(c)) { -+ Py_FatalError("tok_backup: wrong character"); - } -+ tok->col_offset--; -+ } -+} - -- args = Py_BuildValue("(O(OiiNii))", errmsg, tok->filename, tok->lineno, -- col_offset, errtext, tok->lineno, end_col_offset); -- if (args) { -- PyErr_SetObject(PyExc_SyntaxError, args); -- Py_DECREF(args); -- } -+static int _syntaxerror_range(struct tok_state *tok, const char *format, -+ int col_offset, int end_col_offset, -+ va_list vargs) { -+ // In release builds, we don't want to overwrite a previous error, but in -+ // debug builds we want to fail if we are not doing it so we can fix it. 
-+ assert(tok->done != E_ERROR); -+ if (tok->done == E_ERROR) { -+ return ERRORTOKEN; -+ } -+ PyObject *errmsg, *errtext, *args; -+ errmsg = PyUnicode_FromFormatV(format, vargs); -+ if (!errmsg) { -+ goto error; -+ } -+ -+ errtext = PyUnicode_DecodeUTF8(tok->line_start, tok->cur - tok->line_start, -+ "replace"); -+ if (!errtext) { -+ goto error; -+ } -+ -+ if (col_offset == -1) { -+ col_offset = (int)PyUnicode_GET_LENGTH(errtext); -+ } -+ if (end_col_offset == -1) { -+ end_col_offset = col_offset; -+ } -+ -+ Py_ssize_t line_len = strcspn(tok->line_start, "\n"); -+ if (line_len != tok->cur - tok->line_start) { -+ Py_DECREF(errtext); -+ errtext = PyUnicode_DecodeUTF8(tok->line_start, line_len, "replace"); -+ } -+ if (!errtext) { -+ goto error; -+ } -+ -+ args = Py_BuildValue("(O(OiiNii))", errmsg, tok->filename, tok->lineno, -+ col_offset, errtext, tok->lineno, end_col_offset); -+ if (args) { -+ PyErr_SetObject(PyExc_SyntaxError, args); -+ Py_DECREF(args); -+ } - - error: -- Py_XDECREF(errmsg); -- tok->done = E_ERROR; -- return ERRORTOKEN; -+ Py_XDECREF(errmsg); -+ tok->done = E_ERROR; -+ return ERRORTOKEN; - } - --static int --syntaxerror(struct tok_state *tok, const char *format, ...) --{ -- // This errors are cleaned on startup. Todo: Fix it. -- va_list vargs; -- va_start(vargs, format); -- int ret = _syntaxerror_range(tok, format, -1, -1, vargs); -- va_end(vargs); -- return ret; -+static int syntaxerror(struct tok_state *tok, const char *format, ...) { -+ // This errors are cleaned on startup. Todo: Fix it. -+ va_list vargs; -+ va_start(vargs, format); -+ int ret = _syntaxerror_range(tok, format, -1, -1, vargs); -+ va_end(vargs); -+ return ret; - } - --static int --syntaxerror_known_range(struct tok_state *tok, -- int col_offset, int end_col_offset, -- const char *format, ...) --{ -- va_list vargs; -- va_start(vargs, format); -- int ret = _syntaxerror_range(tok, format, col_offset, end_col_offset, vargs); -- va_end(vargs); -- return ret; -+static int syntaxerror_known_range(struct tok_state *tok, int col_offset, -+ int end_col_offset, const char *format, -+ ...) { -+ va_list vargs; -+ va_start(vargs, format); -+ int ret = _syntaxerror_range(tok, format, col_offset, end_col_offset, vargs); -+ va_end(vargs); -+ return ret; - } - --static int --indenterror(struct tok_state *tok) --{ -- tok->done = E_TABSPACE; -- tok->cur = tok->inp; -- return ERRORTOKEN; -+static int indenterror(struct tok_state *tok) { -+ tok->done = E_TABSPACE; -+ tok->cur = tok->inp; -+ return ERRORTOKEN; - } - --static int --parser_warn(struct tok_state *tok, PyObject *category, const char *format, ...) --{ -- if (!tok->report_warnings) { -- return 0; -- } -- -- PyObject *errmsg; -- va_list vargs; -- va_start(vargs, format); -- errmsg = PyUnicode_FromFormatV(format, vargs); -- va_end(vargs); -- if (!errmsg) { -- goto error; -- } -- -- if (PyErr_WarnExplicitObject(category, errmsg, tok->filename, -- tok->lineno, NULL, NULL) < 0) { -- if (PyErr_ExceptionMatches(category)) { -- /* Replace the DeprecationWarning exception with a SyntaxError -- to get a more accurate error report */ -- PyErr_Clear(); -- syntaxerror(tok, "%U", errmsg); -- } -- goto error; -- } -- Py_DECREF(errmsg); -+static int parser_warn(struct tok_state *tok, PyObject *category, -+ const char *format, ...) 
{ -+ if (!tok->report_warnings) { - return 0; -+ } -+ -+ PyObject *errmsg; -+ va_list vargs; -+ va_start(vargs, format); -+ errmsg = PyUnicode_FromFormatV(format, vargs); -+ va_end(vargs); -+ if (!errmsg) { -+ goto error; -+ } -+ -+ if (PyErr_WarnExplicitObject(category, errmsg, tok->filename, tok->lineno, -+ NULL, NULL) < 0) { -+ if (PyErr_ExceptionMatches(category)) { -+ /* Replace the DeprecationWarning exception with a SyntaxError -+ to get a more accurate error report */ -+ PyErr_Clear(); -+ syntaxerror(tok, "%U", errmsg); -+ } -+ goto error; -+ } -+ Py_DECREF(errmsg); -+ return 0; - - error: -- Py_XDECREF(errmsg); -- tok->done = E_ERROR; -- return -1; -+ Py_XDECREF(errmsg); -+ tok->done = E_ERROR; -+ return -1; - } - --static int --warn_invalid_escape_sequence(struct tok_state *tok, int first_invalid_escape_char) --{ -- if (!tok->report_warnings) { -- return 0; -- } -- -- PyObject *msg = PyUnicode_FromFormat( -- "invalid escape sequence '\\%c'", -- (char) first_invalid_escape_char -- ); -+static int warn_invalid_escape_sequence(struct tok_state *tok, -+ int first_invalid_escape_char) { -+ if (!tok->report_warnings) { -+ return 0; -+ } - -- if (msg == NULL) { -- return -1; -- } -+ PyObject *msg = PyUnicode_FromFormat("invalid escape sequence '\\%c'", -+ (char)first_invalid_escape_char); - -- if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, tok->filename, -- tok->lineno, NULL, NULL) < 0) { -- Py_DECREF(msg); -+ if (msg == NULL) { -+ return -1; -+ } - -- if (PyErr_ExceptionMatches(PyExc_SyntaxWarning)) { -- /* Replace the SyntaxWarning exception with a SyntaxError -- to get a more accurate error report */ -- PyErr_Clear(); -- return syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char); -- } -+ if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, tok->filename, -+ tok->lineno, NULL, NULL) < 0) { -+ Py_DECREF(msg); - -- return -1; -+ if (PyErr_ExceptionMatches(PyExc_SyntaxWarning)) { -+ /* Replace the SyntaxWarning exception with a SyntaxError -+ to get a more accurate error report */ -+ PyErr_Clear(); -+ return syntaxerror(tok, "invalid escape sequence '\\%c'", -+ (char)first_invalid_escape_char); - } - -- Py_DECREF(msg); -- return 0; -+ return -1; -+ } -+ -+ Py_DECREF(msg); -+ return 0; - } - --static int --lookahead(struct tok_state *tok, const char *test) --{ -- const char *s = test; -- int res = 0; -- while (1) { -- int c = tok_nextc(tok); -- if (*s == 0) { -- res = !is_potential_identifier_char(c); -- } -- else if (c == *s) { -- s++; -- continue; -- } -+static int lookahead(struct tok_state *tok, const char *test) { -+ const char *s = test; -+ int res = 0; -+ while (1) { -+ int c = tok_nextc(tok); -+ if (*s == 0) { -+ res = !is_potential_identifier_char(c); -+ } else if (c == *s) { -+ s++; -+ continue; -+ } - -- tok_backup(tok, c); -- while (s != test) { -- tok_backup(tok, *--s); -- } -- return res; -+ tok_backup(tok, c); -+ while (s != test) { -+ tok_backup(tok, *--s); - } -+ return res; -+ } - } - --static int --verify_end_of_number(struct tok_state *tok, int c, const char *kind) { -- if (tok->tok_extra_tokens) { -- // When we are parsing extra tokens, we don't want to emit warnings -- // about invalid literals, because we want to be a bit more liberal. -- return 1; -- } -- /* Emit a deprecation warning only if the numeric literal is immediately -- * followed by one of keywords which can occur after a numeric literal -- * in valid code: "and", "else", "for", "if", "in", "is" and "or". 
-- * It allows to gradually deprecate existing valid code without adding -- * warning before error in most cases of invalid numeric literal (which -- * would be confusing and break existing tests). -- * Raise a syntax error with slightly better message than plain -- * "invalid syntax" if the numeric literal is immediately followed by -- * other keyword or identifier. -- */ -- int r = 0; -- if (c == 'a') { -- r = lookahead(tok, "nd"); -- } -- else if (c == 'e') { -- r = lookahead(tok, "lse"); -- } -- else if (c == 'f') { -- r = lookahead(tok, "or"); -- } -- else if (c == 'i') { -- int c2 = tok_nextc(tok); -- if (c2 == 'f' || c2 == 'n' || c2 == 's') { -- r = 1; -- } -- tok_backup(tok, c2); -- } -- else if (c == 'o') { -- r = lookahead(tok, "r"); -- } -- else if (c == 'n') { -- r = lookahead(tok, "ot"); -- } -- if (r) { -- tok_backup(tok, c); -- if (parser_warn(tok, PyExc_SyntaxWarning, -- "invalid %s literal", kind)) -- { -- return 0; -- } -- tok_nextc(tok); -+static int verify_end_of_number(struct tok_state *tok, int c, -+ const char *kind) { -+ if (tok->tok_extra_tokens) { -+ // When we are parsing extra tokens, we don't want to emit warnings -+ // about invalid literals, because we want to be a bit more liberal. -+ return 1; -+ } -+ /* Emit a deprecation warning only if the numeric literal is immediately -+ * followed by one of keywords which can occur after a numeric literal -+ * in valid code: "and", "else", "for", "if", "in", "is" and "or". -+ * It allows to gradually deprecate existing valid code without adding -+ * warning before error in most cases of invalid numeric literal (which -+ * would be confusing and break existing tests). -+ * Raise a syntax error with slightly better message than plain -+ * "invalid syntax" if the numeric literal is immediately followed by -+ * other keyword or identifier. -+ */ -+ int r = 0; -+ if (c == 'a') { -+ r = lookahead(tok, "nd"); -+ } else if (c == 'e') { -+ r = lookahead(tok, "lse"); -+ } else if (c == 'f') { -+ r = lookahead(tok, "or"); -+ } else if (c == 'i') { -+ int c2 = tok_nextc(tok); -+ if (c2 == 'f' || c2 == 'n' || c2 == 's') { -+ r = 1; -+ } -+ tok_backup(tok, c2); -+ } else if (c == 'o') { -+ r = lookahead(tok, "r"); -+ } else if (c == 'n') { -+ r = lookahead(tok, "ot"); -+ } -+ if (r) { -+ tok_backup(tok, c); -+ if (parser_warn(tok, PyExc_SyntaxWarning, "invalid %s literal", kind)) { -+ return 0; - } -- else /* In future releases, only error will remain. */ -+ tok_nextc(tok); -+ } else /* In future releases, only error will remain. */ - if (c < 128 && is_potential_identifier_char(c)) { -- tok_backup(tok, c); -- syntaxerror(tok, "invalid %s literal", kind); -- return 0; -+ tok_backup(tok, c); -+ syntaxerror(tok, "invalid %s literal", kind); -+ return 0; - } -- return 1; -+ return 1; - } - - /* Verify that the identifier follows PEP 3131. - All identifier strings are guaranteed to be "ready" unicode objects. 
- */ --static int --verify_identifier(struct tok_state *tok) --{ -- if (tok->tok_extra_tokens) { -- return 1; -- } -- PyObject *s; -- if (tok->decoding_erred) -- return 0; -- s = PyUnicode_DecodeUTF8(tok->start, tok->cur - tok->start, NULL); -- if (s == NULL) { -- if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { -- tok->done = E_DECODE; -- } -- else { -- tok->done = E_ERROR; -- } -- return 0; -+static int verify_identifier(struct tok_state *tok) { -+ if (tok->tok_extra_tokens) { -+ return 1; -+ } -+ PyObject *s; -+ if (tok->decoding_erred) -+ return 0; -+ s = PyUnicode_DecodeUTF8(tok->start, tok->cur - tok->start, NULL); -+ if (s == NULL) { -+ if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { -+ tok->done = E_DECODE; -+ } else { -+ tok->done = E_ERROR; - } -- Py_ssize_t invalid = _PyUnicode_ScanIdentifier(s); -- if (invalid < 0) { -- Py_DECREF(s); -+ return 0; -+ } -+ Py_ssize_t invalid = _PyUnicode_ScanIdentifier(s); -+ if (invalid < 0) { -+ Py_DECREF(s); -+ tok->done = E_ERROR; -+ return 0; -+ } -+ assert(PyUnicode_GET_LENGTH(s) > 0); -+ if (invalid < PyUnicode_GET_LENGTH(s)) { -+ Py_UCS4 ch = PyUnicode_READ_CHAR(s, invalid); -+ if (invalid + 1 < PyUnicode_GET_LENGTH(s)) { -+ /* Determine the offset in UTF-8 encoded input */ -+ Py_SETREF(s, PyUnicode_Substring(s, 0, invalid + 1)); -+ if (s != NULL) { -+ Py_SETREF(s, PyUnicode_AsUTF8String(s)); -+ } -+ if (s == NULL) { - tok->done = E_ERROR; - return 0; -- } -- assert(PyUnicode_GET_LENGTH(s) > 0); -- if (invalid < PyUnicode_GET_LENGTH(s)) { -- Py_UCS4 ch = PyUnicode_READ_CHAR(s, invalid); -- if (invalid + 1 < PyUnicode_GET_LENGTH(s)) { -- /* Determine the offset in UTF-8 encoded input */ -- Py_SETREF(s, PyUnicode_Substring(s, 0, invalid + 1)); -- if (s != NULL) { -- Py_SETREF(s, PyUnicode_AsUTF8String(s)); -- } -- if (s == NULL) { -- tok->done = E_ERROR; -- return 0; -- } -- tok->cur = (char *)tok->start + PyBytes_GET_SIZE(s); -- } -- Py_DECREF(s); -- if (Py_UNICODE_ISPRINTABLE(ch)) { -- syntaxerror(tok, "invalid character '%c' (U+%04X)", ch, ch); -- } -- else { -- syntaxerror(tok, "invalid non-printable character U+%04X", ch); -- } -- return 0; -+ } -+ tok->cur = (char *)tok->start + PyBytes_GET_SIZE(s); - } - Py_DECREF(s); -- return 1; --} -- --static int --tok_decimal_tail(struct tok_state *tok) --{ -- int c; -- -- while (1) { -- do { -- c = tok_nextc(tok); -- } while (isdigit(c)); -- if (c != '_') { -- break; -- } -- c = tok_nextc(tok); -- if (!isdigit(c)) { -- tok_backup(tok, c); -- syntaxerror(tok, "invalid decimal literal"); -- return 0; -- } -- } -- return c; --} -- -- --static inline int --tok_continuation_line(struct tok_state *tok) { -- int c = tok_nextc(tok); -- if (c == '\r') { -- c = tok_nextc(tok); -- } -- if (c != '\n') { -- tok->done = E_LINECONT; -- return -1; -- } -- c = tok_nextc(tok); -- if (c == EOF) { -- tok->done = E_EOF; -- tok->cur = tok->inp; -- return -1; -+ if (Py_UNICODE_ISPRINTABLE(ch)) { -+ syntaxerror(tok, "invalid character '%c' (U+%04X)", ch, ch); - } else { -- tok_backup(tok, c); -+ syntaxerror(tok, "invalid non-printable character U+%04X", ch); - } -- return c; -+ return 0; -+ } -+ Py_DECREF(s); -+ return 1; - } - --static int --type_comment_token_setup(struct tok_state *tok, struct token *token, int type, int col_offset, -- int end_col_offset, const char *start, const char *end) --{ -- token->level = tok->level; -- token->lineno = token->end_lineno = tok->lineno; -- token->col_offset = col_offset; -- token->end_col_offset = end_col_offset; -- token->start = start; -- token->end = end; -- 
return type; --} -+static int tok_decimal_tail(struct tok_state *tok) { -+ int c; - --static int --token_setup(struct tok_state *tok, struct token *token, int type, const char *start, const char *end) --{ -- assert((start == NULL && end == NULL) || (start != NULL && end != NULL)); -- token->level = tok->level; -- if (ISSTRINGLIT(type)) { -- token->lineno = tok->first_lineno; -- } -- else { -- token->lineno = tok->lineno; -+ while (1) { -+ do { -+ c = tok_nextc(tok); -+ } while (isdigit(c)); -+ if (c != '_') { -+ break; - } -- token->end_lineno = tok->lineno; -- token->col_offset = token->end_col_offset = -1; -- token->start = start; -- token->end = end; -- -- if (start != NULL && end != NULL) { -- token->col_offset = tok->starting_col_offset; -- token->end_col_offset = tok->col_offset; -+ c = tok_nextc(tok); -+ if (!isdigit(c)) { -+ tok_backup(tok, c); -+ syntaxerror(tok, "invalid decimal literal"); -+ return 0; - } -- return type; -+ } -+ return c; - } - -- --static int --tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token) --{ -- int c; -- int blankline, nonascii; -- -- const char *p_start = NULL; -- const char *p_end = NULL; -- nextline: -- tok->start = NULL; -- tok->starting_col_offset = -1; -- blankline = 0; -- -- -- /* Get indentation level */ -- if (tok->atbol) { -- int col = 0; -- int altcol = 0; -- tok->atbol = 0; -- int cont_line_col = 0; -- for (;;) { -- c = tok_nextc(tok); -- if (c == ' ') { -- col++, altcol++; -- } -- else if (c == '\t') { -- col = (col / tok->tabsize + 1) * tok->tabsize; -- altcol = (altcol / ALTTABSIZE + 1) * ALTTABSIZE; -- } -- else if (c == '\014') {/* Control-L (formfeed) */ -- col = altcol = 0; /* For Emacs users */ -- } -- else if (c == '\\') { -- // Indentation cannot be split over multiple physical lines -- // using backslashes. This means that if we found a backslash -- // preceded by whitespace, **the first one we find** determines -- // the level of indentation of whatever comes next. -- cont_line_col = cont_line_col ? cont_line_col : col; -- if ((c = tok_continuation_line(tok)) == -1) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- else { -- break; -- } -- } -- tok_backup(tok, c); -- if (c == '#' || c == '\n' || c == '\r') { -- /* Lines with only whitespace and/or comments -- shouldn't affect the indentation and are -- not passed to the parser as NEWLINE tokens, -- except *totally* empty lines in interactive -- mode, which signal the end of a command group. */ -- if (col == 0 && c == '\n' && tok->prompt != NULL) { -- blankline = 0; /* Let it through */ -- } -- else if (tok->prompt != NULL && tok->lineno == 1) { -- /* In interactive mode, if the first line contains -- only spaces and/or a comment, let it through. */ -- blankline = 0; -- col = altcol = 0; -- } -- else { -- blankline = 1; /* Ignore completely */ -- } -- /* We can't jump back right here since we still -- may need to skip to the end of a comment */ -- } -- if (!blankline && tok->level == 0) { -- col = cont_line_col ? cont_line_col : col; -- altcol = cont_line_col ? 
cont_line_col : altcol; -- if (col == tok->indstack[tok->indent]) { -- /* No change */ -- if (altcol != tok->altindstack[tok->indent]) { -- return MAKE_TOKEN(indenterror(tok)); -- } -- } -- else if (col > tok->indstack[tok->indent]) { -- /* Indent -- always one */ -- if (tok->indent+1 >= MAXINDENT) { -- tok->done = E_TOODEEP; -- tok->cur = tok->inp; -- return MAKE_TOKEN(ERRORTOKEN); -- } -- if (altcol <= tok->altindstack[tok->indent]) { -- return MAKE_TOKEN(indenterror(tok)); -- } -- tok->pendin++; -- tok->indstack[++tok->indent] = col; -- tok->altindstack[tok->indent] = altcol; -- } -- else /* col < tok->indstack[tok->indent] */ { -- /* Dedent -- any number, must be consistent */ -- while (tok->indent > 0 && -- col < tok->indstack[tok->indent]) { -- tok->pendin--; -- tok->indent--; -- } -- if (col != tok->indstack[tok->indent]) { -- tok->done = E_DEDENT; -- tok->cur = tok->inp; -- return MAKE_TOKEN(ERRORTOKEN); -- } -- if (altcol != tok->altindstack[tok->indent]) { -- return MAKE_TOKEN(indenterror(tok)); -- } -- } -- } -- } -- -- tok->start = tok->cur; -- tok->starting_col_offset = tok->col_offset; -- -- /* Return pending indents/dedents */ -- if (tok->pendin != 0) { -- if (tok->pendin < 0) { -- if (tok->tok_extra_tokens) { -- p_start = tok->cur; -- p_end = tok->cur; -- } -- tok->pendin++; -- return MAKE_TOKEN(DEDENT); -- } -- else { -- if (tok->tok_extra_tokens) { -- p_start = tok->buf; -- p_end = tok->cur; -- } -- tok->pendin--; -- return MAKE_TOKEN(INDENT); -+static inline int tok_continuation_line(struct tok_state *tok) { -+ int c = tok_nextc(tok); -+ if (c == '\r') { -+ c = tok_nextc(tok); -+ } -+ if (c != '\n') { -+ tok->done = E_LINECONT; -+ return -1; -+ } -+ c = tok_nextc(tok); -+ if (c == EOF) { -+ tok->done = E_EOF; -+ tok->cur = tok->inp; -+ return -1; -+ } else { -+ tok_backup(tok, c); -+ } -+ return c; -+} -+ -+static int type_comment_token_setup(struct tok_state *tok, struct token *token, -+ int type, int col_offset, -+ int end_col_offset, const char *start, -+ const char *end) { -+ token->level = tok->level; -+ token->lineno = token->end_lineno = tok->lineno; -+ token->col_offset = col_offset; -+ token->end_col_offset = end_col_offset; -+ token->start = start; -+ token->end = end; -+ return type; -+} -+ -+static int token_setup(struct tok_state *tok, struct token *token, int type, -+ const char *start, const char *end) { -+ assert((start == NULL && end == NULL) || (start != NULL && end != NULL)); -+ token->level = tok->level; -+ if (ISSTRINGLIT(type)) { -+ token->lineno = tok->first_lineno; -+ } else { -+ token->lineno = tok->lineno; -+ } -+ token->end_lineno = tok->lineno; -+ token->col_offset = token->end_col_offset = -1; -+ token->start = start; -+ token->end = end; -+ -+ if (start != NULL && end != NULL) { -+ token->col_offset = tok->starting_col_offset; -+ token->end_col_offset = tok->col_offset; -+ } -+ return type; -+} -+ -+static int tok_get_normal_mode(struct tok_state *tok, -+ tokenizer_mode *current_tok, -+ struct token *token) { -+ int c; -+ int blankline, nonascii; -+ -+ const char *p_start = NULL; -+ const char *p_end = NULL; -+nextline: -+ tok->start = NULL; -+ tok->starting_col_offset = -1; -+ blankline = 0; -+ -+ /* Get indentation level */ -+ if (tok->atbol) { -+ int col = 0; -+ int altcol = 0; -+ tok->atbol = 0; -+ int cont_line_col = 0; -+ for (;;) { -+ c = tok_nextc(tok); -+ if (c == ' ') { -+ col++, altcol++; -+ } else if (c == '\t') { -+ col = (col / tok->tabsize + 1) * tok->tabsize; -+ altcol = (altcol / ALTTABSIZE + 1) * ALTTABSIZE; -+ } else if (c 
== '\014') { /* Control-L (formfeed) */ -+ col = altcol = 0; /* For Emacs users */ -+ } else if (c == '\\') { -+ // Indentation cannot be split over multiple physical lines -+ // using backslashes. This means that if we found a backslash -+ // preceded by whitespace, **the first one we find** determines -+ // the level of indentation of whatever comes next. -+ cont_line_col = cont_line_col ? cont_line_col : col; -+ if ((c = tok_continuation_line(tok)) == -1) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -+ } else { -+ break; -+ } - } -- -- /* Peek ahead at the next character */ -- c = tok_nextc(tok); - tok_backup(tok, c); -- /* Check if we are closing an async function */ -- if (tok->async_def -- && !blankline -- /* Due to some implementation artifacts of type comments, -- * a TYPE_COMMENT at the start of a function won't set an -- * indentation level and it will produce a NEWLINE after it. -- * To avoid spuriously ending an async function due to this, -- * wait until we have some non-newline char in front of us. */ -- && c != '\n' -- && tok->level == 0 -- /* There was a NEWLINE after ASYNC DEF, -- so we're past the signature. */ -- && tok->async_def_nl -- /* Current indentation level is less than where -- the async function was defined */ -- && tok->async_def_indent >= tok->indent) -- { -- tok->async_def = 0; -- tok->async_def_indent = 0; -- tok->async_def_nl = 0; -- } -- -- again: -- tok->start = NULL; -- /* Skip spaces */ -- do { -- c = tok_nextc(tok); -- } while (c == ' ' || c == '\t' || c == '\014'); -- -- /* Set start of current token */ -- tok->start = tok->cur == NULL ? NULL : tok->cur - 1; -- tok->starting_col_offset = tok->col_offset - 1; -- -- /* Skip comment, unless it's a type comment */ -- if (c == '#') { -+ if (c == '#' || c == '\n' || c == '\r') { -+ /* Lines with only whitespace and/or comments -+ shouldn't affect the indentation and are -+ not passed to the parser as NEWLINE tokens, -+ except *totally* empty lines in interactive -+ mode, which signal the end of a command group. */ -+ if (col == 0 && c == '\n' && tok->prompt != NULL) { -+ blankline = 0; /* Let it through */ -+ } else if (tok->prompt != NULL && tok->lineno == 1) { -+ /* In interactive mode, if the first line contains -+ only spaces and/or a comment, let it through. */ -+ blankline = 0; -+ col = altcol = 0; -+ } else { -+ blankline = 1; /* Ignore completely */ -+ } -+ /* We can't jump back right here since we still -+ may need to skip to the end of a comment */ -+ } -+ if (!blankline && tok->level == 0) { -+ col = cont_line_col ? cont_line_col : col; -+ altcol = cont_line_col ? 
cont_line_col : altcol; -+ if (col == tok->indstack[tok->indent]) { -+ /* No change */ -+ if (altcol != tok->altindstack[tok->indent]) { -+ return MAKE_TOKEN(indenterror(tok)); -+ } -+ } else if (col > tok->indstack[tok->indent]) { -+ /* Indent -- always one */ -+ if (tok->indent + 1 >= MAXINDENT) { -+ tok->done = E_TOODEEP; -+ tok->cur = tok->inp; -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ if (altcol <= tok->altindstack[tok->indent]) { -+ return MAKE_TOKEN(indenterror(tok)); -+ } -+ tok->pendin++; -+ tok->indstack[++tok->indent] = col; -+ tok->altindstack[tok->indent] = altcol; -+ } else /* col < tok->indstack[tok->indent] */ { -+ /* Dedent -- any number, must be consistent */ -+ while (tok->indent > 0 && col < tok->indstack[tok->indent]) { -+ tok->pendin--; -+ tok->indent--; -+ } -+ if (col != tok->indstack[tok->indent]) { -+ tok->done = E_DEDENT; -+ tok->cur = tok->inp; -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ if (altcol != tok->altindstack[tok->indent]) { -+ return MAKE_TOKEN(indenterror(tok)); -+ } -+ } -+ } -+ } -+ -+ tok->start = tok->cur; -+ tok->starting_col_offset = tok->col_offset; -+ -+ /* Return pending indents/dedents */ -+ if (tok->pendin != 0) { -+ if (tok->pendin < 0) { -+ if (tok->tok_extra_tokens) { -+ p_start = tok->cur; -+ p_end = tok->cur; -+ } -+ tok->pendin++; -+ return MAKE_TOKEN(DEDENT); -+ } else { -+ if (tok->tok_extra_tokens) { -+ p_start = tok->buf; -+ p_end = tok->cur; -+ } -+ tok->pendin--; -+ return MAKE_TOKEN(INDENT); -+ } -+ } -+ -+ /* Peek ahead at the next character */ -+ c = tok_nextc(tok); -+ tok_backup(tok, c); -+ /* Check if we are closing an async function */ -+ if (tok->async_def && -+ !blankline -+ /* Due to some implementation artifacts of type comments, -+ * a TYPE_COMMENT at the start of a function won't set an -+ * indentation level and it will produce a NEWLINE after it. -+ * To avoid spuriously ending an async function due to this, -+ * wait until we have some non-newline char in front of us. */ -+ && c != '\n' && -+ tok->level == 0 -+ /* There was a NEWLINE after ASYNC DEF, -+ so we're past the signature. */ -+ && tok->async_def_nl -+ /* Current indentation level is less than where -+ the async function was defined */ -+ && tok->async_def_indent >= tok->indent) { -+ tok->async_def = 0; -+ tok->async_def_indent = 0; -+ tok->async_def_nl = 0; -+ } - -- const char* p = NULL; -- const char *prefix, *type_start; -- int current_starting_col_offset; -+again: -+ tok->start = NULL; -+ /* Skip spaces */ -+ do { -+ c = tok_nextc(tok); -+ } while (c == ' ' || c == '\t' || c == '\014'); - -- while (c != EOF && c != '\n' && c != '\r') { -- c = tok_nextc(tok); -- } -+ /* Set start of current token */ -+ tok->start = tok->cur == NULL ? NULL : tok->cur - 1; -+ tok->starting_col_offset = tok->col_offset - 1; - -- if (tok->tok_extra_tokens) { -- p = tok->start; -- } -+ /* Skip comment, unless it's a type comment */ -+ if (c == '#') { - -- if (tok->type_comments) { -- p = tok->start; -- current_starting_col_offset = tok->starting_col_offset; -- prefix = type_comment_prefix; -- while (*prefix && p < tok->cur) { -- if (*prefix == ' ') { -- while (*p == ' ' || *p == '\t') { -- p++; -- current_starting_col_offset++; -- } -- } else if (*prefix == *p) { -- p++; -- current_starting_col_offset++; -- } else { -- break; -- } -- -- prefix++; -- } -+ const char *p = NULL; -+ const char *prefix, *type_start; -+ int current_starting_col_offset; - -- /* This is a type comment if we matched all of type_comment_prefix. 
*/ -- if (!*prefix) { -- int is_type_ignore = 1; -- // +6 in order to skip the word 'ignore' -- const char *ignore_end = p + 6; -- const int ignore_end_col_offset = current_starting_col_offset + 6; -- tok_backup(tok, c); /* don't eat the newline or EOF */ -- -- type_start = p; -- -- /* A TYPE_IGNORE is "type: ignore" followed by the end of the token -- * or anything ASCII and non-alphanumeric. */ -- is_type_ignore = ( -- tok->cur >= ignore_end && memcmp(p, "ignore", 6) == 0 -- && !(tok->cur > ignore_end -- && ((unsigned char)ignore_end[0] >= 128 || Py_ISALNUM(ignore_end[0])))); -- -- if (is_type_ignore) { -- p_start = ignore_end; -- p_end = tok->cur; -- -- /* If this type ignore is the only thing on the line, consume the newline also. */ -- if (blankline) { -- tok_nextc(tok); -- tok->atbol = 1; -- } -- return MAKE_TYPE_COMMENT_TOKEN(TYPE_IGNORE, ignore_end_col_offset, tok->col_offset); -- } else { -- p_start = type_start; -- p_end = tok->cur; -- return MAKE_TYPE_COMMENT_TOKEN(TYPE_COMMENT, current_starting_col_offset, tok->col_offset); -- } -- } -- } -- if (tok->tok_extra_tokens) { -- tok_backup(tok, c); /* don't eat the newline or EOF */ -- p_start = p; -- p_end = tok->cur; -- tok->comment_newline = blankline; -- return MAKE_TOKEN(COMMENT); -- } -+ while (c != EOF && c != '\n' && c != '\r') { -+ c = tok_nextc(tok); - } - -- if (tok->done == E_INTERACT_STOP) { -- return MAKE_TOKEN(ENDMARKER); -+ if (tok->tok_extra_tokens) { -+ p = tok->start; -+ } -+ -+ if (tok->type_comments) { -+ p = tok->start; -+ current_starting_col_offset = tok->starting_col_offset; -+ prefix = type_comment_prefix; -+ while (*prefix && p < tok->cur) { -+ if (*prefix == ' ') { -+ while (*p == ' ' || *p == '\t') { -+ p++; -+ current_starting_col_offset++; -+ } -+ } else if (*prefix == *p) { -+ p++; -+ current_starting_col_offset++; -+ } else { -+ break; -+ } -+ -+ prefix++; -+ } -+ -+ /* This is a type comment if we matched all of type_comment_prefix. */ -+ if (!*prefix) { -+ int is_type_ignore = 1; -+ // +6 in order to skip the word 'ignore' -+ const char *ignore_end = p + 6; -+ const int ignore_end_col_offset = current_starting_col_offset + 6; -+ tok_backup(tok, c); /* don't eat the newline or EOF */ -+ -+ type_start = p; -+ -+ /* A TYPE_IGNORE is "type: ignore" followed by the end of the token -+ * or anything ASCII and non-alphanumeric. */ -+ is_type_ignore = -+ (tok->cur >= ignore_end && memcmp(p, "ignore", 6) == 0 && -+ !(tok->cur > ignore_end && ((unsigned char)ignore_end[0] >= 128 || -+ Py_ISALNUM(ignore_end[0])))); -+ -+ if (is_type_ignore) { -+ p_start = ignore_end; -+ p_end = tok->cur; -+ -+ /* If this type ignore is the only thing on the line, consume the -+ * newline also. 
*/ -+ if (blankline) { -+ tok_nextc(tok); -+ tok->atbol = 1; -+ } -+ return MAKE_TYPE_COMMENT_TOKEN(TYPE_IGNORE, ignore_end_col_offset, -+ tok->col_offset); -+ } else { -+ p_start = type_start; -+ p_end = tok->cur; -+ return MAKE_TYPE_COMMENT_TOKEN( -+ TYPE_COMMENT, current_starting_col_offset, tok->col_offset); -+ } -+ } - } -- -- /* Check for EOF and errors now */ -- if (c == EOF) { -- if (tok->level) { -- return MAKE_TOKEN(ERRORTOKEN); -+ if (tok->tok_extra_tokens) { -+ tok_backup(tok, c); /* don't eat the newline or EOF */ -+ p_start = p; -+ p_end = tok->cur; -+ tok->comment_newline = blankline; -+ return MAKE_TOKEN(COMMENT); -+ } -+ } -+ -+ if (tok->done == E_INTERACT_STOP) { -+ return MAKE_TOKEN(ENDMARKER); -+ } -+ -+ /* Check for EOF and errors now */ -+ if (c == EOF) { -+ if (tok->level) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ return MAKE_TOKEN(tok->done == E_EOF ? ENDMARKER : ERRORTOKEN); -+ } -+ -+ /* Identifier (most frequent token!) */ -+ nonascii = 0; -+ if (is_potential_identifier_start(c)) { -+ /* Process the various legal combinations of b"", r"", u"", and f"". */ -+ int saw_b = 0, saw_r = 0, saw_u = 0, saw_f = 0; -+ while (1) { -+ if (!(saw_b || saw_u || saw_f) && (c == 'b' || c == 'B')) -+ saw_b = 1; -+ /* Since this is a backwards compatibility support literal we don't -+ want to support it in arbitrary order like byte literals. */ -+ else if (!(saw_b || saw_u || saw_r || saw_f) && (c == 'u' || c == 'U')) { -+ saw_u = 1; -+ } -+ /* ur"" and ru"" are not supported */ -+ else if (!(saw_r || saw_u) && (c == 'r' || c == 'R')) { -+ saw_r = 1; -+ } else if (!(saw_f || saw_b || saw_u) && (c == 'f' || c == 'F')) { -+ saw_f = 1; -+ } else { -+ break; -+ } -+ c = tok_nextc(tok); -+ if (c == '"' || c == '\'') { -+ if (saw_f) { -+ goto f_string_quote; - } -- return MAKE_TOKEN(tok->done == E_EOF ? ENDMARKER : ERRORTOKEN); -+ goto letter_quote; -+ } -+ } -+ while (is_potential_identifier_char(c)) { -+ if (c >= 128) { -+ nonascii = 1; -+ } -+ c = tok_nextc(tok); -+ } -+ tok_backup(tok, c); -+ if (nonascii && !verify_identifier(tok)) { -+ return MAKE_TOKEN(ERRORTOKEN); - } - -- /* Identifier (most frequent token!) */ -- nonascii = 0; -- if (is_potential_identifier_start(c)) { -- /* Process the various legal combinations of b"", r"", u"", and f"". */ -- int saw_b = 0, saw_r = 0, saw_u = 0, saw_f = 0; -- while (1) { -- if (!(saw_b || saw_u || saw_f) && (c == 'b' || c == 'B')) -- saw_b = 1; -- /* Since this is a backwards compatibility support literal we don't -- want to support it in arbitrary order like byte literals. */ -- else if (!(saw_b || saw_u || saw_r || saw_f) -- && (c == 'u'|| c == 'U')) { -- saw_u = 1; -- } -- /* ur"" and ru"" are not supported */ -- else if (!(saw_r || saw_u) && (c == 'r' || c == 'R')) { -- saw_r = 1; -- } -- else if (!(saw_f || saw_b || saw_u) && (c == 'f' || c == 'F')) { -- saw_f = 1; -- } -- else { -- break; -- } -- c = tok_nextc(tok); -- if (c == '"' || c == '\'') { -- if (saw_f) { -- goto f_string_quote; -- } -- goto letter_quote; -- } -- } -- while (is_potential_identifier_char(c)) { -- if (c >= 128) { -- nonascii = 1; -- } -- c = tok_nextc(tok); -- } -- tok_backup(tok, c); -- if (nonascii && !verify_identifier(tok)) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -+ p_start = tok->start; -+ p_end = tok->cur; - -- p_start = tok->start; -- p_end = tok->cur; -+ /* async/await parsing block. */ -+ if (tok->cur - tok->start == 5 && tok->start[0] == 'a') { -+ /* May be an 'async' or 'await' token. For Python 3.7 or -+ later we recognize them unconditionally. 
For Python -+ 3.5 or 3.6 we recognize 'async' in front of 'def', and -+ either one inside of 'async def'. (Technically we -+ shouldn't recognize these at all for 3.4 or earlier, -+ but there's no *valid* Python 3.4 code that would be -+ rejected, and async functions will be rejected in a -+ later phase.) */ -+ if (!tok->async_hacks || tok->async_def) { -+ /* Always recognize the keywords. */ -+ if (memcmp(tok->start, "async", 5) == 0) { -+ return MAKE_TOKEN(ASYNC); -+ } -+ if (memcmp(tok->start, "await", 5) == 0) { -+ return MAKE_TOKEN(AWAIT); -+ } -+ } else if (memcmp(tok->start, "async", 5) == 0) { -+ /* The current token is 'async'. -+ Look ahead one token to see if that is 'def'. */ -+ -+ struct tok_state ahead_tok; -+ struct token ahead_token; -+ _PyToken_Init(&ahead_token); -+ int ahead_tok_kind; -+ -+ memcpy(&ahead_tok, tok, sizeof(ahead_tok)); -+ ahead_tok_kind = -+ tok_get_normal_mode(&ahead_tok, current_tok, &ahead_token); -+ -+ if (ahead_tok_kind == NAME && ahead_tok.cur - ahead_tok.start == 3 && -+ memcmp(ahead_tok.start, "def", 3) == 0) { -+ /* The next token is going to be 'def', so instead of -+ returning a plain NAME token, return ASYNC. */ -+ tok->async_def_indent = tok->indent; -+ tok->async_def = 1; -+ _PyToken_Free(&ahead_token); -+ return MAKE_TOKEN(ASYNC); -+ } -+ _PyToken_Free(&ahead_token); -+ } -+ } -+ -+ return MAKE_TOKEN(NAME); -+ } -+ -+ if (c == '\r') { -+ c = tok_nextc(tok); -+ } - -- /* async/await parsing block. */ -- if (tok->cur - tok->start == 5 && tok->start[0] == 'a') { -- /* May be an 'async' or 'await' token. For Python 3.7 or -- later we recognize them unconditionally. For Python -- 3.5 or 3.6 we recognize 'async' in front of 'def', and -- either one inside of 'async def'. (Technically we -- shouldn't recognize these at all for 3.4 or earlier, -- but there's no *valid* Python 3.4 code that would be -- rejected, and async functions will be rejected in a -- later phase.) */ -- if (!tok->async_hacks || tok->async_def) { -- /* Always recognize the keywords. */ -- if (memcmp(tok->start, "async", 5) == 0) { -- return MAKE_TOKEN(ASYNC); -- } -- if (memcmp(tok->start, "await", 5) == 0) { -- return MAKE_TOKEN(AWAIT); -- } -- } -- else if (memcmp(tok->start, "async", 5) == 0) { -- /* The current token is 'async'. -- Look ahead one token to see if that is 'def'. */ -- -- struct tok_state ahead_tok; -- struct token ahead_token; -- _PyToken_Init(&ahead_token); -- int ahead_tok_kind; -- -- memcpy(&ahead_tok, tok, sizeof(ahead_tok)); -- ahead_tok_kind = tok_get_normal_mode(&ahead_tok, -- current_tok, -- &ahead_token); -- -- if (ahead_tok_kind == NAME -- && ahead_tok.cur - ahead_tok.start == 3 -- && memcmp(ahead_tok.start, "def", 3) == 0) -- { -- /* The next token is going to be 'def', so instead of -- returning a plain NAME token, return ASYNC. 
*/ -- tok->async_def_indent = tok->indent; -- tok->async_def = 1; -- _PyToken_Free(&ahead_token); -- return MAKE_TOKEN(ASYNC); -- } -- _PyToken_Free(&ahead_token); -- } -+ /* Newline */ -+ if (c == '\n') { -+ tok->atbol = 1; -+ if (blankline || tok->level > 0) { -+ if (tok->tok_extra_tokens) { -+ if (tok->comment_newline) { -+ tok->comment_newline = 0; - } -- -- return MAKE_TOKEN(NAME); -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(NL); -+ } -+ goto nextline; - } -- -- if (c == '\r') { -- c = tok_nextc(tok); -+ if (tok->comment_newline && tok->tok_extra_tokens) { -+ tok->comment_newline = 0; -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(NL); -+ } -+ p_start = tok->start; -+ p_end = tok->cur - 1; /* Leave '\n' out of the string */ -+ tok->cont_line = 0; -+ if (tok->async_def) { -+ /* We're somewhere inside an 'async def' function, and -+ we've encountered a NEWLINE after its signature. */ -+ tok->async_def_nl = 1; - } -+ return MAKE_TOKEN(NEWLINE); -+ } - -- /* Newline */ -- if (c == '\n') { -- tok->atbol = 1; -- if (blankline || tok->level > 0) { -- if (tok->tok_extra_tokens) { -- if (tok->comment_newline) { -- tok->comment_newline = 0; -- } -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(NL); -- } -- goto nextline; -- } -- if (tok->comment_newline && tok->tok_extra_tokens) { -- tok->comment_newline = 0; -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(NL); -- } -+ /* Period or number starting with period? */ -+ if (c == '.') { -+ c = tok_nextc(tok); -+ if (isdigit(c)) { -+ goto fraction; -+ } else if (c == '.') { -+ c = tok_nextc(tok); -+ if (c == '.') { - p_start = tok->start; -- p_end = tok->cur - 1; /* Leave '\n' out of the string */ -- tok->cont_line = 0; -- if (tok->async_def) { -- /* We're somewhere inside an 'async def' function, and -- we've encountered a NEWLINE after its signature. */ -- tok->async_def_nl = 1; -- } -- return MAKE_TOKEN(NEWLINE); -+ p_end = tok->cur; -+ return MAKE_TOKEN(ELLIPSIS); -+ } else { -+ tok_backup(tok, c); -+ } -+ tok_backup(tok, '.'); -+ } else { -+ tok_backup(tok, c); - } -- -- /* Period or number starting with period? */ -- if (c == '.') { -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(DOT); -+ } -+ -+ /* Number */ -+ if (isdigit(c)) { -+ if (c == '0') { -+ /* Hex, octal or binary -- maybe. */ -+ c = tok_nextc(tok); -+ if (c == 'x' || c == 'X') { -+ /* Hex */ - c = tok_nextc(tok); -- if (isdigit(c)) { -- goto fraction; -- } else if (c == '.') { -+ do { -+ if (c == '_') { - c = tok_nextc(tok); -- if (c == '.') { -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(ELLIPSIS); -- } -- else { -- tok_backup(tok, c); -- } -- tok_backup(tok, '.'); -- } -- else { -+ } -+ if (!isxdigit(c)) { - tok_backup(tok, c); -- } -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(DOT); -- } -- -- /* Number */ -- if (isdigit(c)) { -- if (c == '0') { -- /* Hex, octal or binary -- maybe. 
*/ -+ return MAKE_TOKEN(syntaxerror(tok, "invalid hexadecimal literal")); -+ } -+ do { - c = tok_nextc(tok); -- if (c == 'x' || c == 'X') { -- /* Hex */ -- c = tok_nextc(tok); -- do { -- if (c == '_') { -- c = tok_nextc(tok); -- } -- if (!isxdigit(c)) { -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror(tok, "invalid hexadecimal literal")); -- } -- do { -- c = tok_nextc(tok); -- } while (isxdigit(c)); -- } while (c == '_'); -- if (!verify_end_of_number(tok, c, "hexadecimal")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- else if (c == 'o' || c == 'O') { -- /* Octal */ -- c = tok_nextc(tok); -- do { -- if (c == '_') { -- c = tok_nextc(tok); -- } -- if (c < '0' || c >= '8') { -- if (isdigit(c)) { -- return MAKE_TOKEN(syntaxerror(tok, -- "invalid digit '%c' in octal literal", c)); -- } -- else { -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror(tok, "invalid octal literal")); -- } -- } -- do { -- c = tok_nextc(tok); -- } while ('0' <= c && c < '8'); -- } while (c == '_'); -- if (isdigit(c)) { -- return MAKE_TOKEN(syntaxerror(tok, -- "invalid digit '%c' in octal literal", c)); -- } -- if (!verify_end_of_number(tok, c, "octal")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- else if (c == 'b' || c == 'B') { -- /* Binary */ -- c = tok_nextc(tok); -- do { -- if (c == '_') { -- c = tok_nextc(tok); -- } -- if (c != '0' && c != '1') { -- if (isdigit(c)) { -- return MAKE_TOKEN(syntaxerror(tok, "invalid digit '%c' in binary literal", c)); -- } -- else { -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror(tok, "invalid binary literal")); -- } -- } -- do { -- c = tok_nextc(tok); -- } while (c == '0' || c == '1'); -- } while (c == '_'); -- if (isdigit(c)) { -- return MAKE_TOKEN(syntaxerror(tok, "invalid digit '%c' in binary literal", c)); -- } -- if (!verify_end_of_number(tok, c, "binary")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- else { -- int nonzero = 0; -- /* maybe old-style octal; c is first char of it */ -- /* in any case, allow '0' as a literal */ -- while (1) { -- if (c == '_') { -- c = tok_nextc(tok); -- if (!isdigit(c)) { -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); -- } -- } -- if (c != '0') { -- break; -- } -- c = tok_nextc(tok); -- } -- char* zeros_end = tok->cur; -- if (isdigit(c)) { -- nonzero = 1; -- c = tok_decimal_tail(tok); -- if (c == 0) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- if (c == '.') { -- c = tok_nextc(tok); -- goto fraction; -- } -- else if (c == 'e' || c == 'E') { -- goto exponent; -- } -- else if (c == 'j' || c == 'J') { -- goto imaginary; -- } -- else if (nonzero && !tok->tok_extra_tokens) { -- /* Old-style octal: now disallowed. */ -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror_known_range( -- tok, (int)(tok->start + 1 - tok->line_start), -- (int)(zeros_end - tok->line_start), -- "leading zeros in decimal integer " -- "literals are not permitted; " -- "use an 0o prefix for octal integers")); -- } -- if (!verify_end_of_number(tok, c, "decimal")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- } -- else { -- /* Decimal */ -- c = tok_decimal_tail(tok); -- if (c == 0) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- { -- /* Accept floating point numbers. 
*/ -- if (c == '.') { -- c = tok_nextc(tok); -- fraction: -- /* Fraction */ -- if (isdigit(c)) { -- c = tok_decimal_tail(tok); -- if (c == 0) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- } -- if (c == 'e' || c == 'E') { -- int e; -- exponent: -- e = c; -- /* Exponent part */ -- c = tok_nextc(tok); -- if (c == '+' || c == '-') { -- c = tok_nextc(tok); -- if (!isdigit(c)) { -- tok_backup(tok, c); -- return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); -- } -- } else if (!isdigit(c)) { -- tok_backup(tok, c); -- if (!verify_end_of_number(tok, e, "decimal")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- tok_backup(tok, e); -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(NUMBER); -- } -- c = tok_decimal_tail(tok); -- if (c == 0) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- if (c == 'j' || c == 'J') { -- /* Imaginary part */ -- imaginary: -- c = tok_nextc(tok); -- if (!verify_end_of_number(tok, c, "imaginary")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- else if (!verify_end_of_number(tok, c, "decimal")) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -+ } while (isxdigit(c)); -+ } while (c == '_'); -+ if (!verify_end_of_number(tok, c, "hexadecimal")) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -- tok_backup(tok, c); -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(NUMBER); -- } -- -- f_string_quote: -- if (((tolower(*tok->start) == 'f' || tolower(*tok->start) == 'r') && (c == '\'' || c == '"'))) { -- int quote = c; -- int quote_size = 1; /* 1 or 3 */ -- -- /* Nodes of type STRING, especially multi line strings -- must be handled differently in order to get both -- the starting line number and the column offset right. -- (cf. issue 16806) */ -- tok->first_lineno = tok->lineno; -- tok->multi_line_start = tok->line_start; -- -- /* Find the quote size and start of string */ -- int after_quote = tok_nextc(tok); -- if (after_quote == quote) { -- int after_after_quote = tok_nextc(tok); -- if (after_after_quote == quote) { -- quote_size = 3; -- } -- else { -- // TODO: Check this -- tok_backup(tok, after_after_quote); -- tok_backup(tok, after_quote); -+ } else if (c == 'o' || c == 'O') { -+ /* Octal */ -+ c = tok_nextc(tok); -+ do { -+ if (c == '_') { -+ c = tok_nextc(tok); -+ } -+ if (c < '0' || c >= '8') { -+ if (isdigit(c)) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "invalid digit '%c' in octal literal", c)); -+ } else { -+ tok_backup(tok, c); -+ return MAKE_TOKEN(syntaxerror(tok, "invalid octal literal")); - } -+ } -+ do { -+ c = tok_nextc(tok); -+ } while ('0' <= c && c < '8'); -+ } while (c == '_'); -+ if (isdigit(c)) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "invalid digit '%c' in octal literal", c)); - } -- if (after_quote != quote) { -- tok_backup(tok, after_quote); -- } -- -- -- p_start = tok->start; -- p_end = tok->cur; -- if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) { -- return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings")); -- } -- tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok); -- the_current_tok->kind = TOK_FSTRING_MODE; -- the_current_tok->f_string_quote = quote; -- the_current_tok->f_string_quote_size = quote_size; -- the_current_tok->f_string_start = tok->start; -- the_current_tok->f_string_multi_line_start = tok->line_start; -- the_current_tok->f_string_line_start = tok->lineno; -- the_current_tok->f_string_start_offset = -1; -- the_current_tok->f_string_multi_line_start_offset = -1; -- the_current_tok->last_expr_buffer = NULL; -- the_current_tok->last_expr_size = 0; -- the_current_tok->last_expr_end = 
-1; -- the_current_tok->f_string_debug = 0; -- -- switch (*tok->start) { -- case 'F': -- case 'f': -- the_current_tok->f_string_raw = tolower(*(tok->start + 1)) == 'r'; -- break; -- case 'R': -- case 'r': -- the_current_tok->f_string_raw = 1; -- break; -- default: -- Py_UNREACHABLE(); -+ if (!verify_end_of_number(tok, c, "octal")) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -- -- the_current_tok->curly_bracket_depth = 0; -- the_current_tok->curly_bracket_expr_start_depth = -1; -- return MAKE_TOKEN(FSTRING_START); -- } -- -- letter_quote: -- /* String */ -- if (c == '\'' || c == '"') { -- int quote = c; -- int quote_size = 1; /* 1 or 3 */ -- int end_quote_size = 0; -- -- /* Nodes of type STRING, especially multi line strings -- must be handled differently in order to get both -- the starting line number and the column offset right. -- (cf. issue 16806) */ -- tok->first_lineno = tok->lineno; -- tok->multi_line_start = tok->line_start; -- -- /* Find the quote size and start of string */ -+ } else if (c == 'b' || c == 'B') { -+ /* Binary */ - c = tok_nextc(tok); -- if (c == quote) { -+ do { -+ if (c == '_') { - c = tok_nextc(tok); -- if (c == quote) { -- quote_size = 3; -- } -- else { -- end_quote_size = 1; /* empty string found */ -+ } -+ if (c != '0' && c != '1') { -+ if (isdigit(c)) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "invalid digit '%c' in binary literal", c)); -+ } else { -+ tok_backup(tok, c); -+ return MAKE_TOKEN(syntaxerror(tok, "invalid binary literal")); - } -+ } -+ do { -+ c = tok_nextc(tok); -+ } while (c == '0' || c == '1'); -+ } while (c == '_'); -+ if (isdigit(c)) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "invalid digit '%c' in binary literal", c)); - } -- if (c != quote) { -- tok_backup(tok, c); -+ if (!verify_end_of_number(tok, c, "binary")) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -- -- /* Get rest of string */ -- while (end_quote_size != quote_size) { -+ } else { -+ int nonzero = 0; -+ /* maybe old-style octal; c is first char of it */ -+ /* in any case, allow '0' as a literal */ -+ while (1) { -+ if (c == '_') { - c = tok_nextc(tok); -- if (tok->done == E_ERROR) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- if (tok->done == E_DECODE) { -- break; -- } -- if (c == EOF || (quote_size == 1 && c == '\n')) { -- assert(tok->multi_line_start != NULL); -- // shift the tok_state's location into -- // the start of string, and report the error -- // from the initial quote character -- tok->cur = (char *)tok->start; -- tok->cur++; -- tok->line_start = tok->multi_line_start; -- int start = tok->lineno; -- tok->lineno = tok->first_lineno; -- -- if (INSIDE_FSTRING(tok)) { -- /* When we are in an f-string, before raising the -- * unterminated string literal error, check whether -- * does the initial quote matches with f-strings quotes -- * and if it is, then this must be a missing '}' token -- * so raise the proper error */ -- tokenizer_mode *the_current_tok = TOK_GET_MODE(tok); -- if (the_current_tok->f_string_quote == quote && -- the_current_tok->f_string_quote_size == quote_size) { -- return MAKE_TOKEN(syntaxerror(tok, "f-string: expecting '}'", start)); -- } -- } -- -- if (quote_size == 3) { -- syntaxerror(tok, "unterminated triple-quoted string literal" -- " (detected at line %d)", start); -- if (c != '\n') { -- tok->done = E_EOFS; -- } -- return MAKE_TOKEN(ERRORTOKEN); -- } -- else { -- syntaxerror(tok, "unterminated string literal (detected at" -- " line %d)", start); -- if (c != '\n') { -- tok->done = E_EOLS; -- } -- return MAKE_TOKEN(ERRORTOKEN); -- } -+ if (!isdigit(c)) { -+ 
tok_backup(tok, c); -+ return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); - } -- if (c == quote) { -- end_quote_size += 1; -- } -- else { -- end_quote_size = 0; -- if (c == '\\') { -- c = tok_nextc(tok); /* skip escaped char */ -- if (c == '\r') { -- c = tok_nextc(tok); -- } -- } -- } -- } -- -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(STRING); -- } -- -- /* Line continuation */ -- if (c == '\\') { -- if ((c = tok_continuation_line(tok)) == -1) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- tok->cont_line = 1; -- goto again; /* Read next line */ -- } -- -- /* Punctuation character */ -- int is_punctuation = (c == ':' || c == '}' || c == '!' || c == '{'); -- if (is_punctuation && INSIDE_FSTRING(tok) && INSIDE_FSTRING_EXPR(current_tok)) { -- /* This code block gets executed before the curly_bracket_depth is incremented -- * by the `{` case, so for ensuring that we are on the 0th level, we need -- * to adjust it manually */ -- int cursor = current_tok->curly_bracket_depth - (c != '{'); -- if (cursor == 0 && !update_fstring_expr(tok, c)) { -- return MAKE_TOKEN(ENDMARKER); -+ } -+ if (c != '0') { -+ break; -+ } -+ c = tok_nextc(tok); - } -- if (cursor == 0 && c != '{' && set_fstring_expr(tok, token, c)) { -+ char *zeros_end = tok->cur; -+ if (isdigit(c)) { -+ nonzero = 1; -+ c = tok_decimal_tail(tok); -+ if (c == 0) { - return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } -+ if (c == '.') { -+ c = tok_nextc(tok); -+ goto fraction; -+ } else if (c == 'e' || c == 'E') { -+ goto exponent; -+ } else if (c == 'j' || c == 'J') { -+ goto imaginary; -+ } else if (nonzero && !tok->tok_extra_tokens) { -+ /* Old-style octal: now disallowed. */ -+ tok_backup(tok, c); -+ return MAKE_TOKEN(syntaxerror_known_range( -+ tok, (int)(tok->start + 1 - tok->line_start), -+ (int)(zeros_end - tok->line_start), -+ "leading zeros in decimal integer " -+ "literals are not permitted; " -+ "use an 0o prefix for octal integers")); -+ } -+ if (!verify_end_of_number(tok, c, "decimal")) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } -+ } else { -+ /* Decimal */ -+ c = tok_decimal_tail(tok); -+ if (c == 0) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ { -+ /* Accept floating point numbers. 
*/ -+ if (c == '.') { -+ c = tok_nextc(tok); -+ fraction: -+ /* Fraction */ -+ if (isdigit(c)) { -+ c = tok_decimal_tail(tok); -+ if (c == 0) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } - } -- -- if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) { -- current_tok->kind = TOK_FSTRING_MODE; -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(_PyToken_OneChar(c)); -- } -- } -- -- /* Check for two-character token */ -- { -- int c2 = tok_nextc(tok); -- int current_token = _PyToken_TwoChars(c, c2); -- if (current_token != OP) { -- int c3 = tok_nextc(tok); -- int current_token3 = _PyToken_ThreeChars(c, c2, c3); -- if (current_token3 != OP) { -- current_token = current_token3; -+ if (c == 'e' || c == 'E') { -+ int e; -+ exponent: -+ e = c; -+ /* Exponent part */ -+ c = tok_nextc(tok); -+ if (c == '+' || c == '-') { -+ c = tok_nextc(tok); -+ if (!isdigit(c)) { -+ tok_backup(tok, c); -+ return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); - } -- else { -- tok_backup(tok, c3); -+ } else if (!isdigit(c)) { -+ tok_backup(tok, c); -+ if (!verify_end_of_number(tok, e, "decimal")) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -+ tok_backup(tok, e); - p_start = tok->start; - p_end = tok->cur; -- return MAKE_TOKEN(current_token); -- } -- tok_backup(tok, c2); -- } -- -- /* Keep track of parentheses nesting level */ -- switch (c) { -- case '(': -- case '[': -- case '{': -- if (tok->level >= MAXLEVEL) { -- return MAKE_TOKEN(syntaxerror(tok, "too many nested parentheses")); -- } -- tok->parenstack[tok->level] = c; -- tok->parenlinenostack[tok->level] = tok->lineno; -- tok->parencolstack[tok->level] = (int)(tok->start - tok->line_start); -- tok->level++; -- if (INSIDE_FSTRING(tok)) { -- current_tok->curly_bracket_depth++; -- } -- break; -- case ')': -- case ']': -- case '}': -- if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') { -- return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed")); -- } -- if (!tok->tok_extra_tokens && !tok->level) { -- return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c)); -- } -- if (tok->level > 0) { -- tok->level--; -- int opening = tok->parenstack[tok->level]; -- if (!tok->tok_extra_tokens && !((opening == '(' && c == ')') || -- (opening == '[' && c == ']') || -- (opening == '{' && c == '}'))) { -- /* If the opening bracket belongs to an f-string's expression -- part (e.g. f"{)}") and the closing bracket is an arbitrary -- nested expression, then instead of matching a different -- syntactical construct with it; we'll throw an unmatched -- parentheses error. 
*/ -- if (INSIDE_FSTRING(tok) && opening == '{') { -- assert(current_tok->curly_bracket_depth >= 0); -- int previous_bracket = current_tok->curly_bracket_depth - 1; -- if (previous_bracket == current_tok->curly_bracket_expr_start_depth) { -- return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c)); -- } -- } -- if (tok->parenlinenostack[tok->level] != tok->lineno) { -- return MAKE_TOKEN(syntaxerror(tok, -- "closing parenthesis '%c' does not match " -- "opening parenthesis '%c' on line %d", -- c, opening, tok->parenlinenostack[tok->level])); -- } -- else { -- return MAKE_TOKEN(syntaxerror(tok, -- "closing parenthesis '%c' does not match " -- "opening parenthesis '%c'", -- c, opening)); -- } -- } -+ return MAKE_TOKEN(NUMBER); -+ } -+ c = tok_decimal_tail(tok); -+ if (c == 0) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } - } -- -- if (INSIDE_FSTRING(tok)) { -- current_tok->curly_bracket_depth--; -- if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) { -- current_tok->curly_bracket_expr_start_depth--; -- current_tok->kind = TOK_FSTRING_MODE; -- current_tok->f_string_debug = 0; -- } -+ if (c == 'j' || c == 'J') { -+ /* Imaginary part */ -+ imaginary: -+ c = tok_nextc(tok); -+ if (!verify_end_of_number(tok, c, "imaginary")) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } else if (!verify_end_of_number(tok, c, "decimal")) { -+ return MAKE_TOKEN(ERRORTOKEN); - } -- break; -- default: -- break; -- } -- -- if (!Py_UNICODE_ISPRINTABLE(c)) { -- return MAKE_TOKEN(syntaxerror(tok, "invalid non-printable character U+%04X", c)); -+ } - } -- -- if( c == '=' && INSIDE_FSTRING_EXPR(current_tok)) { -- current_tok->f_string_debug = 1; -- } -- -- /* Punctuation character */ -+ tok_backup(tok, c); - p_start = tok->start; - p_end = tok->cur; -- return MAKE_TOKEN(_PyToken_OneChar(c)); --} -- --static int --tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token) --{ -- const char *p_start = NULL; -- const char *p_end = NULL; -- int end_quote_size = 0; -- int unicode_escape = 0; -- -- tok->start = tok->cur; -+ return MAKE_TOKEN(NUMBER); -+ } -+ -+f_string_quote: -+ if (((tolower(*tok->start) == 'f' || tolower(*tok->start) == 'r') && -+ (c == '\'' || c == '"'))) { -+ int quote = c; -+ int quote_size = 1; /* 1 or 3 */ -+ -+ /* Nodes of type STRING, especially multi line strings -+ must be handled differently in order to get both -+ the starting line number and the column offset right. -+ (cf. issue 16806) */ - tok->first_lineno = tok->lineno; -- tok->starting_col_offset = tok->col_offset; -- -- // If we start with a bracket, we defer to the normal mode as there is nothing for us to tokenize -- // before it. 
-- int start_char = tok_nextc(tok); -- if (start_char == '{') { -- int peek1 = tok_nextc(tok); -- tok_backup(tok, peek1); -- tok_backup(tok, start_char); -- if (peek1 != '{') { -- current_tok->curly_bracket_expr_start_depth++; -- if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { -- return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply")); -- } -- TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -- return tok_get_normal_mode(tok, current_tok, token); -- } -- } -- else { -- tok_backup(tok, start_char); -- } -+ tok->multi_line_start = tok->line_start; - -- // Check if we are at the end of the string -- for (int i = 0; i < current_tok->f_string_quote_size; i++) { -- int quote = tok_nextc(tok); -- if (quote != current_tok->f_string_quote) { -- tok_backup(tok, quote); -- goto f_string_middle; -- } -+ /* Find the quote size and start of string */ -+ int after_quote = tok_nextc(tok); -+ if (after_quote == quote) { -+ int after_after_quote = tok_nextc(tok); -+ if (after_after_quote == quote) { -+ quote_size = 3; -+ } else { -+ // TODO: Check this -+ tok_backup(tok, after_after_quote); -+ tok_backup(tok, after_quote); -+ } - } -- -- if (current_tok->last_expr_buffer != NULL) { -- PyMem_Free(current_tok->last_expr_buffer); -- current_tok->last_expr_buffer = NULL; -- current_tok->last_expr_size = 0; -- current_tok->last_expr_end = -1; -+ if (after_quote != quote) { -+ tok_backup(tok, after_quote); - } - - p_start = tok->start; - p_end = tok->cur; -- tok->tok_mode_stack_index--; -- return MAKE_TOKEN(FSTRING_END); -+ if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) { -+ return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings")); -+ } -+ tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok); -+ the_current_tok->kind = TOK_FSTRING_MODE; -+ the_current_tok->f_string_quote = quote; -+ the_current_tok->f_string_quote_size = quote_size; -+ the_current_tok->f_string_start = tok->start; -+ the_current_tok->f_string_multi_line_start = tok->line_start; -+ the_current_tok->f_string_line_start = tok->lineno; -+ the_current_tok->f_string_start_offset = -1; -+ the_current_tok->f_string_multi_line_start_offset = -1; -+ the_current_tok->last_expr_buffer = NULL; -+ the_current_tok->last_expr_size = 0; -+ the_current_tok->last_expr_end = -1; -+ the_current_tok->f_string_debug = 0; -+ -+ switch (*tok->start) { -+ case 'F': -+ case 'f': -+ the_current_tok->f_string_raw = tolower(*(tok->start + 1)) == 'r'; -+ break; -+ case 'R': -+ case 'r': -+ the_current_tok->f_string_raw = 1; -+ break; -+ default: -+ Py_UNREACHABLE(); -+ } - --f_string_middle: -+ the_current_tok->curly_bracket_depth = 0; -+ the_current_tok->curly_bracket_expr_start_depth = -1; -+ return MAKE_TOKEN(FSTRING_START); -+ } - -- // TODO: This is a bit of a hack, but it works for now. We need to find a better way to handle -- // this. 
-- tok->multi_line_start = tok->line_start; -- while (end_quote_size != current_tok->f_string_quote_size) { -- int c = tok_nextc(tok); -- if (tok->done == E_ERROR) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- int in_format_spec = ( -- current_tok->last_expr_end != -1 -- && -- INSIDE_FSTRING_EXPR(current_tok) -- ); -- -- if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) { -- if (tok->decoding_erred) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -+letter_quote: -+ /* String */ -+ if (c == '\'' || c == '"') { -+ int quote = c; -+ int quote_size = 1; /* 1 or 3 */ -+ int end_quote_size = 0; - -- // If we are in a format spec and we found a newline, -- // it means that the format spec ends here and we should -- // return to the regular mode. -- if (in_format_spec && c == '\n') { -- tok_backup(tok, c); -- TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(FSTRING_MIDDLE); -- } -+ /* Nodes of type STRING, especially multi line strings -+ must be handled differently in order to get both -+ the starting line number and the column offset right. -+ (cf. issue 16806) */ -+ tok->first_lineno = tok->lineno; -+ tok->multi_line_start = tok->line_start; - -- assert(tok->multi_line_start != NULL); -- // shift the tok_state's location into -- // the start of string, and report the error -- // from the initial quote character -- tok->cur = (char *)current_tok->f_string_start; -- tok->cur++; -- tok->line_start = current_tok->f_string_multi_line_start; -- int start = tok->lineno; -- -- tokenizer_mode *the_current_tok = TOK_GET_MODE(tok); -- tok->lineno = the_current_tok->f_string_line_start; -- -- if (current_tok->f_string_quote_size == 3) { -- syntaxerror(tok, -- "unterminated triple-quoted f-string literal" -- " (detected at line %d)", start); -- if (c != '\n') { -- tok->done = E_EOFS; -- } -- return MAKE_TOKEN(ERRORTOKEN); -- } -- else { -- return MAKE_TOKEN(syntaxerror(tok, -- "unterminated f-string literal (detected at" -- " line %d)", start)); -- } -- } -+ /* Find the quote size and start of string */ -+ c = tok_nextc(tok); -+ if (c == quote) { -+ c = tok_nextc(tok); -+ if (c == quote) { -+ quote_size = 3; -+ } else { -+ end_quote_size = 1; /* empty string found */ -+ } -+ } -+ if (c != quote) { -+ tok_backup(tok, c); -+ } -+ -+ /* Get rest of string */ -+ while (end_quote_size != quote_size) { -+ c = tok_nextc(tok); -+ if (tok->done == E_ERROR) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ if (tok->done == E_DECODE) { -+ break; -+ } -+ if (c == EOF || (quote_size == 1 && c == '\n')) { -+ assert(tok->multi_line_start != NULL); -+ // shift the tok_state's location into -+ // the start of string, and report the error -+ // from the initial quote character -+ tok->cur = (char *)tok->start; -+ tok->cur++; -+ tok->line_start = tok->multi_line_start; -+ int start = tok->lineno; -+ tok->lineno = tok->first_lineno; - -- if (c == current_tok->f_string_quote) { -- end_quote_size += 1; -- continue; -+ if (INSIDE_FSTRING(tok)) { -+ /* When we are in an f-string, before raising the -+ * unterminated string literal error, check whether -+ * does the initial quote matches with f-strings quotes -+ * and if it is, then this must be a missing '}' token -+ * so raise the proper error */ -+ tokenizer_mode *the_current_tok = TOK_GET_MODE(tok); -+ if (the_current_tok->f_string_quote == quote && -+ the_current_tok->f_string_quote_size == quote_size) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "f-string: expecting '}'", start)); -+ } -+ } -+ -+ if (quote_size 
== 3) { -+ syntaxerror(tok, -+ "unterminated triple-quoted string literal" -+ " (detected at line %d)", -+ start); -+ if (c != '\n') { -+ tok->done = E_EOFS; -+ } -+ return MAKE_TOKEN(ERRORTOKEN); - } else { -- end_quote_size = 0; -- } -- -- if (c == '{') { -- int peek = tok_nextc(tok); -- if (peek != '{' || in_format_spec) { -- tok_backup(tok, peek); -- tok_backup(tok, c); -- current_tok->curly_bracket_expr_start_depth++; -- if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { -- return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply")); -- } -- TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -- p_start = tok->start; -- p_end = tok->cur; -- } else { -- p_start = tok->start; -- p_end = tok->cur - 1; -- } -- return MAKE_TOKEN(FSTRING_MIDDLE); -- } else if (c == '}') { -- if (unicode_escape) { -- p_start = tok->start; -- p_end = tok->cur; -- return MAKE_TOKEN(FSTRING_MIDDLE); -- } -- int peek = tok_nextc(tok); -- -- // The tokenizer can only be in the format spec if we have already completed the expression -- // scanning (indicated by the end of the expression being set) and we are not at the top level -- // of the bracket stack (-1 is the top level). Since format specifiers can't legally use double -- // brackets, we can bypass it here. -- if (peek == '}' && !in_format_spec) { -- p_start = tok->start; -- p_end = tok->cur - 1; -- } else { -- tok_backup(tok, peek); -- tok_backup(tok, c); -- TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -- p_start = tok->start; -- p_end = tok->cur; -- } -- return MAKE_TOKEN(FSTRING_MIDDLE); -- } else if (c == '\\') { -- int peek = tok_nextc(tok); -- if (peek == '\r') { -- peek = tok_nextc(tok); -- } -- // Special case when the backslash is right before a curly -- // brace. We have to restore and return the control back -- // to the loop for the next iteration. -- if (peek == '{' || peek == '}') { -- if (!current_tok->f_string_raw) { -- if (warn_invalid_escape_sequence(tok, peek)) { -- return MAKE_TOKEN(ERRORTOKEN); -- } -- } -- tok_backup(tok, peek); -- continue; -- } -- -- if (!current_tok->f_string_raw) { -- if (peek == 'N') { -- /* Handle named unicode escapes (\N{BULLET}) */ -- peek = tok_nextc(tok); -- if (peek == '{') { -- unicode_escape = 1; -- } else { -- tok_backup(tok, peek); -- } -- } -- } /* else { -- skip the escaped character -- }*/ -+ syntaxerror(tok, -+ "unterminated string literal (detected at" -+ " line %d)", -+ start); -+ if (c != '\n') { -+ tok->done = E_EOLS; -+ } -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } -+ if (c == quote) { -+ end_quote_size += 1; -+ } else { -+ end_quote_size = 0; -+ if (c == '\\') { -+ c = tok_nextc(tok); /* skip escaped char */ -+ if (c == '\r') { -+ c = tok_nextc(tok); -+ } - } -+ } - } - -- // Backup the f-string quotes to emit a final FSTRING_MIDDLE and -- // add the quotes to the FSTRING_END in the next tokenizer iteration. -- for (int i = 0; i < current_tok->f_string_quote_size; i++) { -- tok_backup(tok, current_tok->f_string_quote); -- } - p_start = tok->start; - p_end = tok->cur; -- return MAKE_TOKEN(FSTRING_MIDDLE); --} -+ return MAKE_TOKEN(STRING); -+ } -+ -+ /* Line continuation */ -+ if (c == '\\') { -+ if ((c = tok_continuation_line(tok)) == -1) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ tok->cont_line = 1; -+ goto again; /* Read next line */ -+ } -+ -+ /* Punctuation character */ -+ int is_punctuation = (c == ':' || c == '}' || c == '!' 
|| c == '{'); -+ if (is_punctuation && INSIDE_FSTRING(tok) && -+ INSIDE_FSTRING_EXPR(current_tok)) { -+ /* This code block gets executed before the curly_bracket_depth is -+ * incremented by the `{` case, so for ensuring that we are on the 0th -+ * level, we need to adjust it manually */ -+ int cursor = current_tok->curly_bracket_depth - (c != '{'); -+ int in_format_spec = current_tok->in_format_spec; -+ int cursor_in_format_with_debug = -+ cursor == 1 && (current_tok->f_string_debug || in_format_spec); -+ int cursor_valid = cursor == 0 || cursor_in_format_with_debug; -+ if (cursor_valid && !update_fstring_expr(tok, c)) { -+ return MAKE_TOKEN(ENDMARKER); -+ } -+ if (cursor_valid && c != '{' && set_fstring_expr(tok, token, c)) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ -+ if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) { -+ current_tok->kind = TOK_FSTRING_MODE; -+ current_tok->in_format_spec = 1; -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(_PyToken_OneChar(c)); -+ } -+ } -+ -+ /* Check for two-character token */ -+ { -+ int c2 = tok_nextc(tok); -+ int current_token = _PyToken_TwoChars(c, c2); -+ if (current_token != OP) { -+ int c3 = tok_nextc(tok); -+ int current_token3 = _PyToken_ThreeChars(c, c2, c3); -+ if (current_token3 != OP) { -+ current_token = current_token3; -+ } else { -+ tok_backup(tok, c3); -+ } -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(current_token); -+ } -+ tok_backup(tok, c2); -+ } -+ -+ /* Keep track of parentheses nesting level */ -+ switch (c) { -+ case '(': -+ case '[': -+ case '{': -+ if (tok->level >= MAXLEVEL) { -+ return MAKE_TOKEN(syntaxerror(tok, "too many nested parentheses")); -+ } -+ tok->parenstack[tok->level] = c; -+ tok->parenlinenostack[tok->level] = tok->lineno; -+ tok->parencolstack[tok->level] = (int)(tok->start - tok->line_start); -+ tok->level++; -+ if (INSIDE_FSTRING(tok)) { -+ current_tok->curly_bracket_depth++; -+ } -+ break; -+ case ')': -+ case ']': -+ case '}': -+ if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "f-string: single '}' is not allowed")); -+ } -+ if (!tok->tok_extra_tokens && !tok->level) { -+ return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c)); -+ } -+ if (tok->level > 0) { -+ tok->level--; -+ int opening = tok->parenstack[tok->level]; -+ if (!tok->tok_extra_tokens && -+ !((opening == '(' && c == ')') || (opening == '[' && c == ']') || -+ (opening == '{' && c == '}'))) { -+ /* If the opening bracket belongs to an f-string's expression -+ part (e.g. f"{)}") and the closing bracket is an arbitrary -+ nested expression, then instead of matching a different -+ syntactical construct with it; we'll throw an unmatched -+ parentheses error. 
*/ -+ if (INSIDE_FSTRING(tok) && opening == '{') { -+ assert(current_tok->curly_bracket_depth >= 0); -+ int previous_bracket = current_tok->curly_bracket_depth - 1; -+ if (previous_bracket == current_tok->curly_bracket_expr_start_depth) { -+ return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c)); -+ } -+ } -+ if (tok->parenlinenostack[tok->level] != tok->lineno) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, -+ "closing parenthesis '%c' does not match " -+ "opening parenthesis '%c' on line %d", -+ c, opening, tok->parenlinenostack[tok->level])); -+ } else { -+ return MAKE_TOKEN( -+ syntaxerror(tok, -+ "closing parenthesis '%c' does not match " -+ "opening parenthesis '%c'", -+ c, opening)); -+ } -+ } -+ } -+ -+ if (INSIDE_FSTRING(tok)) { -+ current_tok->curly_bracket_depth--; -+ if (current_tok->curly_bracket_depth < 0) { -+ return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c)); -+ } -+ if (c == '}' && current_tok->curly_bracket_depth == -+ current_tok->curly_bracket_expr_start_depth) { -+ current_tok->curly_bracket_expr_start_depth--; -+ current_tok->kind = TOK_FSTRING_MODE; -+ current_tok->in_format_spec = 0; -+ current_tok->f_string_debug = 0; -+ } -+ } -+ break; -+ default: -+ break; -+ } -+ -+ if (!Py_UNICODE_ISPRINTABLE(c)) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "invalid non-printable character U+%04X", c)); -+ } -+ -+ if (c == '=' && INSIDE_FSTRING_EXPR(current_tok)) { -+ current_tok->f_string_debug = 1; -+ } -+ -+ /* Punctuation character */ -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(_PyToken_OneChar(c)); -+} -+ -+static int tok_get_fstring_mode(struct tok_state *tok, -+ tokenizer_mode *current_tok, -+ struct token *token) { -+ const char *p_start = NULL; -+ const char *p_end = NULL; -+ int end_quote_size = 0; -+ int unicode_escape = 0; -+ -+ tok->start = tok->cur; -+ tok->first_lineno = tok->lineno; -+ tok->starting_col_offset = tok->col_offset; -+ -+ // If we start with a bracket, we defer to the normal mode as there is nothing -+ // for us to tokenize before it. 
-+ int start_char = tok_nextc(tok); -+ if (start_char == '{') { -+ int peek1 = tok_nextc(tok); -+ tok_backup(tok, peek1); -+ tok_backup(tok, start_char); -+ if (peek1 != '{') { -+ current_tok->curly_bracket_expr_start_depth++; -+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "f-string: expressions nested too deeply")); -+ } -+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -+ return tok_get_normal_mode(tok, current_tok, token); -+ } -+ } else { -+ tok_backup(tok, start_char); -+ } -+ -+ // Check if we are at the end of the string -+ for (int i = 0; i < current_tok->f_string_quote_size; i++) { -+ int quote = tok_nextc(tok); -+ if (quote != current_tok->f_string_quote) { -+ tok_backup(tok, quote); -+ goto f_string_middle; -+ } -+ } -+ -+ if (current_tok->last_expr_buffer != NULL) { -+ PyMem_Free(current_tok->last_expr_buffer); -+ current_tok->last_expr_buffer = NULL; -+ current_tok->last_expr_size = 0; -+ current_tok->last_expr_end = -1; -+ } -+ -+ p_start = tok->start; -+ p_end = tok->cur; -+ tok->tok_mode_stack_index--; -+ return MAKE_TOKEN(FSTRING_END); - -+f_string_middle: - --static int --tok_get(struct tok_state *tok, struct token *token) --{ -- tokenizer_mode *current_tok = TOK_GET_MODE(tok); -- if (current_tok->kind == TOK_REGULAR_MODE) { -- return tok_get_normal_mode(tok, current_tok, token); -- } else { -- return tok_get_fstring_mode(tok, current_tok, token); -+ // TODO: This is a bit of a hack, but it works for now. We need to find a -+ // better way to handle this. -+ tok->multi_line_start = tok->line_start; -+ while (end_quote_size != current_tok->f_string_quote_size) { -+ int c = tok_nextc(tok); -+ if (tok->done == E_ERROR || tok->done == E_DECODE) { -+ return MAKE_TOKEN(ERRORTOKEN); - } --} -+ int in_format_spec = -+ (current_tok->in_format_spec && INSIDE_FSTRING_EXPR(current_tok)); - --int --_PyTokenizer_Get(struct tok_state *tok, struct token *token) --{ -- int result = tok_get(tok, token); -- if (tok->decoding_erred) { -- result = ERRORTOKEN; -- tok->done = E_DECODE; -+ if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) { -+ if (tok->decoding_erred) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ -+ // If we are in a format spec and we found a newline, -+ // it means that the format spec ends here and we should -+ // return to the regular mode. 
-+ if (in_format_spec && c == '\n') { -+ tok_backup(tok, c); -+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -+ current_tok->in_format_spec = 0; -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(FSTRING_MIDDLE); -+ } -+ -+ assert(tok->multi_line_start != NULL); -+ // shift the tok_state's location into -+ // the start of string, and report the error -+ // from the initial quote character -+ tok->cur = (char *)current_tok->f_string_start; -+ tok->cur++; -+ tok->line_start = current_tok->f_string_multi_line_start; -+ int start = tok->lineno; -+ -+ tokenizer_mode *the_current_tok = TOK_GET_MODE(tok); -+ tok->lineno = the_current_tok->f_string_line_start; -+ -+ if (current_tok->f_string_quote_size == 3) { -+ syntaxerror(tok, -+ "unterminated triple-quoted f-string literal" -+ " (detected at line %d)", -+ start); -+ if (c != '\n') { -+ tok->done = E_EOFS; -+ } -+ return MAKE_TOKEN(ERRORTOKEN); -+ } else { -+ return MAKE_TOKEN( -+ syntaxerror(tok, -+ "unterminated f-string literal (detected at" -+ " line %d)", -+ start)); -+ } -+ } -+ -+ if (c == current_tok->f_string_quote) { -+ end_quote_size += 1; -+ continue; -+ } else { -+ end_quote_size = 0; - } -- return result; -+ -+ if (c == '{') { -+ if (!update_fstring_expr(tok, c)) { -+ return MAKE_TOKEN(ENDMARKER); -+ } -+ int peek = tok_nextc(tok); -+ if (peek != '{' || in_format_spec) { -+ tok_backup(tok, peek); -+ tok_backup(tok, c); -+ current_tok->curly_bracket_expr_start_depth++; -+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { -+ return MAKE_TOKEN( -+ syntaxerror(tok, "f-string: expressions nested too deeply")); -+ } -+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -+ current_tok->in_format_spec = 0; -+ p_start = tok->start; -+ p_end = tok->cur; -+ } else { -+ p_start = tok->start; -+ p_end = tok->cur - 1; -+ } -+ return MAKE_TOKEN(FSTRING_MIDDLE); -+ } else if (c == '}') { -+ if (unicode_escape) { -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(FSTRING_MIDDLE); -+ } -+ int peek = tok_nextc(tok); -+ -+ // The tokenizer can only be in the format spec if we have already -+ // completed the expression scanning (indicated by the end of the -+ // expression being set) and we are not at the top level of the bracket -+ // stack (-1 is the top level). Since format specifiers can't legally use -+ // double brackets, we can bypass it here. -+ int cursor = current_tok->curly_bracket_depth; -+ if (peek == '}' && !in_format_spec && cursor == 0) { -+ p_start = tok->start; -+ p_end = tok->cur - 1; -+ } else { -+ tok_backup(tok, peek); -+ tok_backup(tok, c); -+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; -+ p_start = tok->start; -+ p_end = tok->cur; -+ } -+ return MAKE_TOKEN(FSTRING_MIDDLE); -+ } else if (c == '\\') { -+ int peek = tok_nextc(tok); -+ if (peek == '\r') { -+ peek = tok_nextc(tok); -+ } -+ // Special case when the backslash is right before a curly -+ // brace. We have to restore and return the control back -+ // to the loop for the next iteration. 
-+ if (peek == '{' || peek == '}') { -+ if (!current_tok->f_string_raw) { -+ if (warn_invalid_escape_sequence(tok, peek)) { -+ return MAKE_TOKEN(ERRORTOKEN); -+ } -+ } -+ tok_backup(tok, peek); -+ continue; -+ } -+ -+ if (!current_tok->f_string_raw) { -+ if (peek == 'N') { -+ /* Handle named unicode escapes (\N{BULLET}) */ -+ peek = tok_nextc(tok); -+ if (peek == '{') { -+ unicode_escape = 1; -+ } else { -+ tok_backup(tok, peek); -+ } -+ } -+ } /* else { -+ skip the escaped character -+ }*/ -+ } -+ } -+ -+ // Backup the f-string quotes to emit a final FSTRING_MIDDLE and -+ // add the quotes to the FSTRING_END in the next tokenizer iteration. -+ for (int i = 0; i < current_tok->f_string_quote_size; i++) { -+ tok_backup(tok, current_tok->f_string_quote); -+ } -+ p_start = tok->start; -+ p_end = tok->cur; -+ return MAKE_TOKEN(FSTRING_MIDDLE); -+} -+ -+static int tok_get(struct tok_state *tok, struct token *token) { -+ tokenizer_mode *current_tok = TOK_GET_MODE(tok); -+ if (current_tok->kind == TOK_REGULAR_MODE) { -+ return tok_get_normal_mode(tok, current_tok, token); -+ } else { -+ return tok_get_fstring_mode(tok, current_tok, token); -+ } -+} -+ -+int _PyTokenizer_Get(struct tok_state *tok, struct token *token) { -+ int result = tok_get(tok, token); -+ if (tok->decoding_erred) { -+ result = ERRORTOKEN; -+ tok->done = E_DECODE; -+ } -+ return result; - } - --#if defined(__wasi__) || (defined(__EMSCRIPTEN__) && (__EMSCRIPTEN_major__ >= 3)) -+#if defined(__wasi__) || \ -+ (defined(__EMSCRIPTEN__) && (__EMSCRIPTEN_major__ >= 3)) - // fdopen() with borrowed fd. WASI does not provide dup() and Emscripten's - // dup() emulation with open() is slow. - typedef union { -- void *cookie; -- int fd; -+ void *cookie; -+ int fd; - } borrowed; - --static ssize_t --borrow_read(void *cookie, char *buf, size_t size) --{ -- borrowed b = {.cookie = cookie}; -- return read(b.fd, (void *)buf, size); -+static ssize_t borrow_read(void *cookie, char *buf, size_t size) { -+ borrowed b = {.cookie = cookie}; -+ return read(b.fd, (void *)buf, size); - } - --static FILE * --fdopen_borrow(int fd) { -- // supports only reading. seek fails. close and write are no-ops. -- cookie_io_functions_t io_cb = {borrow_read, NULL, NULL, NULL}; -- borrowed b = {.fd = fd}; -- return fopencookie(b.cookie, "r", io_cb); -+static FILE *fdopen_borrow(int fd) { -+ // supports only reading. seek fails. close and write are no-ops. -+ cookie_io_functions_t io_cb = {borrow_read, NULL, NULL, NULL}; -+ borrowed b = {.fd = fd}; -+ return fopencookie(b.cookie, "r", io_cb); - } - #else --static FILE * --fdopen_borrow(int fd) { -- fd = _Py_dup(fd); -- if (fd < 0) { -- return NULL; -- } -- return fdopen(fd, "r"); -+static FILE *fdopen_borrow(int fd) { -+ fd = _Py_dup(fd); -+ if (fd < 0) { -+ return NULL; -+ } -+ return fdopen(fd, "r"); - } - #endif - -@@ -3023,59 +2907,54 @@ - The char* returned is malloc'ed via PyMem_Malloc() and thus must be freed - by the caller. 
*/ - --char * --_PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) --{ -- struct tok_state *tok; -- FILE *fp; -- char *encoding = NULL; -- -- fp = fdopen_borrow(fd); -- if (fp == NULL) { -- return NULL; -- } -- tok = _PyTokenizer_FromFile(fp, NULL, NULL, NULL); -- if (tok == NULL) { -- fclose(fp); -- return NULL; -- } -- if (filename != NULL) { -- tok->filename = Py_NewRef(filename); -- } -- else { -- tok->filename = PyUnicode_FromString(""); -- if (tok->filename == NULL) { -- fclose(fp); -- _PyTokenizer_Free(tok); -- return encoding; -- } -- } -- struct token token; -- // We don't want to report warnings here because it could cause infinite recursion -- // if fetching the encoding shows a warning. -- tok->report_warnings = 0; -- while (tok->lineno < 2 && tok->done == E_OK) { -- _PyToken_Init(&token); -- _PyTokenizer_Get(tok, &token); -- _PyToken_Free(&token); -- } -+char *_PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) { -+ struct tok_state *tok; -+ FILE *fp; -+ char *encoding = NULL; -+ -+ fp = fdopen_borrow(fd); -+ if (fp == NULL) { -+ return NULL; -+ } -+ tok = _PyTokenizer_FromFile(fp, NULL, NULL, NULL); -+ if (tok == NULL) { - fclose(fp); -- if (tok->encoding) { -- encoding = (char *)PyMem_Malloc(strlen(tok->encoding) + 1); -- if (encoding) { -- strcpy(encoding, tok->encoding); -- } -- } -- _PyTokenizer_Free(tok); -- return encoding; -+ return NULL; -+ } -+ if (filename != NULL) { -+ tok->filename = Py_NewRef(filename); -+ } else { -+ tok->filename = PyUnicode_FromString(""); -+ if (tok->filename == NULL) { -+ fclose(fp); -+ _PyTokenizer_Free(tok); -+ return encoding; -+ } -+ } -+ struct token token; -+ // We don't want to report warnings here because it could cause infinite -+ // recursion if fetching the encoding shows a warning. -+ tok->report_warnings = 0; -+ while (tok->lineno < 2 && tok->done == E_OK) { -+ _PyToken_Init(&token); -+ _PyTokenizer_Get(tok, &token); -+ _PyToken_Free(&token); -+ } -+ fclose(fp); -+ if (tok->encoding) { -+ encoding = (char *)PyMem_Malloc(strlen(tok->encoding) + 1); -+ if (encoding) { -+ strcpy(encoding, tok->encoding); -+ } -+ } -+ _PyTokenizer_Free(tok); -+ return encoding; - } - - #ifdef Py_DEBUG --void --tok_dump(int type, char *start, char *end) --{ -- fprintf(stderr, "%s", _PyParser_TokenNames[type]); -- if (type == NAME || type == NUMBER || type == STRING || type == OP) -- fprintf(stderr, "(%.*s)", (int)(end - start), start); -+void tok_dump(int type, char *start, char *end) { -+ fprintf(stderr, "%s", _PyParser_TokenNames[type]); -+ if (type == NAME || type == NUMBER || type == STRING || type == OP) -+ fprintf(stderr, "(%.*s)", (int)(end - start), start); - } --#endif // Py_DEBUG -+#endif // Py_DEBUG -diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h -index 1e1daa3648f..4eeeb1f234e 100644 ---- a/Parser/tokenizer.h -+++ b/Parser/tokenizer.h -@@ -14,133 +14,136 @@ - #define MAXLEVEL 200 /* Max parentheses level */ - #define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */ - --enum decoding_state { -- STATE_INIT, -- STATE_SEEK_CODING, -- STATE_NORMAL --}; -+enum decoding_state { STATE_INIT, STATE_SEEK_CODING, STATE_NORMAL }; - - enum interactive_underflow_t { -- /* Normal mode of operation: return a new token when asked in interactive mode */ -- IUNDERFLOW_NORMAL, -- /* Forcefully return ENDMARKER when asked for a new token in interactive mode. 
This -- * can be used to prevent the tokenizer to prompt the user for new tokens */ -- IUNDERFLOW_STOP, -+ /* Normal mode of operation: return a new token when asked in interactive mode -+ */ -+ IUNDERFLOW_NORMAL, -+ /* Forcefully return ENDMARKER when asked for a new token in interactive mode. -+ * This can be used to prevent the tokenizer to prompt the user for new tokens -+ */ -+ IUNDERFLOW_STOP, - }; - - struct token { -- int level; -- int lineno, col_offset, end_lineno, end_col_offset; -- const char *start, *end; -- PyObject *metadata; -+ int level; -+ int lineno, col_offset, end_lineno, end_col_offset; -+ const char *start, *end; -+ PyObject *metadata; - }; - - enum tokenizer_mode_kind_t { -- TOK_REGULAR_MODE, -- TOK_FSTRING_MODE, -+ TOK_REGULAR_MODE, -+ TOK_FSTRING_MODE, - }; - - #define MAX_EXPR_NESTING 3 - - typedef struct _tokenizer_mode { -- enum tokenizer_mode_kind_t kind; -- -- int curly_bracket_depth; -- int curly_bracket_expr_start_depth; -- -- char f_string_quote; -- int f_string_quote_size; -- int f_string_raw; -- const char* f_string_start; -- const char* f_string_multi_line_start; -- int f_string_line_start; -- -- Py_ssize_t f_string_start_offset; -- Py_ssize_t f_string_multi_line_start_offset; -- -- Py_ssize_t last_expr_size; -- Py_ssize_t last_expr_end; -- char* last_expr_buffer; -- int f_string_debug; -+ enum tokenizer_mode_kind_t kind; -+ -+ int curly_bracket_depth; -+ int curly_bracket_expr_start_depth; -+ -+ char f_string_quote; -+ int f_string_quote_size; -+ int f_string_raw; -+ const char *f_string_start; -+ const char *f_string_multi_line_start; -+ int f_string_line_start; -+ -+ Py_ssize_t f_string_start_offset; -+ Py_ssize_t f_string_multi_line_start_offset; -+ -+ Py_ssize_t last_expr_size; -+ Py_ssize_t last_expr_end; -+ char *last_expr_buffer; -+ int f_string_debug; -+ int in_format_spec; - } tokenizer_mode; - - /* Tokenizer state */ - struct tok_state { -- /* Input state; buf <= cur <= inp <= end */ -- /* NB an entire line is held in the buffer */ -- char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL or readline != NULL */ -- char *cur; /* Next character in buffer */ -- char *inp; /* End of data in buffer */ -- int fp_interactive; /* If the file descriptor is interactive */ -- char *interactive_src_start; /* The start of the source parsed so far in interactive mode */ -- char *interactive_src_end; /* The end of the source parsed so far in interactive mode */ -- const char *end; /* End of input buffer if buf != NULL */ -- const char *start; /* Start of current token if not NULL */ -- int done; /* E_OK normally, E_EOF at EOF, otherwise error code */ -- /* NB If done != E_OK, cur must be == inp!!! */ -- FILE *fp; /* Rest of input; NULL if tokenizing a string */ -- int tabsize; /* Tab spacing */ -- int indent; /* Current indentation index */ -- int indstack[MAXINDENT]; /* Stack of indents */ -- int atbol; /* Nonzero if at begin of new line */ -- int pendin; /* Pending indents (if > 0) or dedents (if < 0) */ -- const char *prompt, *nextprompt; /* For interactive prompting */ -- int lineno; /* Current line number */ -- int first_lineno; /* First line of a single line or multi line string -- expression (cf. 
issue 16806) */ -- int starting_col_offset; /* The column offset at the beginning of a token */ -- int col_offset; /* Current col offset */ -- int level; /* () [] {} Parentheses nesting level */ -- /* Used to allow free continuations inside them */ -- char parenstack[MAXLEVEL]; -- int parenlinenostack[MAXLEVEL]; -- int parencolstack[MAXLEVEL]; -- PyObject *filename; -- /* Stuff for checking on different tab sizes */ -- int altindstack[MAXINDENT]; /* Stack of alternate indents */ -- /* Stuff for PEP 0263 */ -- enum decoding_state decoding_state; -- int decoding_erred; /* whether erred in decoding */ -- char *encoding; /* Source encoding. */ -- int cont_line; /* whether we are in a continuation line. */ -- const char* line_start; /* pointer to start of current line */ -- const char* multi_line_start; /* pointer to start of first line of -- a single line or multi line string -- expression (cf. issue 16806) */ -- PyObject *decoding_readline; /* open(...).readline */ -- PyObject *decoding_buffer; -- PyObject *readline; /* readline() function */ -- const char* enc; /* Encoding for the current str. */ -- char* str; /* Source string being tokenized (if tokenizing from a string)*/ -- char* input; /* Tokenizer's newline translated copy of the string. */ -- -- int type_comments; /* Whether to look for type comments */ -- -- /* async/await related fields (still needed depending on feature_version) */ -- int async_hacks; /* =1 if async/await aren't always keywords */ -- int async_def; /* =1 if tokens are inside an 'async def' body. */ -- int async_def_indent; /* Indentation level of the outermost 'async def'. */ -- int async_def_nl; /* =1 if the outermost 'async def' had at least one -- NEWLINE token after it. */ -- /* How to proceed when asked for a new token in interactive mode */ -- enum interactive_underflow_t interactive_underflow; -- int report_warnings; -- // TODO: Factor this into its own thing -- tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL]; -- int tok_mode_stack_index; -- int tok_extra_tokens; -- int comment_newline; -- int implicit_newline; -+ /* Input state; buf <= cur <= inp <= end */ -+ /* NB an entire line is held in the buffer */ -+ char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL or readline != -+ NULL */ -+ char *cur; /* Next character in buffer */ -+ char *inp; /* End of data in buffer */ -+ int fp_interactive; /* If the file descriptor is interactive */ -+ char *interactive_src_start; /* The start of the source parsed so far in -+ interactive mode */ -+ char *interactive_src_end; /* The end of the source parsed so far in -+ interactive mode */ -+ const char *end; /* End of input buffer if buf != NULL */ -+ const char *start; /* Start of current token if not NULL */ -+ int done; /* E_OK normally, E_EOF at EOF, otherwise error code */ -+ /* NB If done != E_OK, cur must be == inp!!! */ -+ FILE *fp; /* Rest of input; NULL if tokenizing a string */ -+ int tabsize; /* Tab spacing */ -+ int indent; /* Current indentation index */ -+ int indstack[MAXINDENT]; /* Stack of indents */ -+ int atbol; /* Nonzero if at begin of new line */ -+ int pendin; /* Pending indents (if > 0) or dedents (if < 0) */ -+ const char *prompt, *nextprompt; /* For interactive prompting */ -+ int lineno; /* Current line number */ -+ int first_lineno; /* First line of a single line or multi line string -+ expression (cf. 
issue 16806) */ -+ int starting_col_offset; /* The column offset at the beginning of a token */ -+ int col_offset; /* Current col offset */ -+ int level; /* () [] {} Parentheses nesting level */ -+ /* Used to allow free continuations inside them */ -+ char parenstack[MAXLEVEL]; -+ int parenlinenostack[MAXLEVEL]; -+ int parencolstack[MAXLEVEL]; -+ PyObject *filename; -+ /* Stuff for checking on different tab sizes */ -+ int altindstack[MAXINDENT]; /* Stack of alternate indents */ -+ /* Stuff for PEP 0263 */ -+ enum decoding_state decoding_state; -+ int decoding_erred; /* whether erred in decoding */ -+ char *encoding; /* Source encoding. */ -+ int cont_line; /* whether we are in a continuation line. */ -+ const char *line_start; /* pointer to start of current line */ -+ const char *multi_line_start; /* pointer to start of first line of -+ a single line or multi line string -+ expression (cf. issue 16806) */ -+ PyObject *decoding_readline; /* open(...).readline */ -+ PyObject *decoding_buffer; -+ PyObject *readline; /* readline() function */ -+ const char *enc; /* Encoding for the current str. */ -+ char *str; /* Source string being tokenized (if tokenizing from a string)*/ -+ char *input; /* Tokenizer's newline translated copy of the string. */ -+ -+ int type_comments; /* Whether to look for type comments */ -+ -+ /* async/await related fields (still needed depending on feature_version) */ -+ int async_hacks; /* =1 if async/await aren't always keywords */ -+ int async_def; /* =1 if tokens are inside an 'async def' body. */ -+ int async_def_indent; /* Indentation level of the outermost 'async def'. */ -+ int async_def_nl; /* =1 if the outermost 'async def' had at least one -+ NEWLINE token after it. */ -+ /* How to proceed when asked for a new token in interactive mode */ -+ enum interactive_underflow_t interactive_underflow; -+ int report_warnings; -+ // TODO: Factor this into its own thing -+ tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL]; -+ int tok_mode_stack_index; -+ int tok_extra_tokens; -+ int comment_newline; -+ int implicit_newline; - #ifdef Py_DEBUG -- int debug; -+ int debug; - #endif - }; - - extern struct tok_state *_PyTokenizer_FromString(const char *, int, int); - extern struct tok_state *_PyTokenizer_FromUTF8(const char *, int, int); --extern struct tok_state *_PyTokenizer_FromReadline(PyObject*, const char*, int, int); --extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char*, -- const char *, const char *); -+extern struct tok_state *_PyTokenizer_FromReadline(PyObject *, const char *, -+ int, int); -+extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char *, -+ const char *, const char *); - extern void _PyTokenizer_Free(struct tok_state *); - extern void _PyToken_Free(struct token *); - extern void _PyToken_Init(struct token *); -diff --git a/Programs/_testembed.c b/Programs/_testembed.c -index f78ba41fe7b..1386153ad12 100644 ---- a/Programs/_testembed.c -+++ b/Programs/_testembed.c -@@ -163,15 +163,23 @@ - static int test_repeated_init_exec(void) - { - if (main_argc < 3) { -- fprintf(stderr, "usage: %s test_repeated_init_exec CODE\n", PROGRAM); -+ fprintf(stderr, -+ "usage: %s test_repeated_init_exec CODE ...\n", PROGRAM); - exit(1); - } - const char *code = main_argv[2]; -+ int loops = main_argc > 3 -+ ? 
main_argc - 2 -+ : INIT_LOOPS; - -- for (int i=1; i <= INIT_LOOPS; i++) { -- fprintf(stderr, "--- Loop #%d ---\n", i); -+ for (int i=0; i < loops; i++) { -+ fprintf(stderr, "--- Loop #%d ---\n", i+1); - fflush(stderr); - -+ if (main_argc > 3) { -+ code = main_argv[i+2]; -+ } -+ - _testembed_Py_InitializeFromConfig(); - int err = PyRun_SimpleString(code); - Py_Finalize(); -diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h -index cd9d1032629..8c7cb58fbfd 100644 ---- a/Programs/test_frozenmain.h -+++ b/Programs/test_frozenmain.h -@@ -27,12 +27,11 @@ - 218,3,107,101,121,169,0,243,0,0,0,0,250,18,116,101, - 115,116,95,102,114,111,122,101,110,109,97,105,110,46,112,121, - 250,8,60,109,111,100,117,108,101,62,114,18,0,0,0,1, -- 0,0,0,115,102,0,0,0,240,3,1,1,1,243,8,0, -+ 0,0,0,115,97,0,0,0,240,3,1,1,1,243,8,0, - 1,11,219,0,24,225,0,5,208,6,26,212,0,27,217,0, - 5,128,106,144,35,151,40,145,40,212,0,27,216,9,38,208, - 9,26,215,9,38,209,9,38,211,9,40,168,24,209,9,50, -- 128,6,240,2,6,12,2,242,0,7,1,42,128,67,241,14, -- 0,5,10,136,71,144,67,144,53,152,2,152,54,160,35,153, -- 59,152,45,208,10,40,213,4,41,241,15,7,1,42,114,16, -- 0,0,0, -+ 128,6,243,2,6,12,2,128,67,241,14,0,5,10,136,71, -+ 144,67,144,53,152,2,152,54,160,35,153,59,152,45,208,10, -+ 40,213,4,41,241,15,6,12,2,114,16,0,0,0, - }; -diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c -index 664e7d8a50a..baad836d7a3 100644 ---- a/Python/Python-tokenize.c -+++ b/Python/Python-tokenize.c -@@ -35,6 +35,7 @@ - /* Needed to cache line for performance */ - PyObject *last_line; - Py_ssize_t last_lineno; -+ Py_ssize_t last_end_lineno; - Py_ssize_t byte_col_offset_diff; - } tokenizeriterobject; - -@@ -76,6 +77,7 @@ - self->last_line = NULL; - self->byte_col_offset_diff = 0; - self->last_lineno = 0; -+ self->last_end_lineno = 0; - - return (PyObject *)self; - } -@@ -212,6 +214,7 @@ - - const char *line_start = ISSTRINGLIT(type) ? it->tok->multi_line_start : it->tok->line_start; - PyObject* line = NULL; -+ int line_changed = 1; - if (it->tok->tok_extra_tokens && is_trailing_token) { - line = PyUnicode_FromString(""); - } else { -@@ -230,6 +233,7 @@ - } else { - // Line hasn't changed so we reuse the cached one. - line = it->last_line; -+ line_changed = 0; - } - } - if (line == NULL) { -@@ -240,13 +244,20 @@ - Py_ssize_t lineno = ISSTRINGLIT(type) ? it->tok->first_lineno : it->tok->lineno; - Py_ssize_t end_lineno = it->tok->lineno; - it->last_lineno = lineno; -+ it->last_end_lineno = end_lineno; - - Py_ssize_t col_offset = -1; - Py_ssize_t end_col_offset = -1; - Py_ssize_t byte_offset = -1; - if (token.start != NULL && token.start >= line_start) { - byte_offset = token.start - line_start; -- col_offset = byte_offset - it->byte_col_offset_diff; -+ if (line_changed) { -+ col_offset = _PyPegen_byte_offset_to_character_offset_line(line, 0, byte_offset); -+ it->byte_col_offset_diff = byte_offset - col_offset; -+ } -+ else { -+ col_offset = byte_offset - it->byte_col_offset_diff; -+ } - } - if (token.end != NULL && token.end >= it->tok->line_start) { - Py_ssize_t end_byte_offset = token.end - it->tok->line_start; -diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c -index 84fbc33a48c..1a65ddd0154 100644 ---- a/Python/bltinmodule.c -+++ b/Python/bltinmodule.c -@@ -2559,8 +2559,8 @@ - b = PyLong_AsLongAndOverflow(item, &overflow); - } - if (overflow == 0 && -- (i_result >= 0 ? (b <= LONG_MAX - i_result) -- : (b >= LONG_MIN - i_result))) -+ (i_result >= 0 ? 
(b <= PY_SSIZE_T_MAX - i_result) -+ : (b >= PY_SSIZE_T_MIN - i_result))) - { - i_result += b; - Py_DECREF(item); -diff --git a/Python/bytecodes.c b/Python/bytecodes.c -index e17b2294c24..b307edd57df 100644 ---- a/Python/bytecodes.c -+++ b/Python/bytecodes.c -@@ -1999,14 +1999,15 @@ - new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value); - ep->me_value = value; - } -- Py_DECREF(old_value); -- STAT_INC(STORE_ATTR, hit); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { - _PyObject_GC_TRACK(dict); - } -- /* PEP 509 */ -- dict->ma_version_tag = new_version; -+ dict->ma_version_tag = new_version; // PEP 509 -+ // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, -+ // when dict only holds the strong reference to value in ep->me_value. -+ Py_DECREF(old_value); -+ STAT_INC(STORE_ATTR, hit); - Py_DECREF(owner); - } - -diff --git a/Python/compile.c b/Python/compile.c -index 40335f6dc30..7255f5d1475 100644 ---- a/Python/compile.c -+++ b/Python/compile.c -@@ -2966,7 +2966,7 @@ - co = optimize_and_assemble(c, 0); - } - else { -- location loc = LOCATION(e->lineno, e->lineno, 0, 0); -+ location loc = LOC(e->v.Lambda.body); - ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); - co = optimize_and_assemble(c, 1); - } -@@ -3024,11 +3024,18 @@ - RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)); - - VISIT(c, expr, s->v.For.iter); -+ -+ loc = LOC(s->v.For.iter); - ADDOP(c, loc, GET_ITER); - - USE_LABEL(c, start); - ADDOP_JUMP(c, loc, FOR_ITER, cleanup); - -+ /* Add NOP to ensure correct line tracing of multiline for statements. -+ * It will be removed later if redundant. -+ */ -+ ADDOP(c, LOC(s->v.For.target), NOP); -+ - USE_LABEL(c, body); - VISIT(c, expr, s->v.For.target); - VISIT_SEQ(c, stmt, s->v.For.body); -@@ -3062,7 +3069,7 @@ - NEW_JUMP_TARGET_LABEL(c, end); - - VISIT(c, expr, s->v.AsyncFor.iter); -- ADDOP(c, loc, GET_AITER); -+ ADDOP(c, LOC(s->v.AsyncFor.iter), GET_AITER); - - USE_LABEL(c, start); - RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)); -@@ -5265,14 +5272,15 @@ - } - if (IS_LABEL(start)) { - VISIT(c, expr, gen->iter); -- ADDOP(c, loc, GET_ITER); -+ ADDOP(c, LOC(gen->iter), GET_ITER); - } - } - } -+ - if (IS_LABEL(start)) { - depth++; - USE_LABEL(c, start); -- ADDOP_JUMP(c, loc, FOR_ITER, anchor); -+ ADDOP_JUMP(c, LOC(gen->iter), FOR_ITER, anchor); - } - VISIT(c, expr, gen->target); - -@@ -5359,7 +5367,7 @@ - else { - /* Sub-iter - calculate on the fly */ - VISIT(c, expr, gen->iter); -- ADDOP(c, loc, GET_AITER); -+ ADDOP(c, LOC(gen->iter), GET_AITER); - } - } - -@@ -5644,15 +5652,14 @@ - } - - static inline int --compiler_comprehension_iter(struct compiler *c, location loc, -- comprehension_ty comp) -+compiler_comprehension_iter(struct compiler *c, comprehension_ty comp) - { - VISIT(c, expr, comp->iter); - if (comp->is_async) { -- ADDOP(c, loc, GET_AITER); -+ ADDOP(c, LOC(comp->iter), GET_AITER); - } - else { -- ADDOP(c, loc, GET_ITER); -+ ADDOP(c, LOC(comp->iter), GET_ITER); - } - return SUCCESS; - } -@@ -5678,7 +5685,7 @@ - - outermost = (comprehension_ty) asdl_seq_GET(generators, 0); - if (is_inlined) { -- if (compiler_comprehension_iter(c, loc, outermost)) { -+ if (compiler_comprehension_iter(c, outermost)) { - goto error; - } - if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) { -@@ -5764,7 +5771,7 @@ - } - Py_CLEAR(co); - -- if 
(compiler_comprehension_iter(c, loc, outermost)) { -+ if (compiler_comprehension_iter(c, outermost)) { - goto error; - } - -@@ -5906,7 +5913,7 @@ - - /* Evaluate EXPR */ - VISIT(c, expr, item->context_expr); -- -+ loc = LOC(item->context_expr); - ADDOP(c, loc, BEFORE_ASYNC_WITH); - ADDOP_I(c, loc, GET_AWAITABLE, 1); - ADDOP_LOAD_CONST(c, loc, Py_None); -@@ -6004,7 +6011,7 @@ - /* Evaluate EXPR */ - VISIT(c, expr, item->context_expr); - /* Will push bound __exit__ */ -- location loc = LOC(s); -+ location loc = LOC(item->context_expr); - ADDOP(c, loc, BEFORE_WITH); - ADDOP_JUMP(c, loc, SETUP_WITH, final); - -@@ -6037,7 +6044,6 @@ - /* For successful outcome: - * call __exit__(None, None, None) - */ -- loc = LOC(s); - RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc)); - ADDOP(c, loc, POP_TOP); - ADDOP_JUMP(c, loc, JUMP, exit); -@@ -6110,7 +6116,7 @@ - break; - case YieldFrom_kind: - if (!_PyST_IsFunctionLike(c->u->u_ste)) { -- return compiler_error(c, loc, "'yield' outside function"); -+ return compiler_error(c, loc, "'yield from' outside function"); - } - if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) { - return compiler_error(c, loc, "'yield from' inside async function"); -@@ -7682,7 +7688,7 @@ - PyCodeObject *co = NULL; - PyObject *consts = consts_dict_keys_inorder(u->u_metadata.u_consts); - if (consts == NULL) { -- goto error; -+ return NULL; - } - cfg_builder g; - if (instr_sequence_to_cfg(&u->u_instr_sequence, &g) < 0) { -diff --git a/Python/context.c b/Python/context.c -index 1ffae9871be..7bccfad11a4 100644 ---- a/Python/context.c -+++ b/Python/context.c -@@ -669,6 +669,7 @@ - ts, args[0], args + 1, nargs - 1, kwnames); - - if (_PyContext_Exit(ts, (PyObject *)self)) { -+ Py_XDECREF(call_result); - return NULL; - } - -diff --git a/Python/frame.c b/Python/frame.c -index a49215fa44a..b84fd9b6a93 100644 ---- a/Python/frame.c -+++ b/Python/frame.c -@@ -115,18 +115,6 @@ - } - } - --void --_PyFrame_ClearLocals(_PyInterpreterFrame *frame) --{ -- assert(frame->stacktop >= 0); -- int stacktop = frame->stacktop; -- frame->stacktop = 0; -- for (int i = 0; i < stacktop; i++) { -- Py_XDECREF(frame->localsplus[i]); -- } -- Py_CLEAR(frame->f_locals); --} -- - void - _PyFrame_ClearExceptCode(_PyInterpreterFrame *frame) - { -@@ -147,8 +135,12 @@ - } - Py_DECREF(f); - } -- _PyFrame_ClearLocals(frame); -+ assert(frame->stacktop >= 0); -+ for (int i = 0; i < frame->stacktop; i++) { -+ Py_XDECREF(frame->localsplus[i]); -+ } - Py_XDECREF(frame->frame_obj); -+ Py_XDECREF(frame->f_locals); - Py_DECREF(frame->f_funcobj); - } - -diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h -index a23cbd52ec3..bbaf589e2ef 100644 ---- a/Python/generated_cases.c.h -+++ b/Python/generated_cases.c.h -@@ -2762,16 +2762,17 @@ - new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value); - ep->me_value = value; - } -- Py_DECREF(old_value); -- STAT_INC(STORE_ATTR, hit); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { - _PyObject_GC_TRACK(dict); - } -- /* PEP 509 */ -- dict->ma_version_tag = new_version; -+ dict->ma_version_tag = new_version; // PEP 509 -+ // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, -+ // when dict only holds the strong reference to value in ep->me_value. 
-+ Py_DECREF(old_value); -+ STAT_INC(STORE_ATTR, hit); - Py_DECREF(owner); -- #line 2775 "Python/generated_cases.c.h" -+ #line 2776 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 4; - DISPATCH(); -@@ -2782,7 +2783,7 @@ - PyObject *value = stack_pointer[-2]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint16_t index = read_u16(&next_instr[3].cache); -- #line 2014 "Python/bytecodes.c" -+ #line 2015 "Python/bytecodes.c" - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); -@@ -2792,7 +2793,7 @@ - *(PyObject **)addr = value; - Py_XDECREF(old_value); - Py_DECREF(owner); -- #line 2796 "Python/generated_cases.c.h" -+ #line 2797 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 4; - DISPATCH(); -@@ -2804,7 +2805,7 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *res; -- #line 2033 "Python/bytecodes.c" -+ #line 2034 "Python/bytecodes.c" - #if ENABLE_SPECIALIZATION - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { -@@ -2817,12 +2818,12 @@ - #endif /* ENABLE_SPECIALIZATION */ - assert((oparg >> 4) <= Py_GE); - res = PyObject_RichCompare(left, right, oparg>>4); -- #line 2821 "Python/generated_cases.c.h" -+ #line 2822 "Python/generated_cases.c.h" - Py_DECREF(left); - Py_DECREF(right); -- #line 2046 "Python/bytecodes.c" -+ #line 2047 "Python/bytecodes.c" - if (res == NULL) goto pop_2_error; -- #line 2826 "Python/generated_cases.c.h" -+ #line 2827 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; -@@ -2833,7 +2834,7 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *res; -- #line 2050 "Python/bytecodes.c" -+ #line 2051 "Python/bytecodes.c" - DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); -@@ -2844,7 +2845,7 @@ - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - res = (sign_ish & oparg) ? Py_True : Py_False; -- #line 2848 "Python/generated_cases.c.h" -+ #line 2849 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; -@@ -2855,7 +2856,7 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *res; -- #line 2064 "Python/bytecodes.c" -+ #line 2065 "Python/bytecodes.c" - DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); - DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); -@@ -2870,7 +2871,7 @@ - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - res = (sign_ish & oparg) ? 
Py_True : Py_False; -- #line 2874 "Python/generated_cases.c.h" -+ #line 2875 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; -@@ -2881,7 +2882,7 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *res; -- #line 2082 "Python/bytecodes.c" -+ #line 2083 "Python/bytecodes.c" - DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); -@@ -2893,7 +2894,7 @@ - assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS); - assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS); - res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? Py_True : Py_False; -- #line 2897 "Python/generated_cases.c.h" -+ #line 2898 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; -@@ -2904,14 +2905,14 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *b; -- #line 2096 "Python/bytecodes.c" -+ #line 2097 "Python/bytecodes.c" - int res = Py_Is(left, right) ^ oparg; -- #line 2910 "Python/generated_cases.c.h" -+ #line 2911 "Python/generated_cases.c.h" - Py_DECREF(left); - Py_DECREF(right); -- #line 2098 "Python/bytecodes.c" -+ #line 2099 "Python/bytecodes.c" - b = res ? Py_True : Py_False; -- #line 2915 "Python/generated_cases.c.h" -+ #line 2916 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = b; - DISPATCH(); -@@ -2921,15 +2922,15 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *b; -- #line 2102 "Python/bytecodes.c" -+ #line 2103 "Python/bytecodes.c" - int res = PySequence_Contains(right, left); -- #line 2927 "Python/generated_cases.c.h" -+ #line 2928 "Python/generated_cases.c.h" - Py_DECREF(left); - Py_DECREF(right); -- #line 2104 "Python/bytecodes.c" -+ #line 2105 "Python/bytecodes.c" - if (res < 0) goto pop_2_error; - b = (res ^ oparg) ? 
Py_True : Py_False; -- #line 2933 "Python/generated_cases.c.h" -+ #line 2934 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = b; - DISPATCH(); -@@ -2940,12 +2941,12 @@ - PyObject *exc_value = stack_pointer[-2]; - PyObject *rest; - PyObject *match; -- #line 2109 "Python/bytecodes.c" -+ #line 2110 "Python/bytecodes.c" - if (check_except_star_type_valid(tstate, match_type) < 0) { -- #line 2946 "Python/generated_cases.c.h" -+ #line 2947 "Python/generated_cases.c.h" - Py_DECREF(exc_value); - Py_DECREF(match_type); -- #line 2111 "Python/bytecodes.c" -+ #line 2112 "Python/bytecodes.c" - if (true) goto pop_2_error; - } - -@@ -2953,10 +2954,10 @@ - rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); -- #line 2957 "Python/generated_cases.c.h" -+ #line 2958 "Python/generated_cases.c.h" - Py_DECREF(exc_value); - Py_DECREF(match_type); -- #line 2119 "Python/bytecodes.c" -+ #line 2120 "Python/bytecodes.c" - if (res < 0) goto pop_2_error; - - assert((match == NULL) == (rest == NULL)); -@@ -2965,7 +2966,7 @@ - if (!Py_IsNone(match)) { - PyErr_SetHandledException(match); - } -- #line 2969 "Python/generated_cases.c.h" -+ #line 2970 "Python/generated_cases.c.h" - stack_pointer[-1] = match; - stack_pointer[-2] = rest; - DISPATCH(); -@@ -2975,21 +2976,21 @@ - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *b; -- #line 2130 "Python/bytecodes.c" -+ #line 2131 "Python/bytecodes.c" - assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { -- #line 2982 "Python/generated_cases.c.h" -+ #line 2983 "Python/generated_cases.c.h" - Py_DECREF(right); -- #line 2133 "Python/bytecodes.c" -+ #line 2134 "Python/bytecodes.c" - if (true) goto pop_1_error; - } - - int res = PyErr_GivenExceptionMatches(left, right); -- #line 2989 "Python/generated_cases.c.h" -+ #line 2990 "Python/generated_cases.c.h" - Py_DECREF(right); -- #line 2138 "Python/bytecodes.c" -+ #line 2139 "Python/bytecodes.c" - b = res ? 
Py_True : Py_False; -- #line 2993 "Python/generated_cases.c.h" -+ #line 2994 "Python/generated_cases.c.h" - stack_pointer[-1] = b; - DISPATCH(); - } -@@ -2998,15 +2999,15 @@ - PyObject *fromlist = stack_pointer[-1]; - PyObject *level = stack_pointer[-2]; - PyObject *res; -- #line 2142 "Python/bytecodes.c" -+ #line 2143 "Python/bytecodes.c" - PyObject *name = GETITEM(frame->f_code->co_names, oparg); - res = import_name(tstate, frame, name, fromlist, level); -- #line 3005 "Python/generated_cases.c.h" -+ #line 3006 "Python/generated_cases.c.h" - Py_DECREF(level); - Py_DECREF(fromlist); -- #line 2145 "Python/bytecodes.c" -+ #line 2146 "Python/bytecodes.c" - if (res == NULL) goto pop_2_error; -- #line 3010 "Python/generated_cases.c.h" -+ #line 3011 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - DISPATCH(); -@@ -3015,29 +3016,29 @@ - TARGET(IMPORT_FROM) { - PyObject *from = stack_pointer[-1]; - PyObject *res; -- #line 2149 "Python/bytecodes.c" -+ #line 2150 "Python/bytecodes.c" - PyObject *name = GETITEM(frame->f_code->co_names, oparg); - res = import_from(tstate, from, name); - if (res == NULL) goto error; -- #line 3023 "Python/generated_cases.c.h" -+ #line 3024 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - DISPATCH(); - } - - TARGET(JUMP_FORWARD) { -- #line 2155 "Python/bytecodes.c" -+ #line 2156 "Python/bytecodes.c" - JUMPBY(oparg); -- #line 3032 "Python/generated_cases.c.h" -+ #line 3033 "Python/generated_cases.c.h" - DISPATCH(); - } - - TARGET(JUMP_BACKWARD) { - PREDICTED(JUMP_BACKWARD); -- #line 2159 "Python/bytecodes.c" -+ #line 2160 "Python/bytecodes.c" - assert(oparg < INSTR_OFFSET()); - JUMPBY(-oparg); -- #line 3041 "Python/generated_cases.c.h" -+ #line 3042 "Python/generated_cases.c.h" - CHECK_EVAL_BREAKER(); - DISPATCH(); - } -@@ -3045,15 +3046,15 @@ - TARGET(POP_JUMP_IF_FALSE) { - PREDICTED(POP_JUMP_IF_FALSE); - PyObject *cond = stack_pointer[-1]; -- #line 2165 "Python/bytecodes.c" -+ #line 2166 "Python/bytecodes.c" - if (Py_IsFalse(cond)) { - JUMPBY(oparg); - } - else if (!Py_IsTrue(cond)) { - int err = PyObject_IsTrue(cond); -- #line 3055 "Python/generated_cases.c.h" -+ #line 3056 "Python/generated_cases.c.h" - Py_DECREF(cond); -- #line 2171 "Python/bytecodes.c" -+ #line 2172 "Python/bytecodes.c" - if (err == 0) { - JUMPBY(oparg); - } -@@ -3061,22 +3062,22 @@ - if (err < 0) goto pop_1_error; - } - } -- #line 3065 "Python/generated_cases.c.h" -+ #line 3066 "Python/generated_cases.c.h" - STACK_SHRINK(1); - DISPATCH(); - } - - TARGET(POP_JUMP_IF_TRUE) { - PyObject *cond = stack_pointer[-1]; -- #line 2181 "Python/bytecodes.c" -+ #line 2182 "Python/bytecodes.c" - if (Py_IsTrue(cond)) { - JUMPBY(oparg); - } - else if (!Py_IsFalse(cond)) { - int err = PyObject_IsTrue(cond); -- #line 3078 "Python/generated_cases.c.h" -+ #line 3079 "Python/generated_cases.c.h" - Py_DECREF(cond); -- #line 2187 "Python/bytecodes.c" -+ #line 2188 "Python/bytecodes.c" - if (err > 0) { - JUMPBY(oparg); - } -@@ -3084,63 +3085,63 @@ - if (err < 0) goto pop_1_error; - } - } -- #line 3088 "Python/generated_cases.c.h" -+ #line 3089 "Python/generated_cases.c.h" - STACK_SHRINK(1); - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NOT_NONE) { - PyObject *value = stack_pointer[-1]; -- #line 2197 "Python/bytecodes.c" -+ #line 2198 "Python/bytecodes.c" - if (!Py_IsNone(value)) { -- #line 3097 "Python/generated_cases.c.h" -+ #line 3098 "Python/generated_cases.c.h" - Py_DECREF(value); -- #line 2199 "Python/bytecodes.c" -+ #line 2200 "Python/bytecodes.c" - JUMPBY(oparg); - } 
-- #line 3102 "Python/generated_cases.c.h" -+ #line 3103 "Python/generated_cases.c.h" - STACK_SHRINK(1); - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NONE) { - PyObject *value = stack_pointer[-1]; -- #line 2204 "Python/bytecodes.c" -+ #line 2205 "Python/bytecodes.c" - if (Py_IsNone(value)) { - JUMPBY(oparg); - } - else { -- #line 3114 "Python/generated_cases.c.h" -+ #line 3115 "Python/generated_cases.c.h" - Py_DECREF(value); -- #line 2209 "Python/bytecodes.c" -+ #line 2210 "Python/bytecodes.c" - } -- #line 3118 "Python/generated_cases.c.h" -+ #line 3119 "Python/generated_cases.c.h" - STACK_SHRINK(1); - DISPATCH(); - } - - TARGET(JUMP_BACKWARD_NO_INTERRUPT) { -- #line 2213 "Python/bytecodes.c" -+ #line 2214 "Python/bytecodes.c" - /* This bytecode is used in the `yield from` or `await` loop. - * If there is an interrupt, we want it handled in the innermost - * generator or coroutine, so we deliberately do not check it here. - * (see bpo-30039). - */ - JUMPBY(-oparg); -- #line 3131 "Python/generated_cases.c.h" -+ #line 3132 "Python/generated_cases.c.h" - DISPATCH(); - } - - TARGET(GET_LEN) { - PyObject *obj = stack_pointer[-1]; - PyObject *len_o; -- #line 2222 "Python/bytecodes.c" -+ #line 2223 "Python/bytecodes.c" - // PUSH(len(TOS)) - Py_ssize_t len_i = PyObject_Length(obj); - if (len_i < 0) goto error; - len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) goto error; -- #line 3144 "Python/generated_cases.c.h" -+ #line 3145 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = len_o; - DISPATCH(); -@@ -3151,16 +3152,16 @@ - PyObject *type = stack_pointer[-2]; - PyObject *subject = stack_pointer[-3]; - PyObject *attrs; -- #line 2230 "Python/bytecodes.c" -+ #line 2231 "Python/bytecodes.c" - // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or - // None on failure. - assert(PyTuple_CheckExact(names)); - attrs = match_class(tstate, subject, type, oparg, names); -- #line 3160 "Python/generated_cases.c.h" -+ #line 3161 "Python/generated_cases.c.h" - Py_DECREF(subject); - Py_DECREF(type); - Py_DECREF(names); -- #line 2235 "Python/bytecodes.c" -+ #line 2236 "Python/bytecodes.c" - if (attrs) { - assert(PyTuple_CheckExact(attrs)); // Success! - } -@@ -3168,7 +3169,7 @@ - if (_PyErr_Occurred(tstate)) goto pop_3_error; - attrs = Py_None; // Failure! - } -- #line 3172 "Python/generated_cases.c.h" -+ #line 3173 "Python/generated_cases.c.h" - STACK_SHRINK(2); - stack_pointer[-1] = attrs; - DISPATCH(); -@@ -3177,10 +3178,10 @@ - TARGET(MATCH_MAPPING) { - PyObject *subject = stack_pointer[-1]; - PyObject *res; -- #line 2245 "Python/bytecodes.c" -+ #line 2246 "Python/bytecodes.c" - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; - res = match ? Py_True : Py_False; -- #line 3184 "Python/generated_cases.c.h" -+ #line 3185 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - PREDICT(POP_JUMP_IF_FALSE); -@@ -3190,10 +3191,10 @@ - TARGET(MATCH_SEQUENCE) { - PyObject *subject = stack_pointer[-1]; - PyObject *res; -- #line 2251 "Python/bytecodes.c" -+ #line 2252 "Python/bytecodes.c" - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; - res = match ? 
Py_True : Py_False; -- #line 3197 "Python/generated_cases.c.h" -+ #line 3198 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - PREDICT(POP_JUMP_IF_FALSE); -@@ -3204,11 +3205,11 @@ - PyObject *keys = stack_pointer[-1]; - PyObject *subject = stack_pointer[-2]; - PyObject *values_or_none; -- #line 2257 "Python/bytecodes.c" -+ #line 2258 "Python/bytecodes.c" - // On successful match, PUSH(values). Otherwise, PUSH(None). - values_or_none = match_keys(tstate, subject, keys); - if (values_or_none == NULL) goto error; -- #line 3212 "Python/generated_cases.c.h" -+ #line 3213 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = values_or_none; - DISPATCH(); -@@ -3217,14 +3218,14 @@ - TARGET(GET_ITER) { - PyObject *iterable = stack_pointer[-1]; - PyObject *iter; -- #line 2263 "Python/bytecodes.c" -+ #line 2264 "Python/bytecodes.c" - /* before: [obj]; after [getiter(obj)] */ - iter = PyObject_GetIter(iterable); -- #line 3224 "Python/generated_cases.c.h" -+ #line 3225 "Python/generated_cases.c.h" - Py_DECREF(iterable); -- #line 2266 "Python/bytecodes.c" -+ #line 2267 "Python/bytecodes.c" - if (iter == NULL) goto pop_1_error; -- #line 3228 "Python/generated_cases.c.h" -+ #line 3229 "Python/generated_cases.c.h" - stack_pointer[-1] = iter; - DISPATCH(); - } -@@ -3232,7 +3233,7 @@ - TARGET(GET_YIELD_FROM_ITER) { - PyObject *iterable = stack_pointer[-1]; - PyObject *iter; -- #line 2270 "Python/bytecodes.c" -+ #line 2271 "Python/bytecodes.c" - /* before: [obj]; after [getiter(obj)] */ - if (PyCoro_CheckExact(iterable)) { - /* `iterable` is a coroutine */ -@@ -3255,11 +3256,11 @@ - if (iter == NULL) { - goto error; - } -- #line 3259 "Python/generated_cases.c.h" -+ #line 3260 "Python/generated_cases.c.h" - Py_DECREF(iterable); -- #line 2293 "Python/bytecodes.c" -+ #line 2294 "Python/bytecodes.c" - } -- #line 3263 "Python/generated_cases.c.h" -+ #line 3264 "Python/generated_cases.c.h" - stack_pointer[-1] = iter; - PREDICT(LOAD_CONST); - DISPATCH(); -@@ -3270,7 +3271,7 @@ - static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); - PyObject *iter = stack_pointer[-1]; - PyObject *next; -- #line 2312 "Python/bytecodes.c" -+ #line 2313 "Python/bytecodes.c" - #if ENABLE_SPECIALIZATION - _PyForIterCache *cache = (_PyForIterCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { -@@ -3301,7 +3302,7 @@ - DISPATCH(); - } - // Common case: no jump, leave it to the code generator -- #line 3305 "Python/generated_cases.c.h" -+ #line 3306 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = next; - next_instr += 1; -@@ -3309,7 +3310,7 @@ - } - - TARGET(INSTRUMENTED_FOR_ITER) { -- #line 2345 "Python/bytecodes.c" -+ #line 2346 "Python/bytecodes.c" - _Py_CODEUNIT *here = next_instr-1; - _Py_CODEUNIT *target; - PyObject *iter = TOP(); -@@ -3335,14 +3336,14 @@ - target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; - } - INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH); -- #line 3339 "Python/generated_cases.c.h" -+ #line 3340 "Python/generated_cases.c.h" - DISPATCH(); - } - - TARGET(FOR_ITER_LIST) { - PyObject *iter = stack_pointer[-1]; - PyObject *next; -- #line 2373 "Python/bytecodes.c" -+ #line 2374 "Python/bytecodes.c" - DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); - _PyListIterObject *it = (_PyListIterObject *)iter; - STAT_INC(FOR_ITER, hit); -@@ -3362,7 +3363,7 @@ - DISPATCH(); - end_for_iter_list: - // Common case: no jump, leave it to the code generator -- #line 3366 "Python/generated_cases.c.h" -+ #line 
3367 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = next; - next_instr += 1; -@@ -3372,7 +3373,7 @@ - TARGET(FOR_ITER_TUPLE) { - PyObject *iter = stack_pointer[-1]; - PyObject *next; -- #line 2395 "Python/bytecodes.c" -+ #line 2396 "Python/bytecodes.c" - _PyTupleIterObject *it = (_PyTupleIterObject *)iter; - DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); -@@ -3392,7 +3393,7 @@ - DISPATCH(); - end_for_iter_tuple: - // Common case: no jump, leave it to the code generator -- #line 3396 "Python/generated_cases.c.h" -+ #line 3397 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = next; - next_instr += 1; -@@ -3402,7 +3403,7 @@ - TARGET(FOR_ITER_RANGE) { - PyObject *iter = stack_pointer[-1]; - PyObject *next; -- #line 2417 "Python/bytecodes.c" -+ #line 2418 "Python/bytecodes.c" - _PyRangeIterObject *r = (_PyRangeIterObject *)iter; - DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); -@@ -3420,7 +3421,7 @@ - if (next == NULL) { - goto error; - } -- #line 3424 "Python/generated_cases.c.h" -+ #line 3425 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = next; - next_instr += 1; -@@ -3429,7 +3430,7 @@ - - TARGET(FOR_ITER_GEN) { - PyObject *iter = stack_pointer[-1]; -- #line 2437 "Python/bytecodes.c" -+ #line 2438 "Python/bytecodes.c" - DEOPT_IF(tstate->interp->eval_frame, FOR_ITER); - PyGenObject *gen = (PyGenObject *)iter; - DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); -@@ -3445,14 +3446,14 @@ - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - DISPATCH_INLINED(gen_frame); -- #line 3449 "Python/generated_cases.c.h" -+ #line 3450 "Python/generated_cases.c.h" - } - - TARGET(BEFORE_ASYNC_WITH) { - PyObject *mgr = stack_pointer[-1]; - PyObject *exit; - PyObject *res; -- #line 2455 "Python/bytecodes.c" -+ #line 2456 "Python/bytecodes.c" - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { -@@ -3475,16 +3476,16 @@ - Py_DECREF(enter); - goto error; - } -- #line 3479 "Python/generated_cases.c.h" -+ #line 3480 "Python/generated_cases.c.h" - Py_DECREF(mgr); -- #line 2478 "Python/bytecodes.c" -+ #line 2479 "Python/bytecodes.c" - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - Py_DECREF(exit); - if (true) goto pop_1_error; - } -- #line 3488 "Python/generated_cases.c.h" -+ #line 3489 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - stack_pointer[-2] = exit; -@@ -3496,7 +3497,7 @@ - PyObject *mgr = stack_pointer[-1]; - PyObject *exit; - PyObject *res; -- #line 2488 "Python/bytecodes.c" -+ #line 2489 "Python/bytecodes.c" - /* pop the context manager, push its __exit__ and the - * value returned from calling its __enter__ - */ -@@ -3522,16 +3523,16 @@ - Py_DECREF(enter); - goto error; - } -- #line 3526 "Python/generated_cases.c.h" -+ #line 3527 "Python/generated_cases.c.h" - Py_DECREF(mgr); -- #line 2514 "Python/bytecodes.c" -+ #line 2515 "Python/bytecodes.c" - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - Py_DECREF(exit); - if (true) goto pop_1_error; - } -- #line 3535 "Python/generated_cases.c.h" -+ #line 3536 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - stack_pointer[-2] = exit; -@@ -3543,7 +3544,7 @@ - PyObject *lasti = stack_pointer[-3]; - PyObject *exit_func = stack_pointer[-4]; - PyObject *res; -- #line 2523 "Python/bytecodes.c" -+ #line 2524 
"Python/bytecodes.c" - /* At the top of the stack are 4 values: - - val: TOP = exc_info() - - unused: SECOND = previous exception -@@ -3569,7 +3570,7 @@ - res = PyObject_Vectorcall(exit_func, stack + 1, - 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); - if (res == NULL) goto error; -- #line 3573 "Python/generated_cases.c.h" -+ #line 3574 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = res; - DISPATCH(); -@@ -3578,7 +3579,7 @@ - TARGET(PUSH_EXC_INFO) { - PyObject *new_exc = stack_pointer[-1]; - PyObject *prev_exc; -- #line 2551 "Python/bytecodes.c" -+ #line 2552 "Python/bytecodes.c" - _PyErr_StackItem *exc_info = tstate->exc_info; - if (exc_info->exc_value != NULL) { - prev_exc = exc_info->exc_value; -@@ -3588,7 +3589,7 @@ - } - assert(PyExceptionInstance_Check(new_exc)); - exc_info->exc_value = Py_NewRef(new_exc); -- #line 3592 "Python/generated_cases.c.h" -+ #line 3593 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = new_exc; - stack_pointer[-2] = prev_exc; -@@ -3602,7 +3603,7 @@ - uint32_t type_version = read_u32(&next_instr[1].cache); - uint32_t keys_version = read_u32(&next_instr[3].cache); - PyObject *descr = read_obj(&next_instr[5].cache); -- #line 2563 "Python/bytecodes.c" -+ #line 2564 "Python/bytecodes.c" - /* Cached method object */ - PyTypeObject *self_cls = Py_TYPE(self); - assert(type_version != 0); -@@ -3619,7 +3620,7 @@ - assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res = self; - assert(oparg & 1); -- #line 3623 "Python/generated_cases.c.h" -+ #line 3624 "Python/generated_cases.c.h" - STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } -@@ -3633,7 +3634,7 @@ - PyObject *res; - uint32_t type_version = read_u32(&next_instr[1].cache); - PyObject *descr = read_obj(&next_instr[5].cache); -- #line 2582 "Python/bytecodes.c" -+ #line 2583 "Python/bytecodes.c" - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); -@@ -3643,7 +3644,7 @@ - res2 = Py_NewRef(descr); - res = self; - assert(oparg & 1); -- #line 3647 "Python/generated_cases.c.h" -+ #line 3648 "Python/generated_cases.c.h" - STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } -@@ -3657,7 +3658,7 @@ - PyObject *res; - uint32_t type_version = read_u32(&next_instr[1].cache); - PyObject *descr = read_obj(&next_instr[5].cache); -- #line 2594 "Python/bytecodes.c" -+ #line 2595 "Python/bytecodes.c" - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - Py_ssize_t dictoffset = self_cls->tp_dictoffset; -@@ -3671,7 +3672,7 @@ - res2 = Py_NewRef(descr); - res = self; - assert(oparg & 1); -- #line 3675 "Python/generated_cases.c.h" -+ #line 3676 "Python/generated_cases.c.h" - STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } -@@ -3680,16 +3681,16 @@ - } - - TARGET(KW_NAMES) { -- #line 2610 "Python/bytecodes.c" -+ #line 2611 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts)); - kwnames = GETITEM(frame->f_code->co_consts, oparg); -- #line 3688 "Python/generated_cases.c.h" -+ #line 3689 "Python/generated_cases.c.h" - DISPATCH(); - } - - TARGET(INSTRUMENTED_CALL) { -- #line 2616 "Python/bytecodes.c" -+ #line 2617 "Python/bytecodes.c" - int is_meth = PEEK(oparg+2) != NULL; - int total_args = oparg + is_meth; - PyObject *function = PEEK(total_args + 1); -@@ -3702,7 +3703,7 @@ - _PyCallCache *cache = (_PyCallCache *)next_instr; - INCREMENT_ADAPTIVE_COUNTER(cache->counter); - GO_TO_INSTRUCTION(CALL); -- #line 3706 "Python/generated_cases.c.h" -+ #line 3707 "Python/generated_cases.c.h" - } - - TARGET(CALL) { -@@ -3712,7 +3713,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2661 "Python/bytecodes.c" -+ #line 2662 "Python/bytecodes.c" - int is_meth = method != NULL; - int total_args = oparg; - if (is_meth) { -@@ -3794,7 +3795,7 @@ - Py_DECREF(args[i]); - } - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 3798 "Python/generated_cases.c.h" -+ #line 3799 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -3806,7 +3807,7 @@ - TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; -- #line 2749 "Python/bytecodes.c" -+ #line 2750 "Python/bytecodes.c" - DEOPT_IF(method != NULL, CALL); - DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); - STAT_INC(CALL, hit); -@@ -3816,7 +3817,7 @@ - PEEK(oparg + 2) = Py_NewRef(meth); // method - Py_DECREF(callable); - GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); -- #line 3820 "Python/generated_cases.c.h" -+ #line 3821 "Python/generated_cases.c.h" - } - - TARGET(CALL_PY_EXACT_ARGS) { -@@ -3825,7 +3826,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - uint32_t func_version = read_u32(&next_instr[1].cache); -- #line 2761 "Python/bytecodes.c" -+ #line 2762 "Python/bytecodes.c" - assert(kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; -@@ -3851,7 +3852,7 @@ - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->return_offset = 0; - DISPATCH_INLINED(new_frame); -- #line 3855 "Python/generated_cases.c.h" -+ #line 3856 "Python/generated_cases.c.h" - } - - TARGET(CALL_PY_WITH_DEFAULTS) { -@@ -3859,7 +3860,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - uint32_t func_version = read_u32(&next_instr[1].cache); -- #line 2789 "Python/bytecodes.c" -+ #line 2790 "Python/bytecodes.c" - assert(kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; -@@ -3895,7 +3896,7 @@ - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->return_offset = 0; - DISPATCH_INLINED(new_frame); -- #line 3899 "Python/generated_cases.c.h" -+ #line 3900 "Python/generated_cases.c.h" - } - - TARGET(CALL_NO_KW_TYPE_1) { -@@ -3903,7 +3904,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2827 "Python/bytecodes.c" -+ #line 2828 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg == 1); - DEOPT_IF(null != NULL, CALL); -@@ -3913,7 +3914,7 @@ - res = 
Py_NewRef(Py_TYPE(obj)); - Py_DECREF(obj); - Py_DECREF(&PyType_Type); // I.e., callable -- #line 3917 "Python/generated_cases.c.h" -+ #line 3918 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -3926,7 +3927,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2839 "Python/bytecodes.c" -+ #line 2840 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg == 1); - DEOPT_IF(null != NULL, CALL); -@@ -3937,7 +3938,7 @@ - Py_DECREF(arg); - Py_DECREF(&PyUnicode_Type); // I.e., callable - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 3941 "Python/generated_cases.c.h" -+ #line 3942 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -3951,7 +3952,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2853 "Python/bytecodes.c" -+ #line 2854 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg == 1); - DEOPT_IF(null != NULL, CALL); -@@ -3962,7 +3963,7 @@ - Py_DECREF(arg); - Py_DECREF(&PyTuple_Type); // I.e., tuple - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 3966 "Python/generated_cases.c.h" -+ #line 3967 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -3976,7 +3977,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2867 "Python/bytecodes.c" -+ #line 2868 "Python/bytecodes.c" - int is_meth = method != NULL; - int total_args = oparg; - if (is_meth) { -@@ -3998,7 +3999,7 @@ - } - Py_DECREF(tp); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4002 "Python/generated_cases.c.h" -+ #line 4003 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4012,7 +4013,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2892 "Python/bytecodes.c" -+ #line 2893 "Python/bytecodes.c" - /* Builtin METH_O functions */ - assert(kwnames == NULL); - int is_meth = method != NULL; -@@ -4040,7 +4041,7 @@ - Py_DECREF(arg); - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4044 "Python/generated_cases.c.h" -+ #line 4045 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4054,7 +4055,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2923 "Python/bytecodes.c" -+ #line 2924 "Python/bytecodes.c" - /* Builtin METH_FASTCALL functions, without keywords */ - assert(kwnames == NULL); - int is_meth = method != NULL; -@@ -4086,7 +4087,7 @@ - 'invalid'). In those cases an exception is set, so we must - handle it. 
- */ -- #line 4090 "Python/generated_cases.c.h" -+ #line 4091 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4100,7 +4101,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2958 "Python/bytecodes.c" -+ #line 2959 "Python/bytecodes.c" - /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ - int is_meth = method != NULL; - int total_args = oparg; -@@ -4132,7 +4133,7 @@ - } - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4136 "Python/generated_cases.c.h" -+ #line 4137 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4146,7 +4147,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 2993 "Python/bytecodes.c" -+ #line 2994 "Python/bytecodes.c" - assert(kwnames == NULL); - /* len(o) */ - int is_meth = method != NULL; -@@ -4171,7 +4172,7 @@ - Py_DECREF(callable); - Py_DECREF(arg); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4175 "Python/generated_cases.c.h" -+ #line 4176 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4184,7 +4185,7 @@ - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 3020 "Python/bytecodes.c" -+ #line 3021 "Python/bytecodes.c" - assert(kwnames == NULL); - /* isinstance(o, o2) */ - int is_meth = method != NULL; -@@ -4211,7 +4212,7 @@ - Py_DECREF(cls); - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4215 "Python/generated_cases.c.h" -+ #line 4216 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4223,7 +4224,7 @@ - PyObject **args = (stack_pointer - oparg); - PyObject *self = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; -- #line 3050 "Python/bytecodes.c" -+ #line 3051 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg == 1); - PyInterpreterState *interp = _PyInterpreterState_GET(); -@@ -4241,14 +4242,14 @@ - JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); - assert(next_instr[-1].op.code == POP_TOP); - DISPATCH(); -- #line 4245 "Python/generated_cases.c.h" -+ #line 4246 "Python/generated_cases.c.h" - } - - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 3070 "Python/bytecodes.c" -+ #line 3071 "Python/bytecodes.c" - assert(kwnames == NULL); - int is_meth = method != NULL; - int total_args = oparg; -@@ -4279,7 +4280,7 @@ - Py_DECREF(arg); - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4283 "Python/generated_cases.c.h" -+ #line 4284 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4292,7 +4293,7 @@ - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 3104 "Python/bytecodes.c" -+ #line 3105 "Python/bytecodes.c" - int is_meth = method != NULL; - int total_args = oparg; - if (is_meth) { -@@ -4321,7 +4322,7 @@ - } - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4325 "Python/generated_cases.c.h" -+ #line 4326 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - 
STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4334,7 +4335,7 @@ - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 3136 "Python/bytecodes.c" -+ #line 3137 "Python/bytecodes.c" - assert(kwnames == NULL); - assert(oparg == 0 || oparg == 1); - int is_meth = method != NULL; -@@ -4363,7 +4364,7 @@ - Py_DECREF(self); - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4367 "Python/generated_cases.c.h" -+ #line 4368 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4376,7 +4377,7 @@ - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; - PyObject *res; -- #line 3168 "Python/bytecodes.c" -+ #line 3169 "Python/bytecodes.c" - assert(kwnames == NULL); - int is_meth = method != NULL; - int total_args = oparg; -@@ -4404,7 +4405,7 @@ - } - Py_DECREF(callable); - if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } -- #line 4408 "Python/generated_cases.c.h" -+ #line 4409 "Python/generated_cases.c.h" - STACK_SHRINK(oparg); - STACK_SHRINK(1); - stack_pointer[-1] = res; -@@ -4414,9 +4415,9 @@ - } - - TARGET(INSTRUMENTED_CALL_FUNCTION_EX) { -- #line 3199 "Python/bytecodes.c" -+ #line 3200 "Python/bytecodes.c" - GO_TO_INSTRUCTION(CALL_FUNCTION_EX); -- #line 4420 "Python/generated_cases.c.h" -+ #line 4421 "Python/generated_cases.c.h" - } - - TARGET(CALL_FUNCTION_EX) { -@@ -4425,7 +4426,7 @@ - PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; - PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))]; - PyObject *result; -- #line 3203 "Python/bytecodes.c" -+ #line 3204 "Python/bytecodes.c" - // DICT_MERGE is called before this opcode if there are kwargs. - // It converts all dict subtypes in kwargs into regular dicts. - assert(kwargs == NULL || PyDict_CheckExact(kwargs)); -@@ -4487,14 +4488,14 @@ - } - result = PyObject_Call(func, callargs, kwargs); - } -- #line 4491 "Python/generated_cases.c.h" -+ #line 4492 "Python/generated_cases.c.h" - Py_DECREF(func); - Py_DECREF(callargs); - Py_XDECREF(kwargs); -- #line 3265 "Python/bytecodes.c" -+ #line 3266 "Python/bytecodes.c" - assert(PEEK(3 + (oparg & 1)) == NULL); - if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } -- #line 4498 "Python/generated_cases.c.h" -+ #line 4499 "Python/generated_cases.c.h" - STACK_SHRINK(((oparg & 1) ? 1 : 0)); - STACK_SHRINK(2); - stack_pointer[-1] = result; -@@ -4509,7 +4510,7 @@ - PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL; - PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0))] : NULL; - PyObject *func; -- #line 3275 "Python/bytecodes.c" -+ #line 3276 "Python/bytecodes.c" - - PyFunctionObject *func_obj = (PyFunctionObject *) - PyFunction_New(codeobj, GLOBALS()); -@@ -4538,14 +4539,14 @@ - - func_obj->func_version = ((PyCodeObject *)codeobj)->co_version; - func = (PyObject *)func_obj; -- #line 4542 "Python/generated_cases.c.h" -+ #line 4543 "Python/generated_cases.c.h" - STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 
1 : 0)); - stack_pointer[-1] = func; - DISPATCH(); - } - - TARGET(RETURN_GENERATOR) { -- #line 3306 "Python/bytecodes.c" -+ #line 3307 "Python/bytecodes.c" - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; - PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); -@@ -4566,7 +4567,7 @@ - frame = cframe.current_frame = prev; - _PyFrame_StackPush(frame, (PyObject *)gen); - goto resume_frame; -- #line 4570 "Python/generated_cases.c.h" -+ #line 4571 "Python/generated_cases.c.h" - } - - TARGET(BUILD_SLICE) { -@@ -4574,15 +4575,15 @@ - PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; - PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; - PyObject *slice; -- #line 3329 "Python/bytecodes.c" -+ #line 3330 "Python/bytecodes.c" - slice = PySlice_New(start, stop, step); -- #line 4580 "Python/generated_cases.c.h" -+ #line 4581 "Python/generated_cases.c.h" - Py_DECREF(start); - Py_DECREF(stop); - Py_XDECREF(step); -- #line 3331 "Python/bytecodes.c" -+ #line 3332 "Python/bytecodes.c" - if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; } -- #line 4586 "Python/generated_cases.c.h" -+ #line 4587 "Python/generated_cases.c.h" - STACK_SHRINK(((oparg == 3) ? 1 : 0)); - STACK_SHRINK(1); - stack_pointer[-1] = slice; -@@ -4593,7 +4594,7 @@ - PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL; - PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))]; - PyObject *result; -- #line 3335 "Python/bytecodes.c" -+ #line 3336 "Python/bytecodes.c" - /* Handles f-string value formatting. */ - PyObject *(*conv_fn)(PyObject *); - int which_conversion = oparg & FVC_MASK; -@@ -4628,7 +4629,7 @@ - Py_DECREF(value); - Py_XDECREF(fmt_spec); - if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } -- #line 4632 "Python/generated_cases.c.h" -+ #line 4633 "Python/generated_cases.c.h" - STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 
1 : 0)); - stack_pointer[-1] = result; - DISPATCH(); -@@ -4637,10 +4638,10 @@ - TARGET(COPY) { - PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; - PyObject *top; -- #line 3372 "Python/bytecodes.c" -+ #line 3373 "Python/bytecodes.c" - assert(oparg > 0); - top = Py_NewRef(bottom); -- #line 4644 "Python/generated_cases.c.h" -+ #line 4645 "Python/generated_cases.c.h" - STACK_GROW(1); - stack_pointer[-1] = top; - DISPATCH(); -@@ -4652,7 +4653,7 @@ - PyObject *rhs = stack_pointer[-1]; - PyObject *lhs = stack_pointer[-2]; - PyObject *res; -- #line 3377 "Python/bytecodes.c" -+ #line 3378 "Python/bytecodes.c" - #if ENABLE_SPECIALIZATION - _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { -@@ -4667,12 +4668,12 @@ - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - res = binary_ops[oparg](lhs, rhs); -- #line 4671 "Python/generated_cases.c.h" -+ #line 4672 "Python/generated_cases.c.h" - Py_DECREF(lhs); - Py_DECREF(rhs); -- #line 3392 "Python/bytecodes.c" -+ #line 3393 "Python/bytecodes.c" - if (res == NULL) goto pop_2_error; -- #line 4676 "Python/generated_cases.c.h" -+ #line 4677 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; -@@ -4682,16 +4683,16 @@ - TARGET(SWAP) { - PyObject *top = stack_pointer[-1]; - PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; -- #line 3397 "Python/bytecodes.c" -+ #line 3398 "Python/bytecodes.c" - assert(oparg >= 2); -- #line 4688 "Python/generated_cases.c.h" -+ #line 4689 "Python/generated_cases.c.h" - stack_pointer[-1] = bottom; - stack_pointer[-(2 + (oparg-2))] = top; - DISPATCH(); - } - - TARGET(INSTRUMENTED_INSTRUCTION) { -- #line 3401 "Python/bytecodes.c" -+ #line 3402 "Python/bytecodes.c" - int next_opcode = _Py_call_instrumentation_instruction( - tstate, frame, next_instr-1); - if (next_opcode < 0) goto error; -@@ -4703,26 +4704,26 @@ - assert(next_opcode > 0 && next_opcode < 256); - opcode = next_opcode; - DISPATCH_GOTO(); -- #line 4707 "Python/generated_cases.c.h" -+ #line 4708 "Python/generated_cases.c.h" - } - - TARGET(INSTRUMENTED_JUMP_FORWARD) { -- #line 3415 "Python/bytecodes.c" -+ #line 3416 "Python/bytecodes.c" - INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP); -- #line 4713 "Python/generated_cases.c.h" -+ #line 4714 "Python/generated_cases.c.h" - DISPATCH(); - } ++ echo "Cannot run the iOS testbed for a non-iOS build."; \ ++ exit 1;\ ++ fi ++ @if test "$(findstring -iphonesimulator,$(MULTIARCH))" != "-iphonesimulator"; then \ ++ echo "Cannot run the iOS testbed for non-simulator builds."; \ ++ exit 1;\ ++ fi ++ @if test $(PYTHONFRAMEWORK) != "Python"; then \ ++ echo "Cannot run the iOS testbed with a non-default framework name."; \ ++ exit 1;\ ++ fi ++ @if ! test -d $(PYTHONFRAMEWORKPREFIX); then \ ++ echo "Cannot find a finalized iOS Python.framework. Have you run 'make install' to finalize the framework build?"; \ ++ exit 1;\ ++ fi ++ # Copy the testbed project into the build folder ++ cp -r $(srcdir)/iOS/testbed $(XCFOLDER) ++ # Copy the framework from the install location to the testbed project. ++ cp -r $(PYTHONFRAMEWORKPREFIX)/* $(XCFOLDER)/Python.xcframework/ios-arm64_x86_64-simulator ++ ++ # Run the test suite for the Xcode project, targeting the iOS simulator. ++ # If the suite fails, touch a file in the test folder as a marker ++ if ! 
xcodebuild test -project $(XCFOLDER)/iOSTestbed.xcodeproj -scheme "iOSTestbed" -destination "platform=iOS Simulator,name=iPhone SE (3rd Generation)" -resultBundlePath $(XCRESULT) -derivedDataPath $(XCFOLDER)/DerivedData ; then \ ++ touch $(XCFOLDER)/failed; \ ++ fi ++ ++ # Regardless of success or failure, extract and print the test output ++ xcrun xcresulttool get --path $(XCRESULT) \ ++ --id $$( \ ++ xcrun xcresulttool get --path $(XCRESULT) --format json | \ ++ $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['actions']['_values'][0]['actionResult']['logRef']['id']['_value'])" \ ++ ) \ ++ --format json | \ ++ $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['subsections']['_values'][1]['subsections']['_values'][0]['emittedOutput']['_value'])" ++ ++ @if test -e $(XCFOLDER)/failed ; then \ ++ exit 1; \ ++ fi ++ + # Like testall, but with only one pass and without multiple processes. + # Run an optional script to include information about the build environment. + .PHONY: buildbottest +@@ -1900,7 +1983,7 @@ + # which can lead to two parallel `./python setup.py build` processes that + # step on each others toes. + .PHONY: install +-install: @FRAMEWORKINSTALLFIRST@ commoninstall bininstall maninstall @FRAMEWORKINSTALLLAST@ ++install: @FRAMEWORKINSTALLFIRST@ @INSTALLTARGETS@ @FRAMEWORKINSTALLLAST@ + if test "x$(ENSUREPIP)" != "xno" ; then \ + case $(ENSUREPIP) in \ + upgrade) ensurepip="--upgrade" ;; \ +@@ -2329,6 +2412,14 @@ + $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ + $(DESTDIR)$(LIBDEST); \ + $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt ++ @ # If app store compliance has been configured, apply the patch to the ++ @ # installed library code. The patch has been previously validated against ++ @ # the original source tree, so we can ignore any errors that are raised ++ @ # due to files that are missing because of --disable-test-modules etc. ++ @if [ "$(APP_STORE_COMPLIANCE_PATCH)" != "" ]; then \ ++ echo "Applying app store compliance patch"; \ ++ patch --force --reject-file "$(abs_builddir)/app-store-compliance.rej" --strip 2 --directory "$(DESTDIR)$(LIBDEST)" --input "$(abs_srcdir)/$(APP_STORE_COMPLIANCE_PATCH)" || true ; \ ++ fi + @ # Build PYC files for the 3 optimization levels (0, 1, 2) + -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ + $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ +@@ -2505,10 +2596,11 @@ + # only have to cater for the structural bits of the framework. 
- TARGET(INSTRUMENTED_JUMP_BACKWARD) { -- #line 3419 "Python/bytecodes.c" -+ #line 3420 "Python/bytecodes.c" - INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP); -- #line 4720 "Python/generated_cases.c.h" -+ #line 4721 "Python/generated_cases.c.h" - CHECK_EVAL_BREAKER(); - DISPATCH(); - } + .PHONY: frameworkinstallframework +-frameworkinstallframework: frameworkinstallstructure install frameworkinstallmaclib ++frameworkinstallframework: @FRAMEWORKINSTALLFIRST@ install frameworkinstallmaclib - TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) { -- #line 3424 "Python/bytecodes.c" -+ #line 3425 "Python/bytecodes.c" - PyObject *cond = POP(); - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); -@@ -4731,12 +4732,12 @@ - assert(err == 0 || err == 1); - int offset = err*oparg; - INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); -- #line 4735 "Python/generated_cases.c.h" -+ #line 4736 "Python/generated_cases.c.h" - DISPATCH(); - } +-.PHONY: frameworkinstallstructure +-frameworkinstallstructure: $(LDLIBRARY) ++# macOS uses a versioned frameworks structure that includes a full install ++.PHONY: frameworkinstallversionedstructure ++frameworkinstallversionedstructure: $(LDLIBRARY) + @if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ + echo Not configured with --enable-framework; \ + exit 1; \ +@@ -2529,6 +2621,27 @@ + $(LN) -fsn Versions/Current/Resources $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Resources + $(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY) - TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) { -- #line 3435 "Python/bytecodes.c" -+ #line 3436 "Python/bytecodes.c" - PyObject *cond = POP(); - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); -@@ -4745,12 +4746,12 @@ - assert(err == 0 || err == 1); - int offset = (1-err)*oparg; - INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); -- #line 4749 "Python/generated_cases.c.h" -+ #line 4750 "Python/generated_cases.c.h" - DISPATCH(); - } ++# iOS/tvOS/watchOS uses a non-versioned framework with Info.plist in the ++# framework root, no .lproj data, and only stub compilation assistance binaries ++.PHONY: frameworkinstallunversionedstructure ++frameworkinstallunversionedstructure: $(LDLIBRARY) ++ @if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ ++ echo Not configured with --enable-framework; \ ++ exit 1; \ ++ else true; \ ++ fi ++ if test -d $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include; then \ ++ echo "Clearing stale header symlink directory"; \ ++ rm -rf $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include; \ ++ fi ++ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR) ++ sed 's/%VERSION%/'"`$(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import platform; print(platform.python_version())'`"'/g' < $(RESSRCDIR)/Info.plist > $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Info.plist ++ $(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY) ++ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(BINDIR) ++ for file in $(srcdir)/$(RESSRCDIR)/bin/* ; do \ ++ $(INSTALL) -m $(EXEMODE) $$file $(DESTDIR)$(BINDIR); \ ++ done ++ + # This installs Mac/Lib into the framework + # Install a number of symlinks to keep software that expects a normal unix + # install (which includes python-config) happy. 
+@@ -2569,6 +2682,19 @@ + frameworkinstallextras: + cd Mac && $(MAKE) installextras DESTDIR="$(DESTDIR)" - TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) { -- #line 3446 "Python/bytecodes.c" -+ #line 3447 "Python/bytecodes.c" - PyObject *value = POP(); - _Py_CODEUNIT *here = next_instr-1; - int offset; -@@ -4762,12 +4763,12 @@ - offset = 0; - } - INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); -- #line 4766 "Python/generated_cases.c.h" -+ #line 4767 "Python/generated_cases.c.h" - DISPATCH(); - } ++# On iOS, bin/lib can't live inside the framework; include needs to be called ++# "Headers", but *must* be in the framework, and *not* include the `python3.X` ++# subdirectory. The install has put these folders in the same folder as ++# Python.framework; Move the headers to their final framework-compatible home. ++.PHONY: frameworkinstallmobileheaders ++frameworkinstallmobileheaders: frameworkinstallunversionedstructure inclinstall ++ if test -d $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers; then \ ++ echo "Removing old framework headers"; \ ++ rm -rf $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers; \ ++ fi ++ mv "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" "$(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers" ++ $(LN) -fs "../$(PYTHONFRAMEWORKDIR)/Headers" "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" ++ + # Build the toplevel Makefile + Makefile.pre: $(srcdir)/Makefile.pre.in config.status + CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status +@@ -2679,6 +2805,10 @@ + -find build -type f -a ! -name '*.gc??' -exec rm -f {} ';' + -rm -f Include/pydtrace_probes.h + -rm -f profile-gen-stamp ++ -rm -rf iOS/testbed/Python.xcframework/ios-*/bin ++ -rm -rf iOS/testbed/Python.xcframework/ios-*/lib ++ -rm -rf iOS/testbed/Python.xcframework/ios-*/include ++ -rm -rf iOS/testbed/Python.xcframework/ios-*/Python.framework - TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) { -- #line 3460 "Python/bytecodes.c" -+ #line 3461 "Python/bytecodes.c" - PyObject *value = POP(); - _Py_CODEUNIT *here = next_instr-1; - int offset; -@@ -4779,30 +4780,30 @@ - offset = oparg; - } - INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); -- #line 4783 "Python/generated_cases.c.h" -+ #line 4784 "Python/generated_cases.c.h" - DISPATCH(); - } + .PHONY: profile-removal + profile-removal: +@@ -2704,6 +2834,8 @@ + config.cache config.log pyconfig.h Modules/config.c + -rm -rf build platform + -rm -rf $(PYTHONFRAMEWORKDIR) ++ -rm -rf iOS/Frameworks ++ -rm -rf iOSTestbed.* + -rm -f python-config.py python-config - TARGET(EXTENDED_ARG) { -- #line 3474 "Python/bytecodes.c" -+ #line 3475 "Python/bytecodes.c" - assert(oparg); - opcode = next_instr->op.code; - oparg = oparg << 8 | next_instr->op.arg; - PRE_DISPATCH_GOTO(); - DISPATCH_GOTO(); -- #line 4794 "Python/generated_cases.c.h" -+ #line 4795 "Python/generated_cases.c.h" - } + # Make things extra clean, before making a distribution: +diff --git a/Modules/getpath.c b/Modules/getpath.c +index 0a310000751..83a2bc469ae 100644 +--- a/Modules/getpath.c ++++ b/Modules/getpath.c +@@ -15,6 +15,7 @@ + #endif - TARGET(CACHE) { -- #line 3482 "Python/bytecodes.c" -+ #line 3483 "Python/bytecodes.c" - assert(0 && "Executing a cache."); - Py_UNREACHABLE(); -- #line 4801 "Python/generated_cases.c.h" -+ #line 4802 "Python/generated_cases.c.h" - } + #ifdef __APPLE__ ++# include "TargetConditionals.h" + # include + #endif - TARGET(RESERVED) { -- #line 3487 "Python/bytecodes.c" -+ #line 3488 "Python/bytecodes.c" - assert(0 && "Executing 
RESERVED instruction."); - Py_UNREACHABLE(); -- #line 4808 "Python/generated_cases.c.h" -+ #line 4809 "Python/generated_cases.c.h" - } -diff --git a/Python/getargs.c b/Python/getargs.c -index 02bddf0618e..e7c2654f93a 100644 ---- a/Python/getargs.c -+++ b/Python/getargs.c -@@ -2071,6 +2071,18 @@ - if (parser->initialized == 1) { - Py_CLEAR(parser->kwtuple); +@@ -759,7 +760,7 @@ + return winmodule_to_dict(dict, key, PyWin_DLLhModule); } -+ -+ if (parser->format) { -+ parser->fname = NULL; -+ } -+ else { -+ assert(parser->fname != NULL); -+ } -+ parser->custom_msg = NULL; -+ parser->pos = 0; -+ parser->min = 0; -+ parser->max = 0; -+ parser->initialized = 0; - } + #endif +-#elif defined(WITH_NEXT_FRAMEWORK) ++#elif defined(WITH_NEXT_FRAMEWORK) && !defined(TARGET_OS_IPHONE) + static char modPath[MAXPATHLEN + 1]; + static int modPathInitialized = -1; + if (modPathInitialized < 0) { +@@ -953,4 +954,3 @@ - static PyObject* -@@ -2628,7 +2640,7 @@ - * - * Otherwise, we leave a place at `buf[vararg]` for vararg tuple - * so the index is `i + 1`. */ -- if (nargs < vararg) { -+ if (i < vararg) { - buf[i] = current_arg; - } - else { -diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c -index 43fa5910ef6..4a6565bebc1 100644 ---- a/Python/legacy_tracing.c -+++ b/Python/legacy_tracing.c -@@ -103,6 +103,19 @@ - Py_DECREF(meth); - return res; - } -+ else if (Py_TYPE(callable) == &PyMethod_Type) { -+ // CALL instruction will grab the function from the method, -+ // so if the function is a C function, the return event will -+ // be emitted. However, CALL event happens before CALL -+ // instruction, so we need to handle this case here. -+ PyObject* func = PyMethod_GET_FUNCTION(callable); -+ if (func == NULL) { -+ return NULL; -+ } -+ if (PyCFunction_Check(func)) { -+ return call_profile_func(self, func); -+ } -+ } - Py_RETURN_NONE; + return _PyStatus_OK(); } - +- diff --git a/Python/marshal.c b/Python/marshal.c -index 90953cbb728..79ae624c975 100644 +index 8ecdb738147..79ae624c975 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -15,6 +15,10 @@ @@ -49641,24 +4223,6 @@ index 90953cbb728..79ae624c975 100644 #endif #define TYPE_NULL '0' -@@ -1851,7 +1858,7 @@ - Not all Python object types are supported; in general, only objects\n\ - whose value is independent from a particular invocation of Python can be\n\ - written and read by this module. The following types are supported:\n\ --None, integers, floating point numbers, strings, bytes, bytearrays,\n\ -+None, integers, floating-point numbers, strings, bytes, bytearrays,\n\ - tuples, lists, sets, dictionaries, and code objects, where it\n\ - should be understood that tuples, lists and dictionaries are only\n\ - supported as long as the values contained therein are themselves\n\ -@@ -1862,7 +1869,7 @@ - \n\ - version -- indicates the format that the module uses. 
Version 0 is the\n\ - historical format, version 1 shares interned strings and version 2\n\ -- uses a binary format for floating point numbers.\n\ -+ uses a binary format for floating-point numbers.\n\ - Version 3 shares common object references (New in version 3.4).\n\ - \n\ - Functions:\n\ diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index ed4a0ac2dd3..bc8da328ba9 100644 --- a/Python/stdlib_module_names.h @@ -49671,281 +4235,6 @@ index ed4a0ac2dd3..bc8da328ba9 100644 "_json", "_locale", "_lsprof", -diff --git a/Python/symtable.c b/Python/symtable.c -index ba4284210bb..f99ca4fdd06 100644 ---- a/Python/symtable.c -+++ b/Python/symtable.c -@@ -675,22 +675,19 @@ - if (existing == NULL && PyErr_Occurred()) { - return 0; - } -+ // __class__ is never allowed to be free through a class scope (see -+ // drop_class_free) -+ if (scope == FREE && ste->ste_type == ClassBlock && -+ _PyUnicode_EqualToASCIIString(k, "__class__")) { -+ scope = GLOBAL_IMPLICIT; -+ if (PySet_Discard(comp_free, k) < 0) { -+ return 0; -+ } -+ remove_dunder_class = 1; -+ } - if (!existing) { - // name does not exist in scope, copy from comprehension - assert(scope != FREE || PySet_Contains(comp_free, k) == 1); -- if (scope == FREE && ste->ste_type == ClassBlock && -- _PyUnicode_EqualToASCIIString(k, "__class__")) { -- // if __class__ is unbound in the enclosing class scope and free -- // in the comprehension scope, it needs special handling; just -- // letting it be marked as free in class scope will break due to -- // drop_class_free -- scope = GLOBAL_IMPLICIT; -- only_flags &= ~DEF_FREE; -- if (PySet_Discard(comp_free, k) < 0) { -- return 0; -- } -- remove_dunder_class = 1; -- } - PyObject *v_flags = PyLong_FromLong(only_flags); - if (v_flags == NULL) { - return 0; -diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c -index bc765623522..e13064bd145 100644 ---- a/Python/tracemalloc.c -+++ b/Python/tracemalloc.c -@@ -836,7 +836,7 @@ - - tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, - hashtable_compare_traceback, -- NULL, raw_free); -+ raw_free, NULL); - - tracemalloc_traces = tracemalloc_create_traces_table(); - tracemalloc_domains = tracemalloc_create_domains_table(); -diff --git a/README.rst b/README.rst -index 840ac75bf8c..49d503bbe45 100644 ---- a/README.rst -+++ b/README.rst -@@ -1,4 +1,4 @@ --This is Python version 3.12.4 -+This is Python version 3.12.6 - ============================= - - .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg -diff --git a/Tools/build/generate_re_casefix.py b/Tools/build/generate_re_casefix.py -index b57ac07426c..6cebfbd025c 100755 ---- a/Tools/build/generate_re_casefix.py -+++ b/Tools/build/generate_re_casefix.py -@@ -23,9 +23,9 @@ - - # Maps the code of lowercased character to codes of different lowercased - # characters which have the same uppercase. 
--_EXTRA_CASES = { -+_EXTRA_CASES = {{ - %s --} -+}} - """ - - def uname(i): -diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py -index c08568f2e00..9cc89b8caee 100644 ---- a/Tools/build/generate_sbom.py -+++ b/Tools/build/generate_sbom.py -@@ -96,6 +96,19 @@ - sys.exit(1) - - -+def is_root_directory_git_index() -> bool: -+ """Checks if the root directory is a git index""" -+ try: -+ subprocess.check_call( -+ ["git", "-C", str(CPYTHON_ROOT_DIR), "rev-parse"], -+ stdout=subprocess.DEVNULL, -+ stderr=subprocess.DEVNULL, -+ ) -+ except subprocess.CalledProcessError: -+ return False -+ return True -+ -+ - def filter_gitignored_paths(paths: list[str]) -> list[str]: - """ - Filter out paths excluded by the gitignore file. -@@ -108,6 +121,10 @@ - - '.gitignore:9:*.a Tools/lib.a' - """ -+ # No paths means no filtering to be done. -+ if not paths: -+ return [] -+ - # Filter out files in gitignore. - # Non-matching files show up as '::' - git_check_ignore_proc = subprocess.run( -@@ -337,6 +354,11 @@ - - - def main() -> None: -+ # Don't regenerate the SBOM if we're not a git repository. -+ if not is_root_directory_git_index(): -+ print("Skipping SBOM generation due to not being a git repository") -+ return -+ - create_source_sbom() - create_externals_sbom() - -diff --git a/Tools/build/stable_abi.py b/Tools/build/stable_abi.py -index c6363fda349..ef1622776bb 100644 ---- a/Tools/build/stable_abi.py -+++ b/Tools/build/stable_abi.py -@@ -226,9 +226,9 @@ - key=sort_key): - write(f'EXPORT_DATA({item.name})') - --REST_ROLES = { -- 'function': 'function', -- 'data': 'var', -+ITEM_KIND_TO_DOC_ROLE = { -+ 'function': 'func', -+ 'data': 'data', - 'struct': 'type', - 'macro': 'macro', - # 'const': 'const', # all undocumented -@@ -237,22 +237,28 @@ - - @generator("doc_list", 'Doc/data/stable_abi.dat') - def gen_doc_annotations(manifest, args, outfile): -- """Generate/check the stable ABI list for documentation annotations""" -+ """Generate/check the stable ABI list for documentation annotations -+ -+ See ``StableABIEntry`` in ``Doc/tools/extensions/c_annotations.py`` -+ for a description of each field. 
-+ """ - writer = csv.DictWriter( - outfile, - ['role', 'name', 'added', 'ifdef_note', 'struct_abi_kind'], - lineterminator='\n') - writer.writeheader() -- for item in manifest.select(REST_ROLES.keys(), include_abi_only=False): -+ kinds = set(ITEM_KIND_TO_DOC_ROLE) -+ for item in manifest.select(kinds, include_abi_only=False): - if item.ifdef: - ifdef_note = manifest.contents[item.ifdef].doc - else: - ifdef_note = None - row = { -- 'role': REST_ROLES[item.kind], -+ 'role': ITEM_KIND_TO_DOC_ROLE[item.kind], - 'name': item.name, - 'added': item.added, -- 'ifdef_note': ifdef_note} -+ 'ifdef_note': ifdef_note, -+ } - rows = [row] - if item.kind == 'struct': - row['struct_abi_kind'] = item.struct_abi_kind -@@ -260,7 +266,8 @@ - rows.append({ - 'role': 'member', - 'name': f'{item.name}.{member_name}', -- 'added': item.added}) -+ 'added': item.added, -+ }) - writer.writerows(rows) - - @generator("ctypes_test", 'Lib/test/test_stable_abi_ctypes.py') -diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv -index b47393e6fdd..ac60ee4e141 100644 ---- a/Tools/c-analyzer/cpython/globals-to-fix.tsv -+++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv -@@ -422,7 +422,6 @@ - Modules/_ctypes/_ctypes.c - _unpickle - - Modules/_ctypes/_ctypes.c PyCArrayType_from_ctype cache - - Modules/_cursesmodule.c - ModDict - --Modules/_datetimemodule.c datetime_strptime module - - Modules/_datetimemodule.c - PyDateTime_TimeZone_UTC - - Modules/_datetimemodule.c - PyDateTime_Epoch - - Modules/_datetimemodule.c - us_per_ms - -diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c -index 7df134b5ade..d8545c9d0ab 100644 ---- a/Tools/peg_generator/peg_extension/peg_extension.c -+++ b/Tools/peg_generator/peg_extension/peg_extension.c -@@ -108,7 +108,7 @@ - static PyObject * - clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) - { --#if defined(PY_DEBUG) -+#if defined(Py_DEBUG) - _PyPegen_clear_memo_statistics(); - #endif - Py_RETURN_NONE; -@@ -117,7 +117,7 @@ - static PyObject * - get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) - { --#if defined(PY_DEBUG) -+#if defined(Py_DEBUG) - return _PyPegen_get_memo_statistics(); - #else - Py_RETURN_NONE; -@@ -128,7 +128,7 @@ - static PyObject * - dump_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) - { --#if defined(PY_DEBUG) -+#if defined(Py_DEBUG) - PyObject *list = _PyPegen_get_memo_statistics(); - if (list == NULL) { - return NULL; -diff --git a/Tools/requirements-hypothesis.txt b/Tools/requirements-hypothesis.txt -index 9db2b74c87c..66898885c0a 100644 ---- a/Tools/requirements-hypothesis.txt -+++ b/Tools/requirements-hypothesis.txt -@@ -1,4 +1,4 @@ - # Requirements file for hypothesis that - # we use to run our property-based tests in CI. 
- --hypothesis==6.84.0 -+hypothesis==6.111.2 -diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py -index efc4b243f64..2cab972a5f8 100755 ---- a/Tools/ssl/multissltests.py -+++ b/Tools/ssl/multissltests.py -@@ -43,13 +43,14 @@ - log = logging.getLogger("multissl") - - OPENSSL_OLD_VERSIONS = [ -+ "1.1.1w", - ] - - OPENSSL_RECENT_VERSIONS = [ -- "1.1.1w", -- "3.0.13", -- "3.1.5", -- "3.2.1", -+ "3.0.15", -+ "3.1.7", -+ "3.2.3", -+ "3.3.2", - ] - - LIBRESSL_OLD_VERSIONS = [ -@@ -394,6 +395,7 @@ - class BuildOpenSSL(AbstractBuilder): - library = "OpenSSL" - url_templates = ( -+ "https://github.com/openssl/openssl/releases/download/openssl-{v}/openssl-{v}.tar.gz", - "https://www.openssl.org/source/openssl-{v}.tar.gz", - "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz" - ) -@@ -436,6 +438,7 @@ - parsed = parsed[:2] - return ".".join(str(i) for i in parsed) - -+ - class BuildLibreSSL(AbstractBuilder): - library = "LibreSSL" - url_templates = ( diff --git a/config.sub b/config.sub index d74fb6deac9..1bb6a05dc11 100755 --- a/config.sub @@ -50480,7 +4769,7 @@ index d74fb6deac9..1bb6a05dc11 100755 # Local variables: diff --git a/configure b/configure -index 6dc8a66e487..97f6ed048ad 100755 +index 4dfaeecfc0b..97f6ed048ad 100755 --- a/configure +++ b/configure @@ -975,10 +975,14 @@ @@ -50766,9 +5055,22 @@ index 6dc8a66e487..97f6ed048ad 100755 - FRAMEWORKALTINSTALLLAST="frameworkinstallmaclib frameworkinstallapps frameworkaltinstallunixtools" - FRAMEWORKPYTHONW="frameworkpythonw" - FRAMEWORKINSTALLAPPSPREFIX="/Applications" - +- - if test "x${prefix}" = "xNONE" ; then - FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}" + +- else +- FRAMEWORKUNIXTOOLSPREFIX="${prefix}" +- fi +- +- case "${enableval}" in +- /System*) +- FRAMEWORKINSTALLAPPSPREFIX="/Applications" +- if test "${prefix}" = "NONE" ; then +- # See below +- FRAMEWORKUNIXTOOLSPREFIX="/usr" +- fi +- ;; + case $ac_sys_system in #( + Darwin) : + FRAMEWORKINSTALLFIRST="frameworkinstallversionedstructure" @@ -50786,9 +5088,9 @@ index 6dc8a66e487..97f6ed048ad 100755 + FRAMEWORKUNIXTOOLSPREFIX="${prefix}" + fi -- else -- FRAMEWORKUNIXTOOLSPREFIX="${prefix}" -- fi +- /Library*) +- FRAMEWORKINSTALLAPPSPREFIX="/Applications" +- ;; + case "${enableval}" in + /System*) + FRAMEWORKINSTALLAPPSPREFIX="/Applications" @@ -50818,19 +5120,6 @@ index 6dc8a66e487..97f6ed048ad 100755 + fi + ;; -- case "${enableval}" in -- /System*) -- FRAMEWORKINSTALLAPPSPREFIX="/Applications" -- if test "${prefix}" = "NONE" ; then -- # See below -- FRAMEWORKUNIXTOOLSPREFIX="/usr" -- fi -- ;; -- -- /Library*) -- FRAMEWORKINSTALLAPPSPREFIX="/Applications" -- ;; -- - */Library/Frameworks) - MDIR="`dirname "${enableval}"`" - MDIR="`dirname "${MDIR}"`" @@ -51508,92 +5797,18 @@ index 6dc8a66e487..97f6ed048ad 100755 # LIBRARY_DEPS, LINK_PYTHON_OBJS and LINK_PYTHON_DEPS variable case $ac_sys_system/$ac_sys_emscripten_target in #( Emscripten/browser*) : -@@ -9509,7 +9910,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wextra -Werror" -+ as_fn_append CFLAGS " -Wextra -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9627,7 +10028,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wunused-result -Werror" -+ as_fn_append CFLAGS " -Wunused-result -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. 
*/ - -@@ -9672,7 +10073,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wunused-parameter -Werror" -+ as_fn_append CFLAGS " -Wunused-parameter -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9713,7 +10114,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wint-conversion -Werror" -+ as_fn_append CFLAGS " -Wint-conversion -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9754,7 +10155,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wmissing-field-initializers -Werror" -+ as_fn_append CFLAGS " -Wmissing-field-initializers -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9795,7 +10196,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wsign-compare -Werror" -+ as_fn_append CFLAGS " -Wsign-compare -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9836,7 +10237,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wunreachable-code -Werror" -+ as_fn_append CFLAGS " -Wunreachable-code -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -9888,7 +10289,7 @@ - else $as_nop - - py_cflags=$CFLAGS -- as_fn_append CFLAGS "-Wstrict-prototypes -Werror" -+ as_fn_append CFLAGS " -Wstrict-prototypes -Werror" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - -@@ -12789,7 +13190,12 @@ +@@ -12789,6 +13190,11 @@ BLDSHARED="$LDSHARED" fi ;; -- Emscripten|WASI) + iOS/*|tvOS/*|watchOS/*) + LDSHARED='$(CC) -dynamiclib -F . -framework $(PYTHONFRAMEWORK)' + LDCXXSHARED='$(CXX) -dynamiclib -F . -framework $(PYTHONFRAMEWORK)' + BLDSHARED="$LDSHARED" + ;; -+ Emscripten*|WASI*) + Emscripten*|WASI*) LDSHARED='$(CC) -shared' LDCXXSHARED='$(CXX) -shared';; - Linux*|GNU*|QNX*|VxWorks*|Haiku*) @@ -12918,30 +13324,34 @@ Linux-android*) LINKFORSHARED="-pie -Xlinker -export-dynamic";; Linux*|GNU*) LINKFORSHARED="-Xlinker -export-dynamic";; @@ -52031,23 +6246,7 @@ index 6dc8a66e487..97f6ed048ad 100755 CYGWIN*) : -@@ -28646,9 +29111,15 @@ - - - py_cv_module__ctypes_test=n/a -+ py_cv_module__testexternalinspection=n/a -+ py_cv_module__testimportmultiple=n/a -+ py_cv_module__testmultiphase=n/a -+ py_cv_module__testsinglephase=n/a - py_cv_module_fcntl=n/a - py_cv_module_mmap=n/a - py_cv_module_termios=n/a -+ py_cv_module_xxlimited=n/a -+ py_cv_module_xxlimited_35=n/a - py_cv_module_=n/a - - -@@ -32326,6 +32797,9 @@ +@@ -32332,6 +32797,9 @@ "Mac/PythonLauncher/Makefile") CONFIG_FILES="$CONFIG_FILES Mac/PythonLauncher/Makefile" ;; "Mac/Resources/framework/Info.plist") CONFIG_FILES="$CONFIG_FILES Mac/Resources/framework/Info.plist" ;; "Mac/Resources/app/Info.plist") CONFIG_FILES="$CONFIG_FILES Mac/Resources/app/Info.plist" ;; @@ -52058,7 +6257,7 @@ index 6dc8a66e487..97f6ed048ad 100755 "Misc/python.pc") CONFIG_FILES="$CONFIG_FILES Misc/python.pc" ;; "Misc/python-embed.pc") CONFIG_FILES="$CONFIG_FILES Misc/python-embed.pc" ;; diff --git a/configure.ac b/configure.ac -index 8a32cb58f4e..6ecb9fb8e85 100644 +index 0d6df8e24e4..6ecb9fb8e85 100644 --- a/configure.ac +++ b/configure.ac @@ -307,6 +307,161 @@ @@ -53009,29 +7208,18 @@ index 8a32cb58f4e..6ecb9fb8e85 100644 # LIBRARY_DEPS, LINK_PYTHON_OBJS and LINK_PYTHON_DEPS variable AS_CASE([$ac_sys_system/$ac_sys_emscripten_target], [Emscripten/browser*], [LIBRARY_DEPS='$(PY3LIBRARY) $(WASM_STDLIB) python.html python.worker.js'], -@@ -2371,7 +2740,7 @@ - AS_VAR_PUSHDEF([py_var], 
[ac_cv_$1_]m4_normalize($2)[_warning]) - AC_CACHE_CHECK([m4_ifblank([$3], [if we can $1 $CC $2 warning], [$3])], [py_var], [ - AS_VAR_COPY([py_cflags], [CFLAGS]) -- AS_VAR_APPEND([CFLAGS], ["-W$2 -Werror"]) -+ AS_VAR_APPEND([CFLAGS], [" -W$2 -Werror"]) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])], - [AS_VAR_SET([py_var], [yes])], - [AS_VAR_SET([py_var], [no])]) -@@ -3356,7 +3725,12 @@ +@@ -3356,6 +3725,11 @@ BLDSHARED="$LDSHARED" fi ;; -- Emscripten|WASI) + iOS/*|tvOS/*|watchOS/*) + LDSHARED='$(CC) -dynamiclib -F . -framework $(PYTHONFRAMEWORK)' + LDCXXSHARED='$(CXX) -dynamiclib -F . -framework $(PYTHONFRAMEWORK)' + BLDSHARED="$LDSHARED" + ;; -+ Emscripten*|WASI*) + Emscripten*|WASI*) LDSHARED='$(CC) -shared' LDCXXSHARED='$(CXX) -shared';; - Linux*|GNU*|QNX*|VxWorks*|Haiku*) @@ -3476,30 +3850,34 @@ Linux-android*) LINKFORSHARED="-pie -Xlinker -export-dynamic";; Linux*|GNU*) LINKFORSHARED="-Xlinker -export-dynamic";; @@ -53331,26 +7519,6 @@ index 8a32cb58f4e..6ecb9fb8e85 100644 [CYGWIN*], [PY_STDLIB_MOD_SET_NA([_scproxy], [nis])], [QNX*], [PY_STDLIB_MOD_SET_NA([_scproxy], [nis])], [FreeBSD*], [PY_STDLIB_MOD_SET_NA([_scproxy], [spwd])], -@@ -7300,11 +7737,19 @@ - [Emscripten/node*], [], - [WASI/*], [ - dnl WASI SDK 15.0 does not support file locking, mmap, and more. -+ dnl Test modules that must be compiled as shared libraries are not supported -+ dnl (see Modules/Setup.stdlib.in). - PY_STDLIB_MOD_SET_NA( - [_ctypes_test], -+ [_testexternalinspection], -+ [_testimportmultiple], -+ [_testmultiphase], -+ [_testsinglephase], - [fcntl], - [mmap], - [termios], -+ [xxlimited], -+ [xxlimited_35], - ) - ] - ) --- /dev/null +++ b/iOS/README.rst @@ -0,0 +1,385 @@