diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml index 29b43e0934472e..9d7c5e1279f46d 100644 --- a/.azure-pipelines/posix-steps.yml +++ b/.azure-pipelines/posix-steps.yml @@ -68,7 +68,7 @@ steps: - ${{ if eq(parameters.patchcheck, 'true') }}: - script: | git fetch origin - ./python Tools/scripts/patchcheck.py --ci true + ./python Tools/patchcheck/patchcheck.py --ci true displayName: 'Run patchcheck.py' condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest')) diff --git a/.gitattributes b/.gitattributes index d7e0ab2f36f5db..13289182400109 100644 --- a/.gitattributes +++ b/.gitattributes @@ -69,7 +69,7 @@ Doc/library/token-list.inc generated Include/internal/pycore_ast.h generated Include/internal/pycore_ast_state.h generated Include/internal/pycore_opcode.h generated -Include/internal/pycore_runtime_init_generated.h generated +Include/internal/pycore_*_generated.h generated Include/opcode.h generated Include/token.h generated Lib/keyword.py generated @@ -82,6 +82,7 @@ Parser/parser.c generated Parser/token.c generated Programs/test_frozenmain.h generated Python/Python-ast.c generated +Python/generated_cases.c.h generated Python/opcode_targets.h generated Python/stdlib_module_names.h generated Tools/peg_generator/pegen/grammar_parser.py generated diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 4c10bd3be1a8e2..5d30c0928e5ab8 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,8 +4,11 @@ # It uses the same pattern rule for gitignore file # https://git-scm.com/docs/gitignore#_pattern_format +# GitHub +.github/** @ezio-melotti + # asyncio -**/*asyncio* @1st1 @asvetlov @gvanrossum +**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 # Core **/*context* @1st1 @@ -52,7 +55,7 @@ Python/traceback.c @iritkatriel /Lib/html/ @ezio-melotti /Lib/_markupbase.py @ezio-melotti /Lib/test/test_html*.py @ezio-melotti -/Tools/scripts/*html5* @ezio-melotti +/Tools/build/parse_html5_entities.py @ezio-melotti # Import (including importlib). # Ignoring importlib.h so as to not get flagged on @@ -60,7 +63,7 @@ Python/traceback.c @iritkatriel # bytecode. **/*import*.c @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw **/*import*.py @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw -**/*importlib/resources/* @jaraco @warsaw @brettcannon +**/*importlib/resources/* @jaraco @warsaw @brettcannon @FFY00 **/importlib/metadata/* @jaraco @warsaw # Dates and times @@ -132,10 +135,8 @@ Lib/ast.py @isidentical **/*idlelib* @terryjreedy -**/*typing* @gvanrossum @Fidget-Spinner @JelleZijlstra +**/*typing* @gvanrossum @Fidget-Spinner @JelleZijlstra @AlexWaygood -**/*asyncore @giampaolo -**/*asynchat @giampaolo **/*ftplib @giampaolo **/*shutil @giampaolo @@ -145,6 +146,8 @@ Lib/ast.py @isidentical **/*tomllib* @encukou +**/*sysconfig* @FFY00 + # macOS /Mac/ @python/macos-team **/*osx_support* @python/macos-team diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index 627f57070d200b..2ef9cdc191481e 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -4,21 +4,9 @@ Contributing to Python Build Status ------------ -- main +- `Buildbot status overview `_ - + `Stable buildbots `_ - -- 3.9 - - + `Stable buildbots `_ - -- 3.8 - - + `Stable buildbots `_ - -- 3.7 - - + `Stable buildbots `_ +- `GitHub Actions status `_ Thank You @@ -38,7 +26,7 @@ also suggestions on how you can most effectively help the project. Please be aware that our workflow does deviate slightly from the typical GitHub project. 
Details on how to properly submit a pull request are covered in -`Lifecycle of a Pull Request `_. +`Lifecycle of a Pull Request `_. We utilize various bots and status checks to help with this, so do follow the comments they leave and their "Details" links, respectively. The key points of our workflow that are not covered by a bot or status check are: diff --git a/.github/SECURITY.md b/.github/SECURITY.md index 2aebc5a0bf717f..923720bce0bc3b 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -4,7 +4,7 @@ The Python team applies security fixes according to the table in [the devguide]( -https://devguide.python.org/#status-of-python-branches +https://devguide.python.org/versions/#supported-versions ). ## Reporting a Vulnerability diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1db5c505149397..a1bdfa0681e00f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,21 +25,23 @@ on: permissions: contents: read +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: check_source: name: 'Check for source changes' runs-on: ubuntu-latest outputs: run_tests: ${{ steps.check.outputs.run_tests }} - run_ssl_tests: ${{ steps.check.outputs.run_ssl_tests }} steps: - uses: actions/checkout@v3 - name: Check for source changes id: check run: | if [ -z "$GITHUB_BASE_REF" ]; then - echo '::set-output name=run_tests::true' - echo '::set-output name=run_ssl_tests::true' + echo "run_tests=true" >> $GITHUB_OUTPUT else git fetch origin $GITHUB_BASE_REF --depth=1 # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more @@ -55,8 +57,7 @@ jobs: # into the PR branch anyway. # # https://github.com/python/core-workflow/issues/373 - git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true - git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE '(ssl|hashlib|hmac|^.github)' && echo '::set-output name=run_ssl_tests::true' || true + git diff --name-only origin/$GITHUB_BASE_REF.. 
| grep -qvE '(\.rst$|^Doc|^Misc)' && echo "run_tests=true" >> $GITHUB_OUTPUT || true fi check_generated_files: @@ -72,7 +73,7 @@ jobs: - name: Add ccache to PATH run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action - uses: hendrikmuhs/ccache-action@v1 + uses: hendrikmuhs/ccache-action@v1.2 - name: Check Autoconf version 2.69 and aclocal 1.16.3 run: | grep "Generated by GNU Autoconf 2.69" configure @@ -175,7 +176,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1q + OPENSSL_VER: 1.1.1s PYTHONSTRICTEXTENSIONBUILD: 1 steps: - uses: actions/checkout@v3 @@ -201,7 +202,7 @@ jobs: run: | echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action - uses: hendrikmuhs/ccache-action@v1 + uses: hendrikmuhs/ccache-action@v1.2 - name: Setup directory envs for out-of-tree builds run: | echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV @@ -230,11 +231,11 @@ jobs: name: 'Ubuntu SSL tests with OpenSSL' runs-on: ubuntu-20.04 needs: check_source - if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_ssl_tests == 'true' + if: needs.check_source.outputs.run_tests == 'true' strategy: fail-fast: false matrix: - openssl_ver: [1.1.1q, 3.0.5] + openssl_ver: [1.1.1s, 3.0.7] env: OPENSSL_VER: ${{ matrix.openssl_ver }} MULTISSL_DIR: ${{ github.workspace }}/multissl @@ -264,7 +265,7 @@ jobs: run: | echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action - uses: hendrikmuhs/ccache-action@v1 + uses: hendrikmuhs/ccache-action@v1.2 - name: Configure CPython run: ./configure --with-pydebug --with-openssl=$OPENSSL_DIR - name: Build CPython @@ -281,7 +282,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1q + OPENSSL_VER: 1.1.1s PYTHONSTRICTEXTENSIONBUILD: 1 ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0 steps: @@ -308,7 +309,7 @@ jobs: run: | echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action - uses: hendrikmuhs/ccache-action@v1 + uses: hendrikmuhs/ccache-action@v1.2 - name: Configure CPython run: ./configure --with-address-sanitizer --without-pymalloc - name: Build CPython diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index 528679c0ac6b37..5f1dcae190efbc 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -18,6 +18,10 @@ on: permissions: contents: read +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: build: name: Windows Installer diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index d95d089ed66755..44a1f206df1eb9 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -28,6 +28,10 @@ on: permissions: contents: read +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: build_doc: name: 'Docs' @@ -46,9 +50,14 @@ jobs: run: make -C Doc/ venv - name: 'Check documentation' run: make -C Doc/ check + - name: 'Upload NEWS' + uses: actions/upload-artifact@v3 + with: + name: NEWS + path: Doc/build/NEWS - name: 'Build HTML documentation' run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html - - name: 'Upload' + - name: 'Upload docs' uses: actions/upload-artifact@v3 with: name: doc-html diff --git a/.github/workflows/project-updater.yml 
b/.github/workflows/project-updater.yml index 77e55ed019b25a..99c7a05ae8cab0 100644 --- a/.github/workflows/project-updater.yml +++ b/.github/workflows/project-updater.yml @@ -6,6 +6,9 @@ on: - opened - labeled +permissions: + contents: read + jobs: add-to-project: name: Add issues to projects diff --git a/.github/workflows/verify-ensurepip-wheels.yml b/.github/workflows/verify-ensurepip-wheels.yml index 61e3d1adf534d5..969515ed287b55 100644 --- a/.github/workflows/verify-ensurepip-wheels.yml +++ b/.github/workflows/verify-ensurepip-wheels.yml @@ -6,16 +6,20 @@ on: paths: - 'Lib/ensurepip/_bundled/**' - '.github/workflows/verify-ensurepip-wheels.yml' - - 'Tools/scripts/verify_ensurepip_wheels.py' + - 'Tools/build/verify_ensurepip_wheels.py' pull_request: paths: - 'Lib/ensurepip/_bundled/**' - '.github/workflows/verify-ensurepip-wheels.yml' - - 'Tools/scripts/verify_ensurepip_wheels.py' + - 'Tools/build/verify_ensurepip_wheels.py' permissions: contents: read +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: verify: runs-on: ubuntu-latest @@ -25,4 +29,4 @@ jobs: with: python-version: '3' - name: Compare checksums of bundled pip and setuptools to ones published on PyPI - run: ./Tools/scripts/verify_ensurepip_wheels.py + run: ./Tools/build/verify_ensurepip_wheels.py diff --git a/.gitignore b/.gitignore index 924c136ba9aa05..d9c4a7972f076d 100644 --- a/.gitignore +++ b/.gitignore @@ -41,7 +41,6 @@ gmon.out .DS_Store *.exe -!Lib/distutils/command/*.exe # Ignore core dumps... but not Tools/msi/core/ or the like. core @@ -58,7 +57,6 @@ Doc/.venv/ Doc/env/ Doc/.env/ Include/pydtrace_probes.h -Lib/distutils/command/*.pdb Lib/lib2to3/*.pickle Lib/site-packages/* !Lib/site-packages/README.txt @@ -116,6 +114,7 @@ PCbuild/win32/ Tools/unicode/data/ /autom4te.cache /build/ +/builddir/ /config.cache /config.log /config.status @@ -143,7 +142,7 @@ Tools/ssl/win32 Tools/freeze/test/outdir # The frozen modules are always generated by the build so we don't -# keep them in the repo. Also see Tools/scripts/freeze_modules.py. +# keep them in the repo. Also see Tools/build/freeze_modules.py. Python/frozen_modules/*.h # The manifest can be generated at any time with "make regen-frozen". Python/frozen_modules/MANIFEST diff --git a/Doc/Makefile b/Doc/Makefile index 5b6a95813abee5..b09a9d754fb5aa 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -21,9 +21,9 @@ PAPEROPT_letter = -D latex_elements.papersize=letterpaper ALLSPHINXOPTS = -b $(BUILDER) -d build/doctrees $(PAPEROPT_$(PAPER)) -j auto \ $(SPHINXOPTS) $(SPHINXERRORHANDLING) . 
build/$(BUILDER) $(SOURCES) -.PHONY: help build html htmlhelp latex text texinfo changes linkcheck \ - suspicious coverage doctest pydoc-topics htmlview clean dist check serve \ - autobuild-dev autobuild-stable venv +.PHONY: help build html htmlhelp latex text texinfo epub changes linkcheck \ + coverage doctest pydoc-topics htmlview clean clean-venv venv dist check serve \ + autobuild-dev autobuild-dev-html autobuild-stable autobuild-stable-html help: @echo "Please use \`make ' where is one of" @@ -42,7 +42,6 @@ help: @echo " doctest to run doctests in the documentation" @echo " pydoc-topics to regenerate the pydoc topics file" @echo " dist to create a \"dist\" directory with archived docs for download" - @echo " suspicious to check for suspicious markup in output text" @echo " check to run a check for frequent markup errors" build: @@ -110,18 +109,6 @@ linkcheck: "or in build/$(BUILDER)/output.txt"; \ false; } -suspicious: BUILDER = suspicious -suspicious: - @$(MAKE) build BUILDER=$(BUILDER) || { \ - echo "Suspicious check complete; look for any errors in the above output" \ - "or in build/$(BUILDER)/suspicious.csv. If all issues are false" \ - "positives, append that file to tools/susp-ignored.csv."; \ - false; } - @echo "⚠ make suspicious is deprecated and will be removed soon." - @echo "⚠ Use:" - @echo "⚠ make check" - @echo "⚠ instead." - coverage: BUILDER = coverage coverage: build @echo "Coverage finished; see c.txt and python.txt in build/coverage" diff --git a/Doc/README.rst b/Doc/README.rst index d67cad79916b38..a3bb5fa5445c23 100644 --- a/Doc/README.rst +++ b/Doc/README.rst @@ -93,9 +93,6 @@ Available make targets are: plain text documentation for the labels defined in ``tools/pyspecific.py`` -- pydoc needs these to show topic and keyword help. -* "suspicious", which checks the parsed markup for text that looks like - malformed and thus unconverted reST. - * "check", which checks for frequent markup errors. * "serve", which serves the build/html directory on port 8000. diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst index 05e131d06b909d..a04062fb2a68f1 100644 --- a/Doc/c-api/buffer.rst +++ b/Doc/c-api/buffer.rst @@ -99,7 +99,7 @@ a buffer, see :c:func:`PyObject_GetBuffer`. For :term:`contiguous` arrays, the value points to the beginning of the memory block. - .. c:member:: void *obj + .. c:member:: PyObject *obj A new reference to the exporting object. The reference is owned by the consumer and automatically decremented and set to ``NULL`` by diff --git a/Doc/c-api/call.rst b/Doc/c-api/call.rst index 6fb2e15196103d..4dc66e318cd12e 100644 --- a/Doc/c-api/call.rst +++ b/Doc/c-api/call.rst @@ -93,7 +93,7 @@ This is a pointer to a function with the following signature: and they must be unique. If there are no keyword arguments, then *kwnames* can instead be *NULL*. -.. c:macro:: PY_VECTORCALL_ARGUMENTS_OFFSET +.. data:: PY_VECTORCALL_ARGUMENTS_OFFSET If this flag is set in a vectorcall *nargsf* argument, the callee is allowed to temporarily change ``args[-1]``. In other words, *args* points to diff --git a/Doc/c-api/conversion.rst b/Doc/c-api/conversion.rst index 9b9c4ffa4d0343..fdb321fe7ab3f2 100644 --- a/Doc/c-api/conversion.rst +++ b/Doc/c-api/conversion.rst @@ -28,7 +28,8 @@ not. The wrappers ensure that ``str[size-1]`` is always ``'\0'`` upon return. They never write more than *size* bytes (including the trailing ``'\0'``) into str. Both functions require that ``str != NULL``, ``size > 0``, ``format != NULL`` -and ``size < INT_MAX``. +and ``size < INT_MAX``. 
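As a quick illustration (not part of this patch), a call that honours the contract described above might look like the following minimal sketch; the buffer size and message are arbitrary, and only the behaviour documented in this section is relied on::

    /* Hedged sketch: checking PyOS_snprintf()'s result as documented above. */
    #include <Python.h>

    static void
    format_report(long value)
    {
        char buf[64];                 /* size > 0 and far below INT_MAX */
        int rv = PyOS_snprintf(buf, sizeof(buf), "value=%ld", value);
        if (rv < 0) {
            /* an error occurred during formatting */
        }
        else if ((size_t)rv >= sizeof(buf)) {
            /* the output was truncated, but buf is still NUL-terminated */
        }
        /* in every case buf[sizeof(buf) - 1] is '\0' on return */
    }
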
Note that this means there is no equivalent to the C99 +``n = snprintf(NULL, 0, ...)`` which would determine the necessary buffer size. The return value (*rv*) for these functions should be interpreted as follows: diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst index 819168d48707c1..e5f28b59a701e0 100644 --- a/Doc/c-api/dict.rst +++ b/Doc/c-api/dict.rst @@ -238,3 +238,73 @@ Dictionary Objects for key, value in seq2: if override or key not in a: a[key] = value + +.. c:function:: int PyDict_AddWatcher(PyDict_WatchCallback callback) + + Register *callback* as a dictionary watcher. Return a non-negative integer + id which must be passed to future calls to :c:func:`PyDict_Watch`. In case + of error (e.g. no more watcher IDs available), return ``-1`` and set an + exception. + + .. versionadded:: 3.12 + +.. c:function:: int PyDict_ClearWatcher(int watcher_id) + + Clear watcher identified by *watcher_id* previously returned from + :c:func:`PyDict_AddWatcher`. Return ``0`` on success, ``-1`` on error (e.g. + if the given *watcher_id* was never registered.) + + .. versionadded:: 3.12 + +.. c:function:: int PyDict_Watch(int watcher_id, PyObject *dict) + + Mark dictionary *dict* as watched. The callback granted *watcher_id* by + :c:func:`PyDict_AddWatcher` will be called when *dict* is modified or + deallocated. Return ``0`` on success or ``-1`` on error. + + .. versionadded:: 3.12 + +.. c:function:: int PyDict_Unwatch(int watcher_id, PyObject *dict) + + Mark dictionary *dict* as no longer watched. The callback granted + *watcher_id* by :c:func:`PyDict_AddWatcher` will no longer be called when + *dict* is modified or deallocated. The dict must previously have been + watched by this watcher. Return ``0`` on success or ``-1`` on error. + + .. versionadded:: 3.12 + +.. c:type:: PyDict_WatchEvent + + Enumeration of possible dictionary watcher events: ``PyDict_EVENT_ADDED``, + ``PyDict_EVENT_MODIFIED``, ``PyDict_EVENT_DELETED``, ``PyDict_EVENT_CLONED``, + ``PyDict_EVENT_CLEARED``, or ``PyDict_EVENT_DEALLOCATED``. + + .. versionadded:: 3.12 + +.. c:type:: int (*PyDict_WatchCallback)(PyDict_WatchEvent event, PyObject *dict, PyObject *key, PyObject *new_value) + + Type of a dict watcher callback function. + + If *event* is ``PyDict_EVENT_CLEARED`` or ``PyDict_EVENT_DEALLOCATED``, both + *key* and *new_value* will be ``NULL``. If *event* is ``PyDict_EVENT_ADDED`` + or ``PyDict_EVENT_MODIFIED``, *new_value* will be the new value for *key*. + If *event* is ``PyDict_EVENT_DELETED``, *key* is being deleted from the + dictionary and *new_value* will be ``NULL``. + + ``PyDict_EVENT_CLONED`` occurs when *dict* was previously empty and another + dict is merged into it. To maintain efficiency of this operation, per-key + ``PyDict_EVENT_ADDED`` events are not issued in this case; instead a + single ``PyDict_EVENT_CLONED`` is issued, and *key* will be the source + dictionary. + + The callback may inspect but must not modify *dict*; doing so could have + unpredictable effects, including infinite recursion. + + Callbacks occur before the notified modification to *dict* takes place, so + the prior state of *dict* can be inspected. + + If the callback returns with an exception set, it must return ``-1``; this + exception will be printed as an unraisable exception using + :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``. + + .. 
versionadded:: 3.12 diff --git a/Doc/c-api/frame.rst b/Doc/c-api/frame.rst index 46ce700abf1474..1ac8f03d6e48f8 100644 --- a/Doc/c-api/frame.rst +++ b/Doc/c-api/frame.rst @@ -19,6 +19,24 @@ can be used to get a frame object. See also :ref:`Reflection `. +.. c:var:: PyTypeObject PyFrame_Type + + The type of frame objects. + It is the same object as :py:class:`types.FrameType` in the Python layer. + + .. versionchanged:: 3.11 + + Previously, this type was only available after including + ````. + +.. c:function:: int PyFrame_Check(PyObject *obj) + + Return non-zero if *obj* is a frame object. + + .. versionchanged:: 3.11 + + Previously, this function was only available after including + ````. .. c:function:: PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) @@ -79,6 +97,27 @@ See also :ref:`Reflection `. .. versionadded:: 3.11 +.. c:function:: PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) + + Get the variable *name* of *frame*. + + * Return a :term:`strong reference` to the variable value on success. + * Raise :exc:`NameError` and return ``NULL`` if the variable does not exist. + * Raise an exception and return ``NULL`` on error. + + *name* type must be a :class:`str`. + + .. versionadded:: 3.12 + + +.. c:function:: PyObject* PyFrame_GetVarString(PyFrameObject *frame, const char *name) + + Similar to :c:func:`PyFrame_GetVar`, but the variable name is a C string + encoded in UTF-8. + + .. versionadded:: 3.12 + + .. c:function:: PyObject* PyFrame_GetLocals(PyFrameObject *frame) Get the *frame*'s ``f_locals`` attribute (:class:`dict`). diff --git a/Doc/c-api/function.rst b/Doc/c-api/function.rst index df88e85e518829..3cce18bdde3057 100644 --- a/Doc/c-api/function.rst +++ b/Doc/c-api/function.rst @@ -118,3 +118,63 @@ There are a few functions specific to Python functions. must be a dictionary or ``Py_None``. Raises :exc:`SystemError` and returns ``-1`` on failure. + + +.. c:function:: int PyFunction_AddWatcher(PyFunction_WatchCallback callback) + + Register *callback* as a function watcher for the current interpreter. + Return an ID which may be passed to :c:func:`PyFunction_ClearWatcher`. + In case of error (e.g. no more watcher IDs available), + return ``-1`` and set an exception. + + .. versionadded:: 3.12 + + +.. c:function:: int PyFunction_ClearWatcher(int watcher_id) + + Clear watcher identified by *watcher_id* previously returned from + :c:func:`PyFunction_AddWatcher` for the current interpreter. + Return ``0`` on success, or ``-1`` and set an exception on error + (e.g. if the given *watcher_id* was never registered.) + + .. versionadded:: 3.12 + + +.. c:type:: PyFunction_WatchEvent + + Enumeration of possible function watcher events: + - ``PyFunction_EVENT_CREATE`` + - ``PyFunction_EVENT_DESTROY`` + - ``PyFunction_EVENT_MODIFY_CODE`` + - ``PyFunction_EVENT_MODIFY_DEFAULTS`` + - ``PyFunction_EVENT_MODIFY_KWDEFAULTS`` + + .. versionadded:: 3.12 + + +.. c:type:: int (*PyFunction_WatchCallback)(PyFunction_WatchEvent event, PyFunctionObject *func, PyObject *new_value) + + Type of a function watcher callback function. + + If *event* is ``PyFunction_EVENT_CREATE`` or ``PyFunction_EVENT_DESTROY`` + then *new_value* will be ``NULL``. Otherwise, *new_value* will hold a + :term:`borrowed reference` to the new value that is about to be stored in + *func* for the attribute that is being modified. + + The callback may inspect but must not modify *func*; doing so could have + unpredictable effects, including infinite recursion. 
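As a brief illustration (not taken from this diff), a watcher built on the API documented above might look like the following minimal sketch; the callback name and counter are hypothetical, and CPython 3.12 headers are assumed::

    /* Hedged sketch: counting function-creation events with a watcher. */
    #include <Python.h>

    static Py_ssize_t functions_created = 0;   /* hypothetical counter */

    static int
    count_function_events(PyFunction_WatchEvent event, PyFunctionObject *func,
                          PyObject *new_value)
    {
        if (event == PyFunction_EVENT_CREATE) {
            functions_created++;      /* func is fully initialized here */
        }
        /* Inspect only -- the callback must not modify func. */
        return 0;                     /* 0 means "no error" */
    }

    static int
    install_function_watcher(void)
    {
        int watcher_id = PyFunction_AddWatcher(count_function_events);
        if (watcher_id < 0) {
            return -1;                /* an exception is already set */
        }
        /* Later, unregister with PyFunction_ClearWatcher(watcher_id). */
        return watcher_id;
    }
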
+ + If *event* is ``PyFunction_EVENT_CREATE``, then the callback is invoked + after `func` has been fully initialized. Otherwise, the callback is invoked + before the modification to *func* takes place, so the prior state of *func* + can be inspected. The runtime is permitted to optimize away the creation of + function objects when possible. In such cases no event will be emitted. + Although this creates the possitibility of an observable difference of + runtime behavior depending on optimization decisions, it does not change + the semantics of the Python code being executed. + + If the callback returns with an exception set, it must return ``-1``; this + exception will be printed as an unraisable exception using + :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``. + + .. versionadded:: 3.12 diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 0922956c607bc3..a51619db6d3d97 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -150,6 +150,11 @@ Importing Modules See also :c:func:`PyImport_ExecCodeModuleEx` and :c:func:`PyImport_ExecCodeModuleWithPathnames`. + .. versionchanged:: 3.12 + The setting of :attr:`__cached__` and :attr:`__loader__` is + deprecated. See :class:`~importlib.machinery.ModuleSpec` for + alternatives. + .. c:function:: PyObject* PyImport_ExecCodeModuleEx(const char *name, PyObject *co, const char *pathname) @@ -167,6 +172,10 @@ Importing Modules .. versionadded:: 3.3 + .. versionchanged:: 3.12 + Setting :attr:`__cached__` is deprecated. See + :class:`~importlib.machinery.ModuleSpec` for alternatives. + .. c:function:: PyObject* PyImport_ExecCodeModuleWithPathnames(const char *name, PyObject *co, const char *pathname, const char *cpathname) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 513ef93a384202..afb17719a77ab2 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1929,11 +1929,11 @@ is not possible due to its implementation being opaque at build time. Free the given *key* allocated by :c:func:`PyThread_tss_alloc`, after first calling :c:func:`PyThread_tss_delete` to ensure any associated thread locals have been unassigned. This is a no-op if the *key* - argument is `NULL`. + argument is ``NULL``. .. note:: A freed key becomes a dangling pointer. You should reset the key to - `NULL`. + ``NULL``. Methods diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index ea76315fc09b62..c3346b0421ddef 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -254,7 +254,7 @@ PyPreConfig .. c:member:: int configure_locale - Set the LC_CTYPE locale to the user preferred locale? + Set the LC_CTYPE locale to the user preferred locale. If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. @@ -1571,8 +1571,6 @@ Private provisional API: * :c:member:`PyConfig._init_main`: if set to ``0``, :c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase. -* :c:member:`PyConfig._isolated_interpreter`: if non-zero, - disallow threads, subprocesses and fork. .. c:function:: PyStatus _Py_InitializeMain(void) diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index f726cd48663b1c..7041c15d23fb83 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -95,6 +95,8 @@ for the I/O buffer escapes completely the Python memory manager. Allocator Domains ================= +.. _allocator-domains: + All allocating functions belong to one of three different "domains" (see also :c:type:`PyMemAllocatorDomain`). 
These domains represent different allocation strategies and are optimized for different purposes. The specific details on @@ -479,6 +481,25 @@ Customize Memory Allocators See also :c:member:`PyPreConfig.allocator` and :ref:`Preinitialize Python with PyPreConfig `. + .. warning:: + + :c:func:`PyMem_SetAllocator` does have the following contract: + + * It can be called after :c:func:`Py_PreInitialize` and before + :c:func:`Py_InitializeFromConfig` to install a custom memory + allocator. There are no restrictions over the installed allocator + other than the ones imposed by the domain (for instance, the Raw + Domain allows the allocator to be called without the GIL held). See + :ref:`the section on allocator domains ` for more + information. + + * If called after Python has finish initializing (after + :c:func:`Py_InitializeFromConfig` has been called) the allocator + **must** wrap the existing allocator. Substituting the current + allocator for some other arbitrary one is **not supported**. + + + .. c:function:: void PyMem_SetupDebugHooks(void) Setup :ref:`debug hooks in the Python memory allocators ` diff --git a/Doc/c-api/memoryview.rst b/Doc/c-api/memoryview.rst index 4d94b3f545f327..ebd5c7760437bf 100644 --- a/Doc/c-api/memoryview.rst +++ b/Doc/c-api/memoryview.rst @@ -55,7 +55,7 @@ any other object. *mview* **must** be a memoryview instance; this macro doesn't check its type, you must do it yourself or you will risk crashes. -.. c:function:: Py_buffer *PyMemoryView_GET_BASE(PyObject *mview) +.. c:function:: PyObject *PyMemoryView_GET_BASE(PyObject *mview) Return either a pointer to the exporting object that the memoryview is based on or ``NULL`` if the memoryview has been created by one of the functions diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst index 1cff4e7215cf49..cd1f2ef7076836 100644 --- a/Doc/c-api/refcounting.rst +++ b/Doc/c-api/refcounting.rst @@ -11,6 +11,26 @@ The macros in this section are used for managing reference counts of Python objects. +.. c:function:: Py_ssize_t Py_REFCNT(PyObject *o) + + Get the reference count of the Python object *o*. + + Use the :c:func:`Py_SET_REFCNT()` function to set an object reference count. + + .. versionchanged:: 3.11 + The parameter type is no longer :c:expr:`const PyObject*`. + + .. versionchanged:: 3.10 + :c:func:`Py_REFCNT()` is changed to the inline static function. + + +.. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt) + + Set the object *o* reference counter to *refcnt*. + + .. versionadded:: 3.9 + + .. c:function:: void Py_INCREF(PyObject *o) Increment the reference count for object *o*. diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 76803a093353b5..827d624fc99edb 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -17,7 +17,8 @@ All Python objects ultimately share a small number of fields at the beginning of the object's representation in memory. These are represented by the :c:type:`PyObject` and :c:type:`PyVarObject` types, which are defined, in turn, by the expansions of some macros also used, whether directly or indirectly, in -the definition of all other Python objects. +the definition of all other Python objects. Additional macros can be found +under :ref:`reference counting `. .. c:type:: PyObject @@ -121,26 +122,6 @@ the definition of all other Python objects. .. versionadded:: 3.9 -.. c:function:: Py_ssize_t Py_REFCNT(PyObject *o) - - Get the reference count of the Python object *o*. 
- - Use the :c:func:`Py_SET_REFCNT()` function to set an object reference count. - - .. versionchanged:: 3.11 - The parameter type is no longer :c:expr:`const PyObject*`. - - .. versionchanged:: 3.10 - :c:func:`Py_REFCNT()` is changed to the inline static function. - - -.. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt) - - Set the object *o* reference counter to *refcnt*. - - .. versionadded:: 3.9 - - .. c:function:: Py_ssize_t Py_SIZE(PyVarObject *o) Get the size of the Python object *o*. @@ -404,86 +385,67 @@ Accessing attributes of extension types .. c:type:: PyMemberDef Structure which describes an attribute of a type which corresponds to a C - struct member. Its fields are: + struct member. Its fields are, in order: - +------------------+---------------+-------------------------------+ - | Field | C Type | Meaning | - +==================+===============+===============================+ - | :attr:`name` | const char \* | name of the member | - +------------------+---------------+-------------------------------+ - | :attr:`!type` | int | the type of the member in the | - | | | C struct | - +------------------+---------------+-------------------------------+ - | :attr:`offset` | Py_ssize_t | the offset in bytes that the | - | | | member is located on the | - | | | type's object struct | - +------------------+---------------+-------------------------------+ - | :attr:`flags` | int | flag bits indicating if the | - | | | field should be read-only or | - | | | writable | - +------------------+---------------+-------------------------------+ - | :attr:`doc` | const char \* | points to the contents of the | - | | | docstring | - +------------------+---------------+-------------------------------+ + .. c:member:: const char* name - :attr:`!type` can be one of many ``T_`` macros corresponding to various C - types. When the member is accessed in Python, it will be converted to the - equivalent Python type. - - =============== ================== - Macro name C type - =============== ================== - T_SHORT short - T_INT int - T_LONG long - T_FLOAT float - T_DOUBLE double - T_STRING const char \* - T_OBJECT PyObject \* - T_OBJECT_EX PyObject \* - T_CHAR char - T_BYTE char - T_UBYTE unsigned char - T_UINT unsigned int - T_USHORT unsigned short - T_ULONG unsigned long - T_BOOL char - T_LONGLONG long long - T_ULONGLONG unsigned long long - T_PYSSIZET Py_ssize_t - =============== ================== - - :c:macro:`T_OBJECT` and :c:macro:`T_OBJECT_EX` differ in that - :c:macro:`T_OBJECT` returns ``None`` if the member is ``NULL`` and - :c:macro:`T_OBJECT_EX` raises an :exc:`AttributeError`. Try to use - :c:macro:`T_OBJECT_EX` over :c:macro:`T_OBJECT` because :c:macro:`T_OBJECT_EX` - handles use of the :keyword:`del` statement on that attribute more correctly - than :c:macro:`T_OBJECT`. - - :attr:`flags` can be ``0`` for write and read access or :c:macro:`READONLY` for - read-only access. Using :c:macro:`T_STRING` for :attr:`type` implies - :c:macro:`READONLY`. :c:macro:`T_STRING` data is interpreted as UTF-8. - Only :c:macro:`T_OBJECT` and :c:macro:`T_OBJECT_EX` - members can be deleted. (They are set to ``NULL``). + Name of the member. + A NULL value marks the end of a ``PyMemberDef[]`` array. + + The string should be static, no copy is made of it. + + .. c:member:: Py_ssize_t PyMemberDef.offset + + The offset in bytes that the member is located on the type’s object struct. + + .. c:member:: int type + + The type of the member in the C struct. 
+ See :ref:`PyMemberDef-types` for the possible values. + + .. c:member:: int flags + + Zero or more of the :ref:`PyMemberDef-flags`, combined using bitwise OR. + + .. c:member:: const char* doc + + The docstring, or NULL. + The string should be static, no copy is made of it. + Typically, it is defined using :c:macro:`PyDoc_STR`. + + By default (when :c:member:`flags` is ``0``), members allow + both read and write access. + Use the :c:macro:`Py_READONLY` flag for read-only access. + Certain types, like :c:macro:`Py_T_STRING`, imply :c:macro:`Py_READONLY`. + Only :c:macro:`Py_T_OBJECT_EX` (and legacy :c:macro:`T_OBJECT`) members can + be deleted. .. _pymemberdef-offsets: - Heap allocated types (created using :c:func:`PyType_FromSpec` or similar), - ``PyMemberDef`` may contain definitions for the special member - ``__vectorcalloffset__``, corresponding to + For heap-allocated types (created using :c:func:`PyType_FromSpec` or similar), + ``PyMemberDef`` may contain a definition for the special member + ``"__vectorcalloffset__"``, corresponding to :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects. - These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example:: + These must be defined with ``Py_T_PYSSIZET`` and ``Py_READONLY``, for example:: static PyMemberDef spam_type_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, offsetof(Spam_object, vectorcall), READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, + offsetof(Spam_object, vectorcall), Py_READONLY}, {NULL} /* Sentinel */ }; + (You may need to ``#include `` for :c:func:`!offsetof`.) + The legacy offsets :c:member:`~PyTypeObject.tp_dictoffset` and - :c:member:`~PyTypeObject.tp_weaklistoffset` are still supported, but extensions are - strongly encouraged to use ``Py_TPFLAGS_MANAGED_DICT`` and - ``Py_TPFLAGS_MANAGED_WEAKREF`` instead. + :c:member:`~PyTypeObject.tp_weaklistoffset` can be defined similarly using + ``"__dictoffset__"`` and ``"__weaklistoffset__"`` members, but extensions + are strongly encouraged to use :const:`Py_TPFLAGS_MANAGED_DICT` and + :const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. + + .. versionchanged:: 3.12 + ``PyMemberDef`` is always available. + Previously, it required including ``"structmember.h"``. .. c:function:: PyObject* PyMember_GetOne(const char *obj_addr, struct PyMemberDef *m) @@ -491,6 +453,10 @@ Accessing attributes of extension types attribute is described by ``PyMemberDef`` *m*. Returns ``NULL`` on error. + .. versionchanged:: 3.12 + + ``PyMember_GetOne`` is always available. + Previously, it required including ``"structmember.h"``. .. c:function:: int PyMember_SetOne(char *obj_addr, struct PyMemberDef *m, PyObject *o) @@ -498,29 +464,169 @@ Accessing attributes of extension types The attribute to set is described by ``PyMemberDef`` *m*. Returns ``0`` if successful and a negative value on failure. + .. versionchanged:: 3.12 + + ``PyMember_SetOne`` is always available. + Previously, it required including ``"structmember.h"``. + +.. _PyMemberDef-flags: + +Member flags +^^^^^^^^^^^^ + +The following flags can be used with :c:member:`PyMemberDef.flags`: + +.. c:macro:: Py_READONLY + + Not writable. + +.. c:macro:: Py_AUDIT_READ + + Emit an ``object.__getattr__`` :ref:`audit event ` + before reading. + +.. index:: + single: READ_RESTRICTED + single: WRITE_RESTRICTED + single: RESTRICTED + +.. versionchanged:: 3.10 + + The :const:`!RESTRICTED`, :const:`!READ_RESTRICTED` and + :const:`!WRITE_RESTRICTED` macros available with + ``#include "structmember.h"`` are deprecated. 
+ :const:`!READ_RESTRICTED` and :const:`!RESTRICTED` are equivalent to + :const:`Py_AUDIT_READ`; :const:`!WRITE_RESTRICTED` does nothing. + +.. index:: + single: READONLY + +.. versionchanged:: 3.12 + + The :const:`!READONLY` macro was renamed to :const:`Py_READONLY`. + The :const:`!PY_AUDIT_READ` macro was renamed with the ``Py_`` prefix. + The new names are now always available. + Previously, these required ``#include "structmember.h"``. + The header is still available and it provides the old names. + +.. _PyMemberDef-types: + +Member types +^^^^^^^^^^^^ + +:c:member:`PyMemberDef.type` can be one of the following macros corresponding +to various C types. +When the member is accessed in Python, it will be converted to the +equivalent Python type. +When it is set from Python, it will be converted back to the C type. +If that is not possible, an exception such as :exc:`TypeError` or +:exc:`ValueError` is raised. + +Unless marked (D), attributes defined this way cannot be deleted +using e.g. :keyword:`del` or :py:func:`delattr`. + +================================ ============================= ====================== +Macro name C type Python type +================================ ============================= ====================== +.. c:macro:: Py_T_BYTE :c:expr:`char` :py:class:`int` +.. c:macro:: Py_T_SHORT :c:expr:`short` :py:class:`int` +.. c:macro:: Py_T_INT :c:expr:`int` :py:class:`int` +.. c:macro:: Py_T_LONG :c:expr:`long` :py:class:`int` +.. c:macro:: Py_T_LONGLONG :c:expr:`long long` :py:class:`int` +.. c:macro:: Py_T_UBYTE :c:expr:`unsigned char` :py:class:`int` +.. c:macro:: Py_T_UINT :c:expr:`unsigned int` :py:class:`int` +.. c:macro:: Py_T_USHORT :c:expr:`unsigned short` :py:class:`int` +.. c:macro:: Py_T_ULONG :c:expr:`unsigned long` :py:class:`int` +.. c:macro:: Py_T_ULONGLONG :c:expr:`unsigned long long` :py:class:`int` +.. c:macro:: Py_T_PYSSIZET :c:expr:`Py_ssize_t` :py:class:`int` +.. c:macro:: Py_T_FLOAT :c:expr:`float` :py:class:`float` +.. c:macro:: Py_T_DOUBLE :c:expr:`double` :py:class:`float` +.. c:macro:: Py_T_BOOL :c:expr:`char` :py:class:`bool` + (written as 0 or 1) +.. c:macro:: Py_T_STRING :c:expr:`const char *` (*) :py:class:`str` (RO) +.. c:macro:: Py_T_STRING_INPLACE :c:expr:`const char[]` (*) :py:class:`str` (RO) +.. c:macro:: Py_T_CHAR :c:expr:`char` (0-127) :py:class:`str` (**) +.. c:macro:: Py_T_OBJECT_EX :c:expr:`PyObject *` :py:class:`object` (D) +================================ ============================= ====================== + + (*): Zero-terminated, UTF8-encoded C string. + With :c:macro:`!Py_T_STRING` the C representation is a pointer; + with :c:macro:`!Py_T_STRING_INLINE` the string is stored directly + in the structure. + + (**): String of length 1. Only ASCII is accepted. + + (RO): Implies :c:macro:`Py_READONLY`. + + (D): Can be deleted, in which case the pointer is set to ``NULL``. + Reading a ``NULL`` pointer raises :py:exc:`AttributeError`. + +.. index:: + single: T_BYTE + single: T_SHORT + single: T_INT + single: T_LONG + single: T_LONGLONG + single: T_UBYTE + single: T_USHORT + single: T_UINT + single: T_ULONG + single: T_ULONGULONG + single: T_PYSSIZET + single: T_FLOAT + single: T_DOUBLE + single: T_BOOL + single: T_CHAR + single: T_STRING + single: T_STRING_INPLACE + single: T_OBJECT_EX + single: structmember.h + +.. versionadded:: 3.12 + + In previous versions, the macros were only available with + ``#include "structmember.h"`` and were named without the ``Py_`` prefix + (e.g. as ``T_INT``). 
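For illustration only (not part of this patch), a member table written with the new names might look like the following sketch; the ``MyObject`` struct and attribute names are hypothetical, and CPython 3.12 headers are assumed::

    /* Hedged sketch: a PyMemberDef table using the Py_T_* / Py_READONLY names. */
    #include <Python.h>
    #include <stddef.h>               /* offsetof() */

    typedef struct {
        PyObject_HEAD
        PyObject *label;              /* read/write, deletable object attribute */
        int count;                    /* read-only int attribute */
    } MyObject;

    static PyMemberDef my_members[] = {
        {"label", Py_T_OBJECT_EX, offsetof(MyObject, label), 0,
         PyDoc_STR("a label; reading it after deletion raises AttributeError")},
        {"count", Py_T_INT, offsetof(MyObject, count), Py_READONLY,
         PyDoc_STR("a read-only counter")},
        {NULL}                        /* sentinel: a NULL name ends the array */
    };
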
+ The header is still available and contains the old names, along with + the following deprecated types: + + .. c:macro:: T_OBJECT + + Like ``Py_T_OBJECT_EX``, but ``NULL`` is converted to ``None``. + This results in surprising behavior in Python: deleting the attribute + effectively sets it to ``None``. + + .. c:macro:: T_NONE + + Always ``None``. Must be used with :c:macro:`Py_READONLY`. + +Defining Getters and Setters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. c:type:: PyGetSetDef Structure to define property-like access for a type. See also description of the :c:member:`PyTypeObject.tp_getset` slot. - +-------------+------------------+-----------------------------------+ - | Field | C Type | Meaning | - +=============+==================+===================================+ - | name | const char \* | attribute name | - +-------------+------------------+-----------------------------------+ - | get | getter | C function to get the attribute | - +-------------+------------------+-----------------------------------+ - | set | setter | optional C function to set or | - | | | delete the attribute, if omitted | - | | | the attribute is readonly | - +-------------+------------------+-----------------------------------+ - | doc | const char \* | optional docstring | - +-------------+------------------+-----------------------------------+ - | closure | void \* | optional function pointer, | - | | | providing additional data for | - | | | getter and setter | - +-------------+------------------+-----------------------------------+ + .. c:member:: const char* PyGetSetDef.name + + attribute name + + .. c:member:: getter PyGetSetDef.get + + C funtion to get the attribute. + + .. c:member:: setter PyGetSetDef.set + + Optional C function to set or delete the attribute, if omitted the attribute is readonly. + + .. c:member:: const char* PyGetSetDef.doc + + optional docstring + + .. c:member:: void* PyGetSetDef.closure + + Optional function pointer, providing additional data for getter and setter. The ``get`` function takes one :c:expr:`PyObject*` parameter (the instance) and a function pointer (the associated ``closure``):: diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst index 0bfd4b308d9372..5acddf7849aa33 100644 --- a/Doc/c-api/tuple.rst +++ b/Doc/c-api/tuple.rst @@ -69,7 +69,7 @@ Tuple Objects Return the slice of the tuple pointed to by *p* between *low* and *high*, or ``NULL`` on failure. This is the equivalent of the Python expression - ``p[low:high]``. Indexing from the end of the list is not supported. + ``p[low:high]``. Indexing from the end of the tuple is not supported. .. c:function:: int PyTuple_SetItem(PyObject *p, Py_ssize_t pos, PyObject *o) diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index deb5502a4dff9e..7b5d1fac40ed87 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -40,7 +40,7 @@ Type Objects .. c:function:: unsigned long PyType_GetFlags(PyTypeObject* type) Return the :c:member:`~PyTypeObject.tp_flags` member of *type*. This function is primarily - meant for use with `Py_LIMITED_API`; the individual flag bits are + meant for use with ``Py_LIMITED_API``; the individual flag bits are guaranteed to be stable across Python releases, but access to :c:member:`~PyTypeObject.tp_flags` itself is not part of the limited API. @@ -57,6 +57,55 @@ Type Objects modification of the attributes or base classes of the type. +.. c:function:: int PyType_AddWatcher(PyType_WatchCallback callback) + + Register *callback* as a type watcher. 
Return a non-negative integer ID + which must be passed to future calls to :c:func:`PyType_Watch`. In case of + error (e.g. no more watcher IDs available), return ``-1`` and set an + exception. + + .. versionadded:: 3.12 + + +.. c:function:: int PyType_ClearWatcher(int watcher_id) + + Clear watcher identified by *watcher_id* (previously returned from + :c:func:`PyType_AddWatcher`). Return ``0`` on success, ``-1`` on error (e.g. + if *watcher_id* was never registered.) + + An extension should never call ``PyType_ClearWatcher`` with a *watcher_id* + that was not returned to it by a previous call to + :c:func:`PyType_AddWatcher`. + + .. versionadded:: 3.12 + + +.. c:function:: int PyType_Watch(int watcher_id, PyObject *type) + + Mark *type* as watched. The callback granted *watcher_id* by + :c:func:`PyType_AddWatcher` will be called whenever + :c:func:`PyType_Modified` reports a change to *type*. (The callback may be + called only once for a series of consecutive modifications to *type*, if + :c:func:`PyType_Lookup` is not called on *type* between the modifications; + this is an implementation detail and subject to change.) + + An extension should never call ``PyType_Watch`` with a *watcher_id* that was + not returned to it by a previous call to :c:func:`PyType_AddWatcher`. + + .. versionadded:: 3.12 + + +.. c:type:: int (*PyType_WatchCallback)(PyObject *type) + + Type of a type-watcher callback function. + + The callback must not modify *type* or cause :c:func:`PyType_Modified` to be + called on *type* or any type in its MRO; violating this rule could cause + infinite recursion. + + .. versionadded:: 3.12 + + .. c:function:: int PyType_HasFeature(PyTypeObject *o, int feature) Return non-zero if the type object *o* sets the feature *feature*. diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 8ccdece3efc54c..4c462f46056739 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1245,6 +1245,17 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** This flag is not inherited. + However, subclasses will not be instantiable unless they provide a + non-NULL :c:member:`~PyTypeObject.tp_new` (which is only possible + via the C API). + + .. note:: + + To disallow instantiating a class directly but allow instantiating + its subclasses (e.g. for an :term:`abstract base class`), + do not use this flag. + Instead, make :c:member:`~PyTypeObject.tp_new` only succeed for + subclasses. .. versionadded:: 3.10 diff --git a/Doc/conf.py b/Doc/conf.py index 909f992d9d821a..6fad5c668dab31 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -197,7 +197,6 @@ coverage_ignore_modules = [ r'[T|t][k|K]', r'Tix', - r'distutils.*', ] coverage_ignore_functions = [ @@ -239,28 +238,3 @@ # Relative filename of the data files refcount_file = 'data/refcounts.dat' stable_abi_file = 'data/stable_abi.dat' - -# Sphinx 2 and Sphinx 3 compatibility -# ----------------------------------- - -# bpo-40204: Allow Sphinx 2 syntax in the C domain -c_allow_pre_v3 = True - -# bpo-40204: Disable warnings on Sphinx 2 syntax of the C domain since the -# documentation is built with -W (warnings treated as errors). -c_warn_on_allowed_pre_v3 = False - -# Fix '!' 
not working with C domain when pre_v3 is enabled -import sphinx - -if sphinx.version_info[:2] < (5, 3): - from sphinx.domains.c import CXRefRole - - original_run = CXRefRole.run - - def new_run(self): - if self.disabled: - return super(CXRefRole, self).run() - return original_run(self) - - CXRefRole.run = new_run diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index fde62eacd00a7c..53895bbced8408 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -1,4 +1,5 @@ role,name,added,ifdef_note,struct_abi_kind +macro,PY_VECTORCALL_ARGUMENTS_OFFSET,3.12,, function,PyAIter_Check,3.10,, function,PyArg_Parse,3.2,, function,PyArg_ParseTuple,3.2,, @@ -385,6 +386,8 @@ function,PyMem_Malloc,3.2,, function,PyMem_Realloc,3.2,, type,PyMemberDef,3.2,,full-abi var,PyMemberDescr_Type,3.2,, +function,PyMember_GetOne,3.2,, +function,PyMember_SetOne,3.2,, function,PyMemoryView_FromBuffer,3.11,, function,PyMemoryView_FromMemory,3.7,, function,PyMemoryView_FromObject,3.2,, @@ -536,6 +539,8 @@ function,PyObject_SetItem,3.2,, function,PyObject_Size,3.2,, function,PyObject_Str,3.2,, function,PyObject_Type,3.2,, +function,PyObject_Vectorcall,3.12,, +function,PyObject_VectorcallMethod,3.12,, var,PyProperty_Type,3.2,, var,PyRangeIter_Type,3.2,, var,PyRange_Type,3.2,, @@ -862,6 +867,7 @@ type,descrsetfunc,3.2,, type,destructor,3.2,, type,getattrfunc,3.2,, type,getattrofunc,3.2,, +type,getbufferproc,3.12,, type,getiterfunc,3.2,, type,getter,3.2,, type,hashfunc,3.2,, @@ -872,6 +878,7 @@ type,lenfunc,3.2,, type,newfunc,3.2,, type,objobjargproc,3.2,, type,objobjproc,3.2,, +type,releasebufferproc,3.12,, type,reprfunc,3.2,, type,richcmpfunc,3.2,, type,setattrfunc,3.2,, diff --git a/Doc/extending/embedding.rst b/Doc/extending/embedding.rst index 5f5abdf9c15067..e64db373344038 100644 --- a/Doc/extending/embedding.rst +++ b/Doc/extending/embedding.rst @@ -298,16 +298,16 @@ be directly useful to you: .. code-block:: shell-session - $ /opt/bin/python3.4-config --cflags - -I/opt/include/python3.4m -I/opt/include/python3.4m -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes + $ /opt/bin/python3.11-config --cflags + -I/opt/include/python3.11 -I/opt/include/python3.11 -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -* ``pythonX.Y-config --ldflags`` will give you the recommended flags when - linking: +* ``pythonX.Y-config --ldflags --embed`` will give you the recommended flags + when linking: .. code-block:: shell-session - $ /opt/bin/python3.4-config --ldflags - -L/opt/lib/python3.4/config-3.4m -lpthread -ldl -lutil -lm -lpython3.4m -Xlinker -export-dynamic + $ /opt/bin/python3.11-config --ldflags --embed + -L/opt/lib/python3.11/config-3.11-x86_64-linux-gnu -L/opt/lib -lpython3.11 -lpthread -ldl -lutil -lm .. note:: To avoid confusion between several Python installations (and especially diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index a076eae534b91e..80a1387db200c2 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -208,7 +208,7 @@ a special case, for which the new value passed to the handler is ``NULL``. Python supports two pairs of attribute handlers; a type that supports attributes only needs to implement the functions for one pair. The difference is that one pair takes the name of the attribute as a :c:expr:`char\*`, while the other -accepts a :c:type:`PyObject\*`. Each type can use whichever pair makes more +accepts a :c:expr:`PyObject*`. Each type can use whichever pair makes more sense for the implementation's convenience. 
:: getattrfunc tp_getattr; /* char * version */ @@ -219,7 +219,7 @@ sense for the implementation's convenience. :: If accessing attributes of an object is always a simple operation (this will be explained shortly), there are generic implementations which can be used to -provide the :c:type:`PyObject\*` version of the attribute management functions. +provide the :c:expr:`PyObject*` version of the attribute management functions. The actual need for type-specific attribute handlers almost completely disappeared starting with Python 2.2, though there are many examples which have not been updated to use some of the new generic mechanism that is available. @@ -286,36 +286,11 @@ be read-only or read-write. The structures in the table are defined as:: For each entry in the table, a :term:`descriptor` will be constructed and added to the type which will be able to extract a value from the instance structure. The -:attr:`type` field should contain one of the type codes defined in the -:file:`structmember.h` header; the value will be used to determine how to +:attr:`type` field should contain a type code like :c:macro:`Py_T_INT` or +:c:macro:`Py_T_DOUBLE`; the value will be used to determine how to convert Python values to and from C values. The :attr:`flags` field is used to -store flags which control how the attribute can be accessed. - -The following flag constants are defined in :file:`structmember.h`; they may be -combined using bitwise-OR. - -+---------------------------+----------------------------------------------+ -| Constant | Meaning | -+===========================+==============================================+ -| :const:`READONLY` | Never writable. | -+---------------------------+----------------------------------------------+ -| :const:`PY_AUDIT_READ` | Emit an ``object.__getattr__`` | -| | :ref:`audit events ` before | -| | reading. | -+---------------------------+----------------------------------------------+ - -.. versionchanged:: 3.10 - :const:`RESTRICTED`, :const:`READ_RESTRICTED` and :const:`WRITE_RESTRICTED` - are deprecated. However, :const:`READ_RESTRICTED` is an alias for - :const:`PY_AUDIT_READ`, so fields that specify either :const:`RESTRICTED` - or :const:`READ_RESTRICTED` will also raise an audit event. - -.. index:: - single: READONLY - single: READ_RESTRICTED - single: WRITE_RESTRICTED - single: RESTRICTED - single: PY_AUDIT_READ +store flags which control how the attribute can be accessed: you can set it to +:c:macro:`Py_READONLY` to prevent Python code from setting it. An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table to build descriptors that are used at runtime is that any attribute defined this way can @@ -341,7 +316,7 @@ Type-specific Attribute Management For simplicity, only the :c:expr:`char\*` version will be demonstrated here; the type of the name parameter is the only difference between the :c:expr:`char\*` -and :c:type:`PyObject\*` flavors of the interface. This example effectively does +and :c:expr:`PyObject*` flavors of the interface. This example effectively does the same thing as the generic example above, but does not use the generic support added in Python 2.2. 
It explains how the handler functions are called, so that if you do need to extend their functionality, you'll understand diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index 34c25d1f6f199c..54de3fd42437d9 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -24,7 +24,7 @@ The Basics ========== The :term:`CPython` runtime sees all Python objects as variables of type -:c:type:`PyObject\*`, which serves as a "base type" for all Python objects. +:c:expr:`PyObject*`, which serves as a "base type" for all Python objects. The :c:type:`PyObject` structure itself only contains the object's :term:`reference count` and a pointer to the object's "type object". This is where the action is; the type object determines which (C) functions @@ -239,13 +239,6 @@ adds these capabilities: This version of the module has a number of changes. -We've added an extra include:: - - #include - -This include provides declarations that we use to handle attributes, as -described a bit later. - The :class:`Custom` type now has three data attributes in its C struct, *first*, *last*, and *number*. The *first* and *last* variables are Python strings containing first and last names. The *number* attribute is a C integer. @@ -436,11 +429,11 @@ We want to expose our instance variables as attributes. There are a number of ways to do that. The simplest way is to define member definitions:: static PyMemberDef Custom_members[] = { - {"first", T_OBJECT_EX, offsetof(CustomObject, first), 0, + {"first", Py_T_OBJECT_EX, offsetof(CustomObject, first), 0, "first name"}, - {"last", T_OBJECT_EX, offsetof(CustomObject, last), 0, + {"last", Py_T_OBJECT_EX, offsetof(CustomObject, last), 0, "last name"}, - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -609,7 +602,7 @@ above. In this case, we aren't using a closure, so we just pass ``NULL``. We also remove the member definitions for these attributes:: static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst index 52388fca5cdbca..11d01374dc1e79 100644 --- a/Doc/faq/design.rst +++ b/Doc/faq/design.rst @@ -129,7 +129,7 @@ reference or call the method from a particular class. In C++, if you want to use a method from a base class which is overridden in a derived class, you have to use the ``::`` operator -- in Python you can write ``baseclass.methodname(self, )``. This is particularly useful -for :meth:`__init__` methods, and in general in cases where a derived class +for :meth:`~object.__init__` methods, and in general in cases where a derived class method wants to extend the base class method of the same name and thus has to call the base class method somehow. @@ -155,7 +155,7 @@ Why can't I use an assignment in an expression? Starting in Python 3.8, you can! -Assignment expressions using the walrus operator `:=` assign a variable in an +Assignment expressions using the walrus operator ``:=`` assign a variable in an expression:: while chunk := fp.read(200): @@ -232,7 +232,8 @@ Similar methods exist for bytes and bytearray objects. How fast are exceptions? ------------------------ -A try/except block is extremely efficient if no exceptions are raised. 
Actually +A :keyword:`try`/:keyword:`except` block is extremely efficient if no exceptions +are raised. Actually catching an exception is expensive. In versions of Python prior to 2.0 it was common to use this idiom:: @@ -352,7 +353,7 @@ will probably run out of file descriptors:: c = f.read(1) Indeed, using CPython's reference counting and destructor scheme, each new -assignment to *f* closes the previous file. With a traditional GC, however, +assignment to ``f`` closes the previous file. With a traditional GC, however, those file objects will only get collected (and closed) at varying and possibly long intervals. @@ -376,10 +377,10 @@ Python to work with it.) Traditional GC also becomes a problem when Python is embedded into other applications. While in a standalone Python it's fine to replace the standard -malloc() and free() with versions provided by the GC library, an application -embedding Python may want to have its *own* substitute for malloc() and free(), +``malloc()`` and ``free()`` with versions provided by the GC library, an application +embedding Python may want to have its *own* substitute for ``malloc()`` and ``free()``, and may not want Python's. Right now, CPython works with anything that -implements malloc() and free() properly. +implements ``malloc()`` and ``free()`` properly. Why isn't all memory freed when CPython exits? @@ -401,14 +402,15 @@ Why are there separate tuple and list data types? Lists and tuples, while similar in many respects, are generally used in fundamentally different ways. Tuples can be thought of as being similar to -Pascal records or C structs; they're small collections of related data which may +Pascal ``records`` or C ``structs``; they're small collections of related data which may be of different types which are operated on as a group. For example, a Cartesian coordinate is appropriately represented as a tuple of two or three numbers. Lists, on the other hand, are more like arrays in other languages. They tend to hold a varying number of objects all of which have the same type and which are -operated on one-by-one. For example, ``os.listdir('.')`` returns a list of +operated on one-by-one. For example, :func:`os.listdir('.') ` +returns a list of strings representing the files in the current directory. Functions which operate on this output would generally not break if you added another file or two to the directory. @@ -444,9 +446,9 @@ far) under most circumstances, and the implementation is simpler. Dictionaries work by computing a hash code for each key stored in the dictionary using the :func:`hash` built-in function. The hash code varies widely depending -on the key and a per-process seed; for example, "Python" could hash to --539294296 while "python", a string that differs by a single bit, could hash -to 1142331976. The hash code is then used to calculate a location in an +on the key and a per-process seed; for example, ``'Python'`` could hash to +``-539294296`` while ``'python'``, a string that differs by a single bit, could hash +to ``1142331976``. The hash code is then used to calculate a location in an internal array where the value will be stored. Assuming that you're storing keys that all have different hash values, this means that dictionaries take constant time -- O(1), in Big-O notation -- to retrieve a key. 
@@ -497,7 +499,8 @@ Some unacceptable solutions that have been proposed: There is a trick to get around this if you need to, but use it at your own risk: You can wrap a mutable structure inside a class instance which has both a -:meth:`__eq__` and a :meth:`__hash__` method. You must then make sure that the +:meth:`~object.__eq__` and a :meth:`~object.__hash__` method. +You must then make sure that the hash value for all such wrapper objects that reside in a dictionary (or other hash based structure), remain fixed while the object is in the dictionary (or other structure). :: @@ -528,7 +531,7 @@ is True``) then ``hash(o1) == hash(o2)`` (ie, ``o1.__hash__() == o2.__hash__()`` regardless of whether the object is in a dictionary or not. If you fail to meet these restrictions dictionaries and other hash based structures will misbehave. -In the case of ListWrapper, whenever the wrapper object is in a dictionary the +In the case of :class:`!ListWrapper`, whenever the wrapper object is in a dictionary the wrapped list must not change to avoid anomalies. Don't do this unless you are prepared to think hard about the requirements and the consequences of not meeting them correctly. Consider yourself warned. @@ -581,9 +584,9 @@ exhaustive test suites that exercise every line of code in a module. An appropriate testing discipline can help build large complex applications in Python as well as having interface specifications would. In fact, it can be better because an interface specification cannot test certain properties of a -program. For example, the :meth:`append` method is expected to add new elements +program. For example, the :meth:`list.append` method is expected to add new elements to the end of some internal list; an interface specification cannot test that -your :meth:`append` implementation will actually do this correctly, but it's +your :meth:`list.append` implementation will actually do this correctly, but it's trivial to check this property in a test suite. Writing test suites is very helpful, and you might want to design your code to @@ -599,14 +602,14 @@ Why is there no goto? In the 1970s people realized that unrestricted goto could lead to messy "spaghetti" code that was hard to understand and revise. In a high-level language, it is also unneeded as long as there -are ways to branch (in Python, with ``if`` statements and ``or``, -``and``, and ``if-else`` expressions) and loop (with ``while`` -and ``for`` statements, possibly containing ``continue`` and ``break``). +are ways to branch (in Python, with :keyword:`if` statements and :keyword:`or`, +:keyword:`and`, and :keyword:`if`/:keyword:`else` expressions) and loop (with :keyword:`while` +and :keyword:`for` statements, possibly containing :keyword:`continue` and :keyword:`break`). One can also use exceptions to provide a "structured goto" that works even across function calls. Many feel that exceptions can conveniently emulate all -reasonable uses of the "go" or "goto" constructs of C, Fortran, and other +reasonable uses of the ``go`` or ``goto`` constructs of C, Fortran, and other languages. For example:: class label(Exception): pass # declare a label @@ -620,7 +623,7 @@ languages. For example:: ... This doesn't allow you to jump into the middle of a loop, but that's usually -considered an abuse of goto anyway. Use sparingly. +considered an abuse of ``goto`` anyway. Use sparingly. Why can't raw strings (r-strings) end with a backslash? @@ -652,7 +655,7 @@ If you're trying to build a pathname for a DOS command, try e.g. 
one of :: Why doesn't Python have a "with" statement for attribute assignments? --------------------------------------------------------------------- -Python has a 'with' statement that wraps the execution of a block, calling code +Python has a :keyword:`with` statement that wraps the execution of a block, calling code on the entrance and exit from the block. Some languages have a construct that looks like this:: @@ -679,13 +682,13 @@ For instance, take the following incomplete snippet:: with a: print(x) -The snippet assumes that "a" must have a member attribute called "x". However, +The snippet assumes that ``a`` must have a member attribute called ``x``. However, there is nothing in Python that tells the interpreter this. What should happen -if "a" is, let us say, an integer? If there is a global variable named "x", -will it be used inside the with block? As you see, the dynamic nature of Python +if ``a`` is, let us say, an integer? If there is a global variable named ``x``, +will it be used inside the :keyword:`with` block? As you see, the dynamic nature of Python makes such choices much harder. -The primary benefit of "with" and similar language features (reduction of code +The primary benefit of :keyword:`with` and similar language features (reduction of code volume) can, however, easily be achieved in Python by assignment. Instead of:: function(args).mydict[index][index].a = 21 @@ -714,7 +717,8 @@ Why don't generators support the with statement? For technical reasons, a generator used directly as a context manager would not work correctly. When, as is most common, a generator is used as an iterator run to completion, no closing is needed. When it is, wrap -it as "contextlib.closing(generator)" in the 'with' statement. +it as :func:`contextlib.closing(generator) ` +in the :keyword:`with` statement. Why are colons required for the if/while/def/class statements? diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 81842fce4303a6..489bca76432d85 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -125,11 +125,15 @@ find packages of interest to you. How does the Python version numbering scheme work? -------------------------------------------------- -Python versions are numbered A.B.C or A.B. A is the major version number -- it -is only incremented for really major changes in the language. B is the minor -version number, incremented for less earth-shattering changes. C is the -micro-level -- it is incremented for each bugfix release. See :pep:`6` for more -information about bugfix releases. +Python versions are numbered "A.B.C" or "A.B": + +* *A* is the major version number -- it is only incremented for really major + changes in the language. +* *B* is the minor version number -- it is incremented for less earth-shattering + changes. +* *C* is the micro version number -- it is incremented for each bugfix release. + +See :pep:`6` for more information about bugfix releases. Not all releases are bugfix releases. In the run-up to a new major release, a series of development releases are made, denoted as alpha, beta, or release @@ -139,12 +143,14 @@ Betas are more stable, preserving existing interfaces but possibly adding new modules, and release candidates are frozen, making no changes except as needed to fix critical bugs. -Alpha, beta and release candidate versions have an additional suffix. 
The -suffix for an alpha version is "aN" for some small number N, the suffix for a -beta version is "bN" for some small number N, and the suffix for a release -candidate version is "rcN" for some small number N. In other words, all versions -labeled 2.0aN precede the versions labeled 2.0bN, which precede versions labeled -2.0rcN, and *those* precede 2.0. +Alpha, beta and release candidate versions have an additional suffix: + +* The suffix for an alpha version is "aN" for some small number *N*. +* The suffix for a beta version is "bN" for some small number *N*. +* The suffix for a release candidate version is "rcN" for some small number *N*. + +In other words, all versions labeled *2.0aN* precede the versions labeled +*2.0bN*, which precede versions labeled *2.0rcN*, and *those* precede 2.0. You may also find version numbers with a "+" suffix, e.g. "2.2+". These are unreleased versions, built directly from the CPython development repository. In @@ -429,7 +435,7 @@ With the interpreter, documentation is never far from the student as they are programming. There are also good IDEs for Python. IDLE is a cross-platform IDE for Python -that is written in Python using Tkinter. PythonWin is a Windows-specific IDE. +that is written in Python using Tkinter. Emacs users will be happy to know that there is a very good Python mode for Emacs. All of these programming environments provide syntax highlighting, auto-indenting, and access to the interactive interpreter while coding. Consult diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst index f79cf485712274..a9cde456575020 100644 --- a/Doc/faq/library.rst +++ b/Doc/faq/library.rst @@ -609,7 +609,7 @@ use ``p.read(n)``. substituted for standard input and output. You will have to use pseudo ttys ("ptys") instead of pipes. Or you can use a Python interface to Don Libes' "expect" library. A Python extension that interfaces to expect is called - "expy" and available from http://expectpy.sourceforge.net. A pure Python + "expy" and available from https://expectpy.sourceforge.net. A pure Python solution that works like expect is `pexpect `_. diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index ad281f826dd42c..584d33e9622e33 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -25,8 +25,9 @@ Reference Manual `. You can also write your own debugger by using the code for pdb as an example. The IDLE interactive development environment, which is part of the standard -Python distribution (normally available as Tools/scripts/idle), includes a -graphical debugger. +Python distribution (normally available as +`Tools/scripts/idle3 `_), +includes a graphical debugger. PythonWin is a Python IDE that includes a GUI debugger based on pdb. The PythonWin debugger colors breakpoints and has quite a few cool features such as @@ -78,7 +79,8 @@ set of modules required by a program and bind these modules together with a Python binary to produce a single executable. One is to use the freeze tool, which is included in the Python source tree as -``Tools/freeze``. It converts Python byte code to C arrays; with a C compiler you can +`Tools/freeze `_. +It converts Python byte code to C arrays; with a C compiler you can embed all your modules into a new program, which is then linked with the standard Python modules. @@ -114,7 +116,7 @@ Core Language Why am I getting an UnboundLocalError when the variable has a value? 
-------------------------------------------------------------------- -It can be a surprise to get the UnboundLocalError in previously working +It can be a surprise to get the :exc:`UnboundLocalError` in previously working code when it is modified by adding an assignment statement somewhere in the body of a function. @@ -123,6 +125,7 @@ This code: >>> x = 10 >>> def bar(): ... print(x) + ... >>> bar() 10 @@ -133,7 +136,7 @@ works, but this code: ... print(x) ... x += 1 -results in an UnboundLocalError: +results in an :exc:`!UnboundLocalError`: >>> foo() Traceback (most recent call last): @@ -155,6 +158,7 @@ global: ... global x ... print(x) ... x += 1 + ... >>> foobar() 10 @@ -176,6 +180,7 @@ keyword: ... x += 1 ... bar() ... print(x) + ... >>> foo() 10 11 @@ -273,7 +278,7 @@ main.py:: import mod print(config.x) -Note that using a module is also the basis for implementing the Singleton design +Note that using a module is also the basis for implementing the singleton design pattern, for the same reason. @@ -291,9 +296,9 @@ using multiple imports per line uses less screen space. It's good practice if you import modules in the following order: -1. standard library modules -- e.g. ``sys``, ``os``, ``getopt``, ``re`` +1. standard library modules -- e.g. :mod:`sys`, :mod:`os`, :mod:`argparse`, :mod:`re` 2. third-party library modules (anything installed in Python's site-packages - directory) -- e.g. mx.DateTime, ZODB, PIL.Image, etc. + directory) -- e.g. :mod:`!dateutil`, :mod:`!requests`, :mod:`!PIL.Image` 3. locally developed modules It is sometimes necessary to move imports to a function or class to avoid @@ -471,7 +476,7 @@ object ``x`` refers to). After this assignment we have two objects (the ints Some operations (for example ``y.append(10)`` and ``y.sort()``) mutate the object, whereas superficially similar operations (for example ``y = y + [10]`` -and ``sorted(y)``) create a new object. In general in Python (and in all cases +and :func:`sorted(y) `) create a new object. In general in Python (and in all cases in the standard library) a method that mutates an object will return ``None`` to help avoid getting the two types of operations confused. So if you mistakenly write ``y.sort()`` thinking it will give you a sorted copy of ``y``, @@ -644,7 +649,7 @@ Sequences can be copied by slicing:: How can I find the methods or attributes of an object? ------------------------------------------------------ -For an instance x of a user-defined class, ``dir(x)`` returns an alphabetized +For an instance ``x`` of a user-defined class, :func:`dir(x) ` returns an alphabetized list of the names containing the instance attributes and methods and attributes defined by its class. @@ -669,9 +674,9 @@ callable. Consider the following code:: <__main__.A object at 0x16D07CC> Arguably the class has a name: even though it is bound to two names and invoked -through the name B the created instance is still reported as an instance of -class A. However, it is impossible to say whether the instance's name is a or -b, since both names are bound to the same value. +through the name ``B`` the created instance is still reported as an instance of +class ``A``. However, it is impossible to say whether the instance's name is ``a`` or +``b``, since both names are bound to the same value. Generally speaking it should not be necessary for your code to "know the names" of particular values. Unless you are deliberately writing introspective @@ -841,7 +846,7 @@ How do I get int literal attribute instead of SyntaxError? 
---------------------------------------------------------- Trying to lookup an ``int`` literal attribute in the normal manner gives -a syntax error because the period is seen as a decimal point:: +a :exc:`SyntaxError` because the period is seen as a decimal point:: >>> 1.__class__ File "", line 1 @@ -887,7 +892,7 @@ leading '0' in a decimal number (except '0'). How do I convert a number to a string? -------------------------------------- -To convert, e.g., the number 144 to the string '144', use the built-in type +To convert, e.g., the number ``144`` to the string ``'144'``, use the built-in type constructor :func:`str`. If you want a hexadecimal or octal representation, use the built-in functions :func:`hex` or :func:`oct`. For fancy formatting, see the :ref:`f-strings` and :ref:`formatstrings` sections, @@ -1006,11 +1011,11 @@ Not as such. For simple input parsing, the easiest approach is usually to split the line into whitespace-delimited words using the :meth:`~str.split` method of string objects and then convert decimal strings to numeric values using :func:`int` or -:func:`float`. ``split()`` supports an optional "sep" parameter which is useful +:func:`float`. :meth:`!split()` supports an optional "sep" parameter which is useful if the line uses something other than whitespace as a separator. For more complicated input parsing, regular expressions are more powerful -than C's :c:func:`sscanf` and better suited for the task. +than C's ``sscanf`` and better suited for the task. What does 'UnicodeDecodeError' or 'UnicodeEncodeError' error mean? @@ -1206,15 +1211,16 @@ difference is that a Python list can contain objects of many different types. The ``array`` module also provides methods for creating arrays of fixed types with compact representations, but they are slower to index than lists. Also -note that NumPy and other third party packages define array-like structures with +note that `NumPy `_ +and other third party packages define array-like structures with various characteristics as well. -To get Lisp-style linked lists, you can emulate cons cells using tuples:: +To get Lisp-style linked lists, you can emulate *cons cells* using tuples:: lisp_list = ("like", ("this", ("example", None) ) ) If mutability is desired, you could use lists instead of tuples. Here the -analogue of lisp car is ``lisp_list[0]`` and the analogue of cdr is +analogue of a Lisp *car* is ``lisp_list[0]`` and the analogue of *cdr* is ``lisp_list[1]``. Only do this if you're sure you really need to, because it's usually a lot slower than using Python lists. @@ -1273,13 +1279,25 @@ Or, you can use an extension that provides a matrix datatype; `NumPy `_ is the best known. -How do I apply a method to a sequence of objects? -------------------------------------------------- +How do I apply a method or function to a sequence of objects? +------------------------------------------------------------- -Use a list comprehension:: +To call a method or function and accumulate the return values is a list, +a :term:`list comprehension` is an elegant solution:: result = [obj.method() for obj in mylist] + result = [function(obj) for obj in mylist] + +To just run the method or function without saving the return values, +a plain :keyword:`for` loop will suffice:: + + for obj in mylist: + obj.method() + + for obj in mylist: + function(obj) + .. _faq-augmented-assignment-tuple-error: Why does a_tuple[i] += ['item'] raise an exception when the addition works? 
@@ -1334,11 +1352,12 @@ that even though there was an error, the append worked:: ['foo', 'item'] To see why this happens, you need to know that (a) if an object implements an -``__iadd__`` magic method, it gets called when the ``+=`` augmented assignment +:meth:`~object.__iadd__` magic method, it gets called when the ``+=`` augmented +assignment is executed, and its return value is what gets used in the assignment statement; -and (b) for lists, ``__iadd__`` is equivalent to calling ``extend`` on the list +and (b) for lists, :meth:`!__iadd__` is equivalent to calling :meth:`~list.extend` on the list and returning the list. That's why we say that for lists, ``+=`` is a -"shorthand" for ``list.extend``:: +"shorthand" for :meth:`!list.extend`:: >>> a_list = [] >>> a_list += [1] @@ -1363,7 +1382,7 @@ Thus, in our tuple example what is happening is equivalent to:: ... TypeError: 'tuple' object does not support item assignment -The ``__iadd__`` succeeds, and thus the list is extended, but even though +The :meth:`!__iadd__` succeeds, and thus the list is extended, but even though ``result`` points to the same object that ``a_tuple[0]`` already points to, that final assignment still results in an error, because tuples are immutable. @@ -1440,7 +1459,8 @@ See also :ref:`why-self`. How do I check if an object is an instance of a given class or of a subclass of it? ----------------------------------------------------------------------------------- -Use the built-in function ``isinstance(obj, cls)``. You can check if an object +Use the built-in function :func:`isinstance(obj, cls) `. You can +check if an object is an instance of any of a number of classes by providing a tuple instead of a single class, e.g. ``isinstance(obj, (class1, class2, ...))``, and can also check whether an object is one of Python's built-in types, e.g. @@ -1537,13 +1557,13 @@ Here the ``UpperOut`` class redefines the ``write()`` method to convert the argument string to uppercase before calling the underlying ``self._outfile.write()`` method. All other methods are delegated to the underlying ``self._outfile`` object. The delegation is accomplished via the -``__getattr__`` method; consult :ref:`the language reference ` +:meth:`~object.__getattr__` method; consult :ref:`the language reference ` for more information about controlling attribute access. Note that for more general cases delegation can get trickier. When attributes -must be set as well as retrieved, the class must define a :meth:`__setattr__` +must be set as well as retrieved, the class must define a :meth:`~object.__setattr__` method too, and it must do so carefully. The basic implementation of -:meth:`__setattr__` is roughly equivalent to the following:: +:meth:`!__setattr__` is roughly equivalent to the following:: class X: ... @@ -1551,7 +1571,8 @@ method too, and it must do so carefully. The basic implementation of self.__dict__[name] = value ... -Most :meth:`__setattr__` implementations must modify ``self.__dict__`` to store +Most :meth:`!__setattr__` implementations must modify +:meth:`self.__dict__ ` to store local state for self without causing an infinite recursion. @@ -1689,17 +1710,17 @@ My class defines __del__ but it is not called when I delete the object. There are several possible reasons for this. -The del statement does not necessarily call :meth:`__del__` -- it simply +The :keyword:`del` statement does not necessarily call :meth:`~object.__del__` -- it simply decrements the object's reference count, and if this reaches zero -:meth:`__del__` is called. 
+:meth:`!__del__` is called. If your data structures contain circular links (e.g. a tree where each child has a parent reference and each parent has a list of children) the reference counts will never go back to zero. Once in a while Python runs an algorithm to detect such cycles, but the garbage collector might run some time after the last -reference to your data structure vanishes, so your :meth:`__del__` method may be +reference to your data structure vanishes, so your :meth:`!__del__` method may be called at an inconvenient and random time. This is inconvenient if you're trying -to reproduce a problem. Worse, the order in which object's :meth:`__del__` +to reproduce a problem. Worse, the order in which object's :meth:`!__del__` methods are executed is arbitrary. You can run :func:`gc.collect` to force a collection, but there *are* pathological cases where objects will never be collected. @@ -1707,7 +1728,7 @@ collected. Despite the cycle collector, it's still a good idea to define an explicit ``close()`` method on objects to be called whenever you're done with them. The ``close()`` method can then remove attributes that refer to subobjects. Don't -call :meth:`__del__` directly -- :meth:`__del__` should call ``close()`` and +call :meth:`!__del__` directly -- :meth:`!__del__` should call ``close()`` and ``close()`` should make sure that it can be called more than once for the same object. @@ -1724,7 +1745,7 @@ and sibling references (if they need them!). Normally, calling :func:`sys.exc_clear` will take care of this by clearing the last recorded exception. -Finally, if your :meth:`__del__` method raises an exception, a warning message +Finally, if your :meth:`!__del__` method raises an exception, a warning message is printed to :data:`sys.stderr`. @@ -1852,8 +1873,8 @@ For example, here is the implementation of How can a subclass control what data is stored in an immutable instance? ------------------------------------------------------------------------ -When subclassing an immutable type, override the :meth:`__new__` method -instead of the :meth:`__init__` method. The latter only runs *after* an +When subclassing an immutable type, override the :meth:`~object.__new__` method +instead of the :meth:`~object.__init__` method. The latter only runs *after* an instance is created, which is too late to alter data in an immutable instance. @@ -1955,8 +1976,8 @@ can't be made to work because it cannot detect changes to the attributes. To make the *lru_cache* approach work when the *station_id* is mutable, -the class needs to define the *__eq__* and *__hash__* methods so that -the cache can detect relevant attribute updates:: +the class needs to define the :meth:`~object.__eq__` and :meth:`~object.__hash__` +methods so that the cache can detect relevant attribute updates:: class Weather: "Example with a mutable station identifier" diff --git a/Doc/faq/windows.rst b/Doc/faq/windows.rst index 7768aafd979685..c0c92fdbbc84d1 100644 --- a/Doc/faq/windows.rst +++ b/Doc/faq/windows.rst @@ -167,7 +167,7 @@ How can I embed Python into a Windows application? Embedding the Python interpreter in a Windows app can be summarized as follows: -1. Do _not_ build Python into your .exe file directly. On Windows, Python must +1. Do **not** build Python into your .exe file directly. On Windows, Python must be a DLL to handle importing modules that are themselves DLL's. (This is the first key undocumented fact.) Instead, link to :file:`python{NN}.dll`; it is typically installed in ``C:\Windows\System``. 
*NN* is the Python version, a @@ -191,7 +191,7 @@ Embedding the Python interpreter in a Windows app can be summarized as follows: 2. If you use SWIG, it is easy to create a Python "extension module" that will make the app's data and methods available to Python. SWIG will handle just about all the grungy details for you. The result is C code that you link - *into* your .exe file (!) You do _not_ have to create a DLL file, and this + *into* your .exe file (!) You do **not** have to create a DLL file, and this also simplifies linking. 3. SWIG will create an init function (a C function) whose name depends on the @@ -218,10 +218,10 @@ Embedding the Python interpreter in a Windows app can be summarized as follows: 5. There are two problems with Python's C API which will become apparent if you use a compiler other than MSVC, the compiler used to build pythonNN.dll. - Problem 1: The so-called "Very High Level" functions that take FILE * + Problem 1: The so-called "Very High Level" functions that take ``FILE *`` arguments will not work in a multi-compiler environment because each - compiler's notion of a struct FILE will be different. From an implementation - standpoint these are very _low_ level functions. + compiler's notion of a ``struct FILE`` will be different. From an implementation + standpoint these are very low level functions. Problem 2: SWIG generates the following code when generating wrappers to void functions: diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 9385b8ddd13d10..3d74d550dc345a 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -210,6 +210,16 @@ Glossary A list of bytecode instructions can be found in the documentation for :ref:`the dis module `. + callable + A callable is an object that can be called, possibly with a set + of arguments (see :term:`argument`), with the following syntax:: + + callable(argument1, argument2, ...) + + A :term:`function`, and by extension a :term:`method`, is a callable. + An instance of a class that implements the :meth:`~object.__call__` + method is also a callable. + callback A subroutine function which is passed as an argument to be executed at some point in the future. @@ -872,7 +882,7 @@ Glossary package A Python :term:`module` which can contain submodules or recursively, - subpackages. Technically, a package is a Python module with an + subpackages. Technically, a package is a Python module with a ``__path__`` attribute. See also :term:`regular package` and :term:`namespace package`. diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index f3ad117a3d3bc6..f4d08e75d94642 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -761,9 +761,9 @@ your program, just in case they don't know:: if args.quiet: print(answer) elif args.verbose: - print("{} to the power {} equals {}".format(args.x, args.y, answer)) + print(f"{args.x} to the power {args.y} equals {answer}") else: - print("{}^{} == {}".format(args.x, args.y, answer)) + print(f"{args.x}^{args.y} == {answer}") Note that slight difference in the usage text. Note the ``[-v | -q]``, which tells us that we can either use ``-v`` or ``-q``, diff --git a/Doc/howto/clinic.rst b/Doc/howto/clinic.rst index d634c4b47db90e..a97f1d23f53f31 100644 --- a/Doc/howto/clinic.rst +++ b/Doc/howto/clinic.rst @@ -1,5 +1,7 @@ .. highlight:: c +.. _howto-clinic: + ********************** Argument Clinic How-To ********************** @@ -539,7 +541,15 @@ Let's dive in! }; -16. Compile, then run the relevant portions of the regression-test suite. +16. 
Argument Clinic may generate new instances of ``_Py_ID``. For example:: + + &_Py_ID(new_unique_py_id) + + If it does, you'll have to run ``Tools/scripts/generate_global_objects.py`` + to regenerate the list of precompiled identifiers at this point. + + +17. Compile, then run the relevant portions of the regression-test suite. This change should not introduce any new compile-time warnings or errors, and there should be no externally visible change to Python's behavior. diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index 91a6c31c33b8bd..74710d9b3fc2ed 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -847,7 +847,7 @@ afterwards, :meth:`__set_name__` will need to be called manually. ORM example ----------- -The following code is simplified skeleton showing how data descriptors could +The following code is a simplified skeleton showing how data descriptors could be used to implement an `object relational mapping `_. @@ -1535,6 +1535,8 @@ by member descriptors: def __get__(self, obj, objtype=None): 'Emulate member_get() in Objects/descrobject.c' # Also see PyMember_GetOne() in Python/structmember.c + if obj is None: + return self value = obj._slotvalues[self.offset] if value is null: raise AttributeError(self.name) @@ -1563,13 +1565,13 @@ variables: class Type(type): 'Simulate how the type metaclass adds member objects for slots' - def __new__(mcls, clsname, bases, mapping): + def __new__(mcls, clsname, bases, mapping, **kwargs): 'Emulate type_new() in Objects/typeobject.c' # type_new() calls PyTypeReady() which calls add_methods() slot_names = mapping.get('slot_names', []) for offset, name in enumerate(slot_names): mapping[name] = Member(name, clsname, offset) - return type.__new__(mcls, clsname, bases, mapping) + return type.__new__(mcls, clsname, bases, mapping, **kwargs) The :meth:`object.__new__` method takes care of creating instances that have slots instead of an instance dictionary. Here is a rough simulation in pure @@ -1580,7 +1582,7 @@ Python: class Object: 'Simulate how object.__new__() allocates memory for __slots__' - def __new__(cls, *args): + def __new__(cls, *args, **kwargs): 'Emulate object_new() in Objects/typeobject.c' inst = super().__new__(cls) if hasattr(cls, 'slot_names'): @@ -1593,7 +1595,7 @@ Python: cls = type(self) if hasattr(cls, 'slot_names') and name not in cls.slot_names: raise AttributeError( - f'{type(self).__name__!r} object has no attribute {name!r}' + f'{cls.__name__!r} object has no attribute {name!r}' ) super().__setattr__(name, value) @@ -1602,7 +1604,7 @@ Python: cls = type(self) if hasattr(cls, 'slot_names') and name not in cls.slot_names: raise AttributeError( - f'{type(self).__name__!r} object has no attribute {name!r}' + f'{cls.__name__!r} object has no attribute {name!r}' ) super().__delattr__(name) diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index 376934a0e79365..98d9f4febe2dfa 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -173,6 +173,7 @@ yourself some work and use :func:`auto()` for the values:: ... FRIDAY = auto() ... SATURDAY = auto() ... SUNDAY = auto() + ... WEEKEND = SATURDAY | SUNDAY .. _enum-advanced-tutorial: @@ -305,6 +306,10 @@ Iterating over the members of an enum does not provide the aliases:: >>> list(Shape) [, , ] + >>> list(Weekday) + [, , , , , , ] + +Note that the aliases ``Shape.ALIAS_FOR_SQUARE`` and ``Weekday.WEEKEND`` aren't shown. The special attribute ``__members__`` is a read-only ordered mapping of names to members. 
It includes all names defined in the enumeration, including the @@ -324,6 +329,11 @@ the enumeration members. For example, finding all the aliases:: >>> [name for name, member in Shape.__members__.items() if member.name != name] ['ALIAS_FOR_SQUARE'] +.. note:: + + Aliases for flags include values with multiple flags set, such as ``3``, + and no flags set, i.e. ``0``. + Comparisons ----------- @@ -751,7 +761,7 @@ flags being set, the boolean evaluation is :data:`False`:: False Individual flags should have values that are powers of two (1, 2, 4, 8, ...), -while combinations of flags won't:: +while combinations of flags will not:: >>> class Color(Flag): ... RED = auto() @@ -1096,8 +1106,8 @@ example of when ``KEEP`` is needed). .. _enum-class-differences: -How are Enums different? ------------------------- +How are Enums and Flags different? +---------------------------------- Enums have a custom metaclass that affects many aspects of both derived :class:`Enum` classes and their instances (members). @@ -1109,11 +1119,18 @@ Enum Classes The :class:`EnumType` metaclass is responsible for providing the :meth:`__contains__`, :meth:`__dir__`, :meth:`__iter__` and other methods that allow one to do things with an :class:`Enum` class that fail on a typical -class, such as `list(Color)` or `some_enum_var in Color`. :class:`EnumType` is +class, such as ``list(Color)`` or ``some_enum_var in Color``. :class:`EnumType` is responsible for ensuring that various other methods on the final :class:`Enum` class are correct (such as :meth:`__new__`, :meth:`__getnewargs__`, :meth:`__str__` and :meth:`__repr__`). +Flag Classes +^^^^^^^^^^^^ + +Flags have an expanded view of aliasing: to be canonical, the value of a flag +needs to be a power-of-two value, and not a duplicate name. So, in addition to the +:class:`Enum` definition of alias, a flag with no value (a.k.a. ``0``) or with more than one +power-of-two value (e.g. ``3``) is considered an alias. Enum Members (aka instances) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1123,9 +1140,35 @@ The most interesting thing about enum members is that they are singletons. and then puts a custom :meth:`__new__` in place to ensure that no new ones are ever instantiated by returning only the existing member instances. +Flag Members +^^^^^^^^^^^^ + +Flag members can be iterated over just like the :class:`Flag` class, and only the +canonical members will be returned. For example:: + + >>> list(Color) + [, , ] + +(Note that ``BLACK``, ``PURPLE``, and ``WHITE`` do not show up.) + +Inverting a flag member returns the corresponding positive value, +rather than a negative value --- for example:: + + >>> ~Color.RED + + +Flag members have a length corresponding to the number of power-of-two values +they contain. For example:: + + >>> len(Color.PURPLE) + 2 + .. _enum-cookbook: +Enum Cookbook +------------- + While :class:`Enum`, :class:`IntEnum`, :class:`StrEnum`, :class:`Flag`, and :class:`IntFlag` are expected to cover the majority of use-cases, they cannot @@ -1299,7 +1342,7 @@ enumerations):: DuplicateFreeEnum ^^^^^^^^^^^^^^^^^ -Raises an error if a duplicate member name is found instead of creating an +Raises an error if a duplicate member value is found instead of creating an alias:: >>> class DuplicateFreeEnum(Enum): diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst index 7d30c343e372fe..38a651b0f964a6 100644 --- a/Doc/howto/functional.rst +++ b/Doc/howto/functional.rst @@ -994,7 +994,7 @@ requesting iterator-2 and its corresponding key. 
The functools module ==================== -The :mod:`functools` module in Python 2.5 contains some higher-order functions. +The :mod:`functools` module contains some higher-order functions. A **higher-order function** takes one or more functions as input and returns a new function. The most useful tool in this module is the :func:`functools.partial` function. diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index 8ee7e5e28479a5..2eddb582da7c24 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -411,7 +411,7 @@ that subclass, which may be defined in different module than yours. pass For a method to get its "defining class", it must use the -:c:data:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS` +:data:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS` :c:type:`calling convention ` and the corresponding :c:type:`PyCMethod` signature:: @@ -461,7 +461,7 @@ Module State Access from Slot Methods, Getters and Setters .. After adding to limited API: - If you use the `limited API __, + If you use the :ref:`limited API , you must update ``Py_LIMITED_API`` to ``0x030b0000``, losing ABI compatibility with earlier versions. diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index ff7ba0789608ff..bf6f54a841a7b9 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -276,6 +276,211 @@ choose a different directory name for the log - just ensure that the directory e and that you have the permissions to create and update files in it. +.. _custom-level-handling: + +Custom handling of levels +------------------------- + +Sometimes, you might want to do something slightly different from the standard +handling of levels in handlers, where all levels above a threshold get +processed by a handler. To do this, you need to use filters. Let's look at a +scenario where you want to arrange things as follows: + +* Send messages of severity ``INFO`` and ``WARNING`` to ``sys.stdout`` +* Send messages of severity ``ERROR`` and above to ``sys.stderr`` +* Send messages of severity ``DEBUG`` and above to file ``app.log`` + +Suppose you configure logging with the following JSON: + +.. code-block:: json + + { + "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(levelname)-8s - %(message)s" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + }, + "stderr": { + "class": "logging.StreamHandler", + "level": "ERROR", + "formatter": "simple", + "stream": "ext://sys.stderr" + }, + "file": { + "class": "logging.FileHandler", + "formatter": "simple", + "filename": "app.log", + "mode": "w" + } + }, + "root": { + "level": "DEBUG", + "handlers": [ + "stderr", + "stdout", + "file" + ] + } + } + +This configuration does *almost* what we want, except that ``sys.stdout`` would +show messages of severity ``ERROR`` and above as well as ``INFO`` and +``WARNING`` messages. To prevent this, we can set up a filter which excludes +those messages and add it to the relevant handler. This can be configured by +adding a ``filters`` section parallel to ``formatters`` and ``handlers``: + +.. code-block:: json + + "filters": { + "warnings_and_below": { + "()" : "__main__.filter_maker", + "level": "WARNING" + } + } + +and changing the section on the ``stdout`` handler to add it: + +.. 
code-block:: json + + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + "filters": ["warnings_and_below"] + } + +A filter is just a function, so we can define the ``filter_maker`` (a factory +function) as follows: + +.. code-block:: python + + def filter_maker(level): + level = getattr(logging, level) + + def filter(record): + return record.levelno <= level + + return filter + +This converts the string argument passed in to a numeric level, and returns a +function which only returns ``True`` if the level of the passed in record is +at or below the specified level. Note that in this example I have defined the +``filter_maker`` in a test script ``main.py`` that I run from the command line, +so its module will be ``__main__`` - hence the ``__main__.filter_maker`` in the +filter configuration. You will need to change that if you define it in a +different module. + +With the filter added, we can run ``main.py``, which in full is: + +.. code-block:: python + + import json + import logging + import logging.config + + CONFIG = ''' + { + "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(levelname)-8s - %(message)s" + } + }, + "filters": { + "warnings_and_below": { + "()" : "__main__.filter_maker", + "level": "WARNING" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + "filters": ["warnings_and_below"] + }, + "stderr": { + "class": "logging.StreamHandler", + "level": "ERROR", + "formatter": "simple", + "stream": "ext://sys.stderr" + }, + "file": { + "class": "logging.FileHandler", + "formatter": "simple", + "filename": "app.log", + "mode": "w" + } + }, + "root": { + "level": "DEBUG", + "handlers": [ + "stderr", + "stdout", + "file" + ] + } + } + ''' + + def filter_maker(level): + level = getattr(logging, level) + + def filter(record): + return record.levelno <= level + + return filter + + logging.config.dictConfig(json.loads(CONFIG)) + logging.debug('A DEBUG message') + logging.info('An INFO message') + logging.warning('A WARNING message') + logging.error('An ERROR message') + logging.critical('A CRITICAL message') + +And after running it like this: + +.. code-block:: shell + + python main.py 2>stderr.log >stdout.log + +We can see the results are as expected: + +.. code-block:: shell + + $ more *.log + :::::::::::::: + app.log + :::::::::::::: + DEBUG - A DEBUG message + INFO - An INFO message + WARNING - A WARNING message + ERROR - An ERROR message + CRITICAL - A CRITICAL message + :::::::::::::: + stderr.log + :::::::::::::: + ERROR - An ERROR message + CRITICAL - A CRITICAL message + :::::::::::::: + stdout.log + :::::::::::::: + INFO - An INFO message + WARNING - A WARNING message + + Configuration server example ---------------------------- @@ -332,6 +537,8 @@ configuration:: print('complete') +.. _blocking-handlers: + Dealing with handlers that block -------------------------------- @@ -558,13 +765,71 @@ serialization. Running a logging socket listener in production ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -To run a logging listener in production, you may need to use a process-management tool -such as `Supervisor `_. `Here -`_ is a Gist which -provides the bare-bones files to run the above functionality using Supervisor: you -will need to change the `/path/to/` parts in the Gist to reflect the actual paths you -want to use. - +.. 
_socket-listener-gist: https://gist.github.com/vsajip/4b227eeec43817465ca835ca66f75e2b + +To run a logging listener in production, you may need to use a +process-management tool such as `Supervisor `_. +`Here is a Gist `__ +which provides the bare-bones files to run the above functionality using +Supervisor. It consists of the following files: + ++-------------------------+----------------------------------------------------+ +| File | Purpose | ++=========================+====================================================+ +| :file:`prepare.sh` | A Bash script to prepare the environment for | +| | testing | ++-------------------------+----------------------------------------------------+ +| :file:`supervisor.conf` | The Supervisor configuration file, which has | +| | entries for the listener and a multi-process web | +| | application | ++-------------------------+----------------------------------------------------+ +| :file:`ensure_app.sh` | A Bash script to ensure that Supervisor is running | +| | with the above configuration | ++-------------------------+----------------------------------------------------+ +| :file:`log_listener.py` | The socket listener program which receives log | +| | events and records them to a file | ++-------------------------+----------------------------------------------------+ +| :file:`main.py` | A simple web application which performs logging | +| | via a socket connected to the listener | ++-------------------------+----------------------------------------------------+ +| :file:`webapp.json` | A JSON configuration file for the web application | ++-------------------------+----------------------------------------------------+ +| :file:`client.py` | A Python script to exercise the web application | ++-------------------------+----------------------------------------------------+ + +The web application uses `Gunicorn `_, which is a +popular web application server that starts multiple worker processes to handle +requests. This example setup shows how the workers can write to the same log file +without conflicting with one another --- they all go through the socket listener. + +To test these files, do the following in a POSIX environment: + +#. Download `the Gist `__ + as a ZIP archive using the :guilabel:`Download ZIP` button. + +#. Unzip the above files from the archive into a scratch directory. + +#. In the scratch directory, run ``bash prepare.sh`` to get things ready. + This creates a :file:`run` subdirectory to contain Supervisor-related and + log files, and a :file:`venv` subdirectory to contain a virtual environment + into which ``bottle``, ``gunicorn`` and ``supervisor`` are installed. + +#. Run ``bash ensure_app.sh`` to ensure that Supervisor is running with + the above configuration. + +#. Run ``venv/bin/python client.py`` to exercise the web application, + which will lead to records being written to the log. + +#. Inspect the log files in the :file:`run` subdirectory. You should see the + most recent log lines in files matching the pattern :file:`app.log*`. They won't be in + any particular order, since they have been handled concurrently by different + worker processes in a non-deterministic way. + +#. You can shut down the listener and the web application by running + ``venv/bin/supervisorctl -c supervisor.conf shutdown``. + +You may need to tweak the configuration files in the unlikely event that the +configured ports clash with something else in your test environment. .. 
_context-info: @@ -2774,7 +3039,7 @@ Formatting times using UTC (GMT) via configuration -------------------------------------------------- Sometimes you want to format times using UTC, which can be done using a class -such as `UTCFormatter`, shown below:: +such as ``UTCFormatter``, shown below:: import logging import time @@ -3501,10 +3766,75 @@ instance). Then, you'd get this kind of result: WARNING:demo:Bar >>> -Of course, these above examples show output according to the format used by +Of course, the examples above show output according to the format used by :func:`~logging.basicConfig`, but you can use a different formatter when you configure logging. +Note that with the above scheme, you are somewhat at the mercy of buffering and +the sequence of write calls which you are intercepting. For example, with the +definition of ``LoggerWriter`` above, if you have the snippet + +.. code-block:: python + + sys.stderr = LoggerWriter(logger, logging.WARNING) + 1 / 0 + +then running the script results in + +.. code-block:: text + + WARNING:demo:Traceback (most recent call last): + + WARNING:demo: File "/home/runner/cookbook-loggerwriter/test.py", line 53, in + + WARNING:demo: + WARNING:demo:main() + WARNING:demo: File "/home/runner/cookbook-loggerwriter/test.py", line 49, in main + + WARNING:demo: + WARNING:demo:1 / 0 + WARNING:demo:ZeroDivisionError + WARNING:demo:: + WARNING:demo:division by zero + +As you can see, this output isn't ideal. That's because the underlying code +which writes to ``sys.stderr`` makes mutiple writes, each of which results in a +separate logged line (for example, the last three lines above). To get around +this problem, you need to buffer things and only output log lines when newlines +are seen. Let's use a slghtly better implementation of ``LoggerWriter``: + +.. code-block:: python + + class BufferingLoggerWriter(LoggerWriter): + def __init__(self, logger, level): + super().__init__(logger, level) + self.buffer = '' + + def write(self, message): + if '\n' not in message: + self.buffer += message + else: + parts = message.split('\n') + if self.buffer: + s = self.buffer + parts.pop(0) + self.logger.log(self.level, s) + self.buffer = parts.pop() + for part in parts: + self.logger.log(self.level, part) + +This just buffers up stuff until a newline is seen, and then logs complete +lines. With this approach, you get better output: + +.. code-block:: text + + WARNING:demo:Traceback (most recent call last): + WARNING:demo: File "/home/runner/cookbook-loggerwriter/main.py", line 55, in + WARNING:demo: main() + WARNING:demo: File "/home/runner/cookbook-loggerwriter/main.py", line 52, in main + WARNING:demo: 1/0 + WARNING:demo:ZeroDivisionError: division by zero + + .. patterns-to-avoid: Patterns to avoid @@ -3515,7 +3845,6 @@ need to do or deal with, it is worth mentioning some usage patterns which are *unhelpful*, and which should therefore be avoided in most cases. The following sections are in no particular order. - Opening the same log file multiple times ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3564,7 +3893,6 @@ that in other languages such as Java and C#, loggers are often static class attributes. However, this pattern doesn't make sense in Python, where the module (and not the class) is the unit of software decomposition. - Adding handlers other than :class:`NullHandler` to a logger in a library ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3573,7 +3901,6 @@ responsibility of the application developer, not the library developer. 
If you are maintaining a library, ensure that you don't add handlers to any of your loggers other than a :class:`~logging.NullHandler` instance. - Creating a lot of loggers ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst index 0caff13bd15d82..145449b2dfbd9f 100644 --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -555,14 +555,14 @@ raw message. If there is no date format string, the default date format is: %Y-%m-%d %H:%M:%S -with the milliseconds tacked on at the end. The ``style`` is one of `%`, '{' -or '$'. If one of these is not specified, then '%' will be used. +with the milliseconds tacked on at the end. The ``style`` is one of ``'%'``, +``'{'``, or ``'$'``. If one of these is not specified, then ``'%'`` will be used. -If the ``style`` is '%', the message format string uses +If the ``style`` is ``'%'``, the message format string uses ``%()s`` styled string substitution; the possible keys are -documented in :ref:`logrecord-attributes`. If the style is '{', the message +documented in :ref:`logrecord-attributes`. If the style is ``'{'``, the message format string is assumed to be compatible with :meth:`str.format` (using -keyword arguments), while if the style is '$' then the message format string +keyword arguments), while if the style is ``'$'`` then the message format string should conform to what is expected by :meth:`string.Template.substitute`. .. versionchanged:: 3.2 diff --git a/Doc/howto/perf_profiling.rst b/Doc/howto/perf_profiling.rst index ed8de888b3bc21..ad2eb7b4d58aa5 100644 --- a/Doc/howto/perf_profiling.rst +++ b/Doc/howto/perf_profiling.rst @@ -8,10 +8,11 @@ Python support for the Linux ``perf`` profiler :author: Pablo Galindo -The Linux ``perf`` profiler is a very powerful tool that allows you to profile and -obtain information about the performance of your application. ``perf`` also has -a very vibrant ecosystem of tools that aid with the analysis of the data that it -produces. +`The Linux perf profiler `_ +is a very powerful tool that allows you to profile and obtain +information about the performance of your application. +``perf`` also has a very vibrant ecosystem of tools +that aid with the analysis of the data that it produces. The main problem with using the ``perf`` profiler with Python applications is that ``perf`` only allows to get information about native symbols, this is, the names of @@ -25,7 +26,7 @@ fly before the execution of every Python function and it will teach ``perf`` the relationship between this piece of code and the associated Python function using `perf map files`_. -.. warning:: +.. note:: Support for the ``perf`` profiler is only currently available for Linux on selected architectures. Check the output of the configure build step or @@ -51,11 +52,11 @@ For example, consider the following script: if __name__ == "__main__": baz(1000000) -We can run perf to sample CPU stack traces at 9999 Hertz: +We can run ``perf`` to sample CPU stack traces at 9999 Hertz:: $ perf record -F 9999 -g -o perf.data python my_script.py -Then we can use perf report to analyze the data: +Then we can use ``perf`` report to analyze the data: .. code-block:: shell-session @@ -101,7 +102,7 @@ As you can see here, the Python functions are not shown in the output, only ``_P functions use the same C function to evaluate bytecode so we cannot know which Python function corresponds to which bytecode-evaluating function. 
-Instead, if we run the same experiment with perf support activated we get: +Instead, if we run the same experiment with ``perf`` support enabled we get: .. code-block:: shell-session @@ -147,52 +148,58 @@ Instead, if we run the same experiment with perf support activated we get: -Enabling perf profiling mode ----------------------------- +How to enable ``perf`` profiling support +---------------------------------------- -There are two main ways to activate the perf profiling mode. If you want it to be -active since the start of the Python interpreter, you can use the `-Xperf` option: +``perf`` profiling support can either be enabled from the start using +the environment variable :envvar:`PYTHONPERFSUPPORT` or the +:option:`-X perf <-X>` option, +or dynamically using :func:`sys.activate_stack_trampoline` and +:func:`sys.deactivate_stack_trampoline`. - $ python -Xperf my_script.py +The :mod:`!sys` functions take precedence over the :option:`!-X` option, +the :option:`!-X` option takes precedence over the environment variable. -You can also set the :envvar:`PYTHONPERFSUPPORT` to a nonzero value to actiavate perf -profiling mode globally. +Example, using the environment variable:: -There is also support for dynamically activating and deactivating the perf -profiling mode by using the APIs in the :mod:`sys` module: + $ PYTHONPERFSUPPORT=1 + $ python script.py + $ perf report -g -i perf.data -.. code-block:: python - - import sys - sys.activate_stack_trampoline("perf") +Example, using the :option:`!-X` option:: - # Run some code with Perf profiling active + $ python -X perf script.py + $ perf report -g -i perf.data - sys.deactivate_stack_trampoline() +Example, using the :mod:`sys` APIs in file :file:`example.py`: - # Perf profiling is not active anymore +.. code-block:: python -These APIs can be handy if you want to activate/deactivate profiling mode in -response to a signal or other communication mechanism with your process. + import sys + sys.activate_stack_trampoline("perf") + do_profiled_stuff() + sys.deactivate_stack_trampoline() + non_profiled_stuff() -Now we can analyze the data with ``perf report``: +...then:: - $ perf report -g -i perf.data + $ python ./example.py + $ perf report -g -i perf.data How to obtain the best results -------------------------------- +------------------------------ For the best results, Python should be compiled with ``CFLAGS="-fno-omit-frame-pointer -mno-omit-leaf-frame-pointer"`` as this allows profilers to unwind using only the frame pointer and not on DWARF debug -information. This is because as the code that is interposed to allow perf +information. This is because as the code that is interposed to allow ``perf`` support is dynamically generated it doesn't have any DWARF debugging information available. -You can check if you system has been compiled with this flag by running: +You can check if your system has been compiled with this flag by running:: $ python -m sysconfig | grep 'no-omit-frame-pointer' diff --git a/Doc/howto/sorting.rst b/Doc/howto/sorting.rst index 53cbe01e92144b..decce12bf3faf6 100644 --- a/Doc/howto/sorting.rst +++ b/Doc/howto/sorting.rst @@ -186,8 +186,8 @@ The `Timsort `_ algorithm used in Python does multiple sorts efficiently because it can take advantage of any ordering already present in a dataset. 
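As a rough, indicative sketch of that point (timings vary by machine and Python build, so treat the numbers as illustrative only)::

    import random
    import timeit

    ordered = list(range(100_000))
    shuffled = ordered[:]
    random.shuffle(shuffled)

    print(timeit.timeit(lambda: sorted(ordered), number=20))   # benefits from the existing order
    print(timeit.timeit(lambda: sorted(shuffled), number=20))  # noticeably slower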
-The Old Way Using Decorate-Sort-Undecorate -========================================== +Decorate-Sort-Undecorate +======================== This idiom is called Decorate-Sort-Undecorate after its three steps: @@ -226,90 +226,36 @@ after Randal L. Schwartz, who popularized it among Perl programmers. Now that Python sorting provides key-functions, this technique is not often needed. +Comparison Functions +==================== -The Old Way Using the *cmp* Parameter -===================================== - -Many constructs given in this HOWTO assume Python 2.4 or later. Before that, -there was no :func:`sorted` builtin and :meth:`list.sort` took no keyword -arguments. Instead, all of the Py2.x versions supported a *cmp* parameter to -handle user specified comparison functions. - -In Py3.0, the *cmp* parameter was removed entirely (as part of a larger effort to -simplify and unify the language, eliminating the conflict between rich -comparisons and the :meth:`__cmp__` magic method). - -In Py2.x, sort allowed an optional function which can be called for doing the -comparisons. That function should take two arguments to be compared and then -return a negative value for less-than, return zero if they are equal, or return -a positive value for greater-than. For example, we can do: - -.. doctest:: - - >>> def numeric_compare(x, y): - ... return x - y - >>> sorted([5, 2, 4, 1, 3], cmp=numeric_compare) # doctest: +SKIP - [1, 2, 3, 4, 5] - -Or you can reverse the order of comparison with: - -.. doctest:: - - >>> def reverse_numeric(x, y): - ... return y - x - >>> sorted([5, 2, 4, 1, 3], cmp=reverse_numeric) # doctest: +SKIP - [5, 4, 3, 2, 1] - -When porting code from Python 2.x to 3.x, the situation can arise when you have -the user supplying a comparison function and you need to convert that to a key -function. The following wrapper makes that easy to do: - -.. testcode:: - - def cmp_to_key(mycmp): - 'Convert a cmp= function into a key= function' - class K: - def __init__(self, obj, *args): - self.obj = obj - def __lt__(self, other): - return mycmp(self.obj, other.obj) < 0 - def __gt__(self, other): - return mycmp(self.obj, other.obj) > 0 - def __eq__(self, other): - return mycmp(self.obj, other.obj) == 0 - def __le__(self, other): - return mycmp(self.obj, other.obj) <= 0 - def __ge__(self, other): - return mycmp(self.obj, other.obj) >= 0 - def __ne__(self, other): - return mycmp(self.obj, other.obj) != 0 - return K - -.. doctest:: - :hide: +Unlike key functions that return an absolute value for sorting, a comparison +function computes the relative ordering for two inputs. - >>> sorted([5, 2, 4, 1, 3], key=cmp_to_key(reverse_numeric)) - [5, 4, 3, 2, 1] +For example, a `balance scale +`_ +compares two samples giving a relative ordering: lighter, equal, or heavier. +Likewise, a comparison function such as ``cmp(a, b)`` will return a negative +value for less-than, zero if the inputs are equal, or a positive value for +greater-than. -To convert to a key function, just wrap the old comparison function: - -.. testsetup:: - - from functools import cmp_to_key - -.. doctest:: +It is common to encounter comparison functions when translating algorithms from +other languages. Also, some libraries provide comparison functions as part of +their API. For example, :func:`locale.strcoll` is a comparison function. 
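As a sketch of the idea, a hand-written comparison function can be adapted with :func:`functools.cmp_to_key` (introduced just below) and passed wherever a key function is expected::

    from functools import cmp_to_key

    def reverse_numeric(x, y):
        # negative, zero or positive, depending on the relative order of x and y
        return y - x

    print(sorted([5, 2, 4, 1, 3], key=cmp_to_key(reverse_numeric)))  # [5, 4, 3, 2, 1]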
- >>> sorted([5, 2, 4, 1, 3], key=cmp_to_key(reverse_numeric)) - [5, 4, 3, 2, 1] +To accommodate those situations, Python provides +:class:`functools.cmp_to_key` to wrap the comparison function +to make it usable as a key function:: -In Python 3.2, the :func:`functools.cmp_to_key` function was added to the -:mod:`functools` module in the standard library. + sorted(words, key=cmp_to_key(strcoll)) # locale-aware sort order Odds and Ends ============= * For locale aware sorting, use :func:`locale.strxfrm` for a key function or - :func:`locale.strcoll` for a comparison function. + :func:`locale.strcoll` for a comparison function. This is necessary + because "alphabetical" sort orderings can vary across cultures even + if the underlying alphabet is the same. * The *reverse* parameter still maintains sort stability (so that records with equal keys retain the original order). Interestingly, that effect can be diff --git a/Doc/includes/custom2.c b/Doc/includes/custom2.c index 2a3c59f8f04c3d..a3b2d6ab78d3c4 100644 --- a/Doc/includes/custom2.c +++ b/Doc/includes/custom2.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include -#include "structmember.h" +#include /* for offsetof() */ typedef struct { PyObject_HEAD @@ -42,7 +42,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", "number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist, &first, &last, @@ -50,26 +50,20 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_XDECREF(tmp); + Py_XSETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = self->last; - Py_INCREF(last); - self->last = last; - Py_XDECREF(tmp); + Py_XSETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"first", T_OBJECT_EX, offsetof(CustomObject, first), 0, + {"first", Py_T_OBJECT_EX, offsetof(CustomObject, first), 0, "first name"}, - {"last", T_OBJECT_EX, offsetof(CustomObject, last), 0, + {"last", Py_T_OBJECT_EX, offsetof(CustomObject, last), 0, "last name"}, - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -127,9 +121,7 @@ PyInit_custom2(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/custom3.c b/Doc/includes/custom3.c index 5a47530f0a6b0d..1a68bc4be8c399 100644 --- a/Doc/includes/custom3.c +++ b/Doc/includes/custom3.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include -#include "structmember.h" +#include /* for offsetof() */ typedef struct { PyObject_HEAD @@ -42,7 +42,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", "number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|UUi", kwlist, &first, &last, @@ -50,22 +50,16 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = 
self->last; - Py_INCREF(last); - self->last = last; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -73,14 +67,12 @@ static PyMemberDef Custom_members[] = { static PyObject * Custom_getfirst(CustomObject *self, void *closure) { - Py_INCREF(self->first); - return self->first; + return Py_NewRef(self->first); } static int Custom_setfirst(CustomObject *self, PyObject *value, void *closure) { - PyObject *tmp; if (value == NULL) { PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute"); return -1; @@ -90,24 +82,19 @@ Custom_setfirst(CustomObject *self, PyObject *value, void *closure) "The first attribute value must be a string"); return -1; } - tmp = self->first; - Py_INCREF(value); - self->first = value; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(value)); return 0; } static PyObject * Custom_getlast(CustomObject *self, void *closure) { - Py_INCREF(self->last); - return self->last; + return Py_NewRef(self->last); } static int Custom_setlast(CustomObject *self, PyObject *value, void *closure) { - PyObject *tmp; if (value == NULL) { PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute"); return -1; @@ -117,10 +104,7 @@ Custom_setlast(CustomObject *self, PyObject *value, void *closure) "The last attribute value must be a string"); return -1; } - tmp = self->last; - Py_INCREF(value); - self->last = value; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(value)); return 0; } @@ -178,9 +162,7 @@ PyInit_custom3(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/custom4.c b/Doc/includes/custom4.c index c7ee55578488ed..b932d159d26e93 100644 --- a/Doc/includes/custom4.c +++ b/Doc/includes/custom4.c @@ -1,6 +1,6 @@ #define PY_SSIZE_T_CLEAN #include -#include "structmember.h" +#include /* for offsetof() */ typedef struct { PyObject_HEAD @@ -58,7 +58,7 @@ static int Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"first", "last", "number", NULL}; - PyObject *first = NULL, *last = NULL, *tmp; + PyObject *first = NULL, *last = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|UUi", kwlist, &first, &last, @@ -66,22 +66,16 @@ Custom_init(CustomObject *self, PyObject *args, PyObject *kwds) return -1; if (first) { - tmp = self->first; - Py_INCREF(first); - self->first = first; - Py_DECREF(tmp); + Py_SETREF(self->first, Py_NewRef(first)); } if (last) { - tmp = self->last; - Py_INCREF(last); - self->last = last; - Py_DECREF(tmp); + Py_SETREF(self->last, Py_NewRef(last)); } return 0; } static PyMemberDef Custom_members[] = { - {"number", T_INT, offsetof(CustomObject, number), 0, + {"number", Py_T_INT, offsetof(CustomObject, number), 0, "custom number"}, {NULL} /* Sentinel */ }; @@ -89,8 +83,7 @@ static PyMemberDef Custom_members[] = { static PyObject * Custom_getfirst(CustomObject *self, void *closure) { - Py_INCREF(self->first); - return self->first; + return Py_NewRef(self->first); } static int @@ -105,17 +98,14 @@ Custom_setfirst(CustomObject *self, PyObject *value, void *closure) "The first attribute value must be a string"); return -1; } - Py_INCREF(value); - 
Py_CLEAR(self->first); - self->first = value; + Py_XSETREF(self->first, Py_NewRef(value)); return 0; } static PyObject * Custom_getlast(CustomObject *self, void *closure) { - Py_INCREF(self->last); - return self->last; + return Py_NewRef(self->last); } static int @@ -130,9 +120,7 @@ Custom_setlast(CustomObject *self, PyObject *value, void *closure) "The last attribute value must be a string"); return -1; } - Py_INCREF(value); - Py_CLEAR(self->last); - self->last = value; + Py_XSETREF(self->last, Py_NewRef(value)); return 0; } @@ -192,9 +180,7 @@ PyInit_custom4(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Tools/scripts/diff.py b/Doc/includes/diff.py old mode 100755 new mode 100644 similarity index 98% rename from Tools/scripts/diff.py rename to Doc/includes/diff.py index 96199b85116d7d..001619f5f83fc0 --- a/Tools/scripts/diff.py +++ b/Doc/includes/diff.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ Command line interface to difflib.py providing diffs in four formats: * ndiff: lists every line and highlights interline changes. diff --git a/Tools/scripts/ndiff.py b/Doc/includes/ndiff.py old mode 100755 new mode 100644 similarity index 80% rename from Tools/scripts/ndiff.py rename to Doc/includes/ndiff.py index c6d09b8f242fef..32c251bce9120b --- a/Tools/scripts/ndiff.py +++ b/Doc/includes/ndiff.py @@ -1,16 +1,3 @@ -#! /usr/bin/env python3 - -# Module ndiff version 1.7.0 -# Released to the public domain 08-Dec-2000, -# by Tim Peters (tim.one@home.com). - -# Provided as-is; use at your own risk; no warranty; no promises; enjoy! - -# ndiff.py is now simply a front-end to the difflib.ndiff() function. -# Originally, it contained the difflib.SequenceMatcher class as well. -# This completes the raiding of reusable code from this formerly -# self-contained script. - """ndiff [-q] file1 file2 or ndiff (-r1 | -r2) < ndiff_output > file1_or_file2 @@ -121,13 +108,4 @@ def restore(which): sys.stdout.writelines(restored) if __name__ == '__main__': - args = sys.argv[1:] - if "-profile" in args: - import profile, pstats - args.remove("-profile") - statf = "ndiff.pro" - profile.run("main(args)", statf) - stats = pstats.Stats(statf) - stats.strip_dirs().sort_stats('time').print_stats() - else: - main(args) + main(sys.argv[1:]) diff --git a/Doc/includes/tzinfo_examples.py b/Doc/includes/tzinfo_examples.py index 9b9e32a553e7d8..1fa6e615e46a76 100644 --- a/Doc/includes/tzinfo_examples.py +++ b/Doc/includes/tzinfo_examples.py @@ -71,7 +71,7 @@ def first_sunday_on_or_after(dt): # DST start and end times. For a complete and up-to-date set of DST rules # and timezone definitions, visit the Olson Database (or try pytz): # http://www.twinsun.com/tz/tz-link.htm -# http://sourceforge.net/projects/pytz/ (might not be up-to-date) +# https://sourceforge.net/projects/pytz/ (might not be up-to-date) # # In the US, since 2007, DST starts at 2am (standard time) on the second # Sunday in March, which is the first Sunday on or after Mar 8. 
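For readers who want to see that DST rule in code, here is a minimal standalone sketch of the "first Sunday on or after" computation that the helper in ``tzinfo_examples.py`` is named after; it is only an illustration working on :class:`datetime.date` values, not the file's actual implementation:

.. code-block:: python

   from datetime import date, timedelta

   def first_sunday_on_or_after(d):
       # date.weekday(): Monday == 0 ... Sunday == 6
       return d + timedelta(days=(6 - d.weekday()) % 7)

   # Since 2007, US DST starts on the second Sunday in March,
   # i.e. the first Sunday on or after March 8.
   print(first_sunday_on_or_after(date(2023, 3, 8)))  # 2023-03-12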
diff --git a/Doc/install/index.rst b/Doc/install/index.rst index 3fc670b191424d..ab581d785ef7f0 100644 --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -765,7 +765,7 @@ And on Windows, the configuration files are: +--------------+-------------------------------------------------+-------+ On all platforms, the "personal" file can be temporarily disabled by -passing the `--no-user-cfg` option. +passing the ``--no-user-cfg`` option. Notes: diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index 9df9e7914e093b..122692a428594f 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -57,6 +57,8 @@ This module defines the following constants and functions: When the function raises a :exc:`SystemExit` exception, it is silently ignored. + .. audit-event:: _thread.start_new_thread function,args,kwargs start_new_thread + .. versionchanged:: 3.8 :func:`sys.unraisablehook` is now used to handle unhandled exceptions. diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index 5d76ab017edef4..f8839d0986d047 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -63,7 +63,7 @@ Name Description action_ Specify how an argument should be handled ``'store'``, ``'store_const'``, ``'store_true'``, ``'append'``, ``'append_const'``, ``'count'``, ``'help'``, ``'version'`` choices_ Limit values to a specific set of choices ``['foo', 'bar']``, ``range(1, 10)``, or :class:`~collections.abc.Container` instance const_ Store a constant value -default_ Default value used when an argument is not provided Defaults to *None* +default_ Default value used when an argument is not provided Defaults to ``None`` dest_ Specify the attribute name used in the result namespace help_ Help message for an argument metavar_ Alternate display name for the argument as shown in help @@ -201,9 +201,10 @@ ArgumentParser objects * usage_ - The string describing the program usage (default: generated from arguments added to parser) - * description_ - Text to display before the argument help (default: none) + * description_ - Text to display before the argument help + (by default, no text) - * epilog_ - Text to display after the argument help (default: none) + * epilog_ - Text to display after the argument help (by default, no text) * parents_ - A list of :class:`ArgumentParser` objects whose arguments should also be included @@ -1926,8 +1927,8 @@ FileType objects Namespace(out=<_io.TextIOWrapper name='file.txt' mode='w' encoding='UTF-8'>, raw=<_io.FileIO name='raw.dat' mode='wb'>) FileType objects understand the pseudo-argument ``'-'`` and automatically - convert this into ``sys.stdin`` for readable :class:`FileType` objects and - ``sys.stdout`` for writable :class:`FileType` objects:: + convert this into :data:`sys.stdin` for readable :class:`FileType` objects and + :data:`sys.stdout` for writable :class:`FileType` objects:: >>> parser = argparse.ArgumentParser() >>> parser.add_argument('infile', type=argparse.FileType('r')) @@ -1944,7 +1945,7 @@ Argument groups .. method:: ArgumentParser.add_argument_group(title=None, description=None) By default, :class:`ArgumentParser` groups command-line arguments into - "positional arguments" and "optional arguments" when displaying help + "positional arguments" and "options" when displaying help messages. 
When there is a better conceptual grouping of arguments than this default one, appropriate groups can be created using the :meth:`add_argument_group` method:: diff --git a/Doc/library/array.rst b/Doc/library/array.rst index 975670cc81a202..95f1eaf401b052 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -62,6 +62,14 @@ The actual representation of values is determined by the machine architecture (strictly speaking, by the C implementation). The actual size can be accessed through the :attr:`itemsize` attribute. +The module defines the following item: + + +.. data:: typecodes + + A string with all available type codes. + + The module defines the following type: @@ -79,9 +87,6 @@ The module defines the following type: .. audit-event:: array.__new__ typecode,initializer array.array -.. data:: typecodes - - A string with all available type codes. Array objects support the ordinary sequence operations of indexing, slicing, concatenation, and multiplication. When using slice assignment, the assigned diff --git a/Doc/library/asynchat.rst b/Doc/library/asynchat.rst deleted file mode 100644 index 32e04ad6d19533..00000000000000 --- a/Doc/library/asynchat.rst +++ /dev/null @@ -1,217 +0,0 @@ -:mod:`asynchat` --- Asynchronous socket command/response handler -================================================================ - -.. module:: asynchat - :synopsis: Support for asynchronous command/response protocols. - :deprecated: - -.. moduleauthor:: Sam Rushing -.. sectionauthor:: Steve Holden - -**Source code:** :source:`Lib/asynchat.py` - -.. deprecated-removed:: 3.6 3.12 - The :mod:`asynchat` module is deprecated - (see :pep:`PEP 594 <594#asynchat>` for details). - Please use :mod:`asyncio` instead. - --------------- - -.. note:: - - This module exists for backwards compatibility only. For new code we - recommend using :mod:`asyncio`. - -This module builds on the :mod:`asyncore` infrastructure, simplifying -asynchronous clients and servers and making it easier to handle protocols -whose elements are terminated by arbitrary strings, or are of variable length. -:mod:`asynchat` defines the abstract class :class:`async_chat` that you -subclass, providing implementations of the :meth:`collect_incoming_data` and -:meth:`found_terminator` methods. It uses the same asynchronous loop as -:mod:`asyncore`, and the two types of channel, :class:`asyncore.dispatcher` -and :class:`asynchat.async_chat`, can freely be mixed in the channel map. -Typically an :class:`asyncore.dispatcher` server channel generates new -:class:`asynchat.async_chat` channel objects as it receives incoming -connection requests. - -.. include:: ../includes/wasm-notavail.rst - -.. class:: async_chat() - - This class is an abstract subclass of :class:`asyncore.dispatcher`. To make - practical use of the code you must subclass :class:`async_chat`, providing - meaningful :meth:`collect_incoming_data` and :meth:`found_terminator` - methods. - The :class:`asyncore.dispatcher` methods can be used, although not all make - sense in a message/response context. - - Like :class:`asyncore.dispatcher`, :class:`async_chat` defines a set of - events that are generated by an analysis of socket conditions after a - :c:func:`select` call. Once the polling loop has been started the - :class:`async_chat` object's methods are called by the event-processing - framework with no action on the part of the programmer. - - Two class attributes can be modified, to improve performance, or possibly - even to conserve memory. - - - .. 
data:: ac_in_buffer_size - - The asynchronous input buffer size (default ``4096``). - - - .. data:: ac_out_buffer_size - - The asynchronous output buffer size (default ``4096``). - - Unlike :class:`asyncore.dispatcher`, :class:`async_chat` allows you to - define a :abbr:`FIFO (first-in, first-out)` queue of *producers*. A producer need - have only one method, :meth:`more`, which should return data to be - transmitted on the channel. - The producer indicates exhaustion (*i.e.* that it contains no more data) by - having its :meth:`more` method return the empty bytes object. At this point - the :class:`async_chat` object removes the producer from the queue and starts - using the next producer, if any. When the producer queue is empty the - :meth:`handle_write` method does nothing. You use the channel object's - :meth:`set_terminator` method to describe how to recognize the end of, or - an important breakpoint in, an incoming transmission from the remote - endpoint. - - To build a functioning :class:`async_chat` subclass your input methods - :meth:`collect_incoming_data` and :meth:`found_terminator` must handle the - data that the channel receives asynchronously. The methods are described - below. - - -.. method:: async_chat.close_when_done() - - Pushes a ``None`` on to the producer queue. When this producer is popped off - the queue it causes the channel to be closed. - - -.. method:: async_chat.collect_incoming_data(data) - - Called with *data* holding an arbitrary amount of received data. The - default method, which must be overridden, raises a - :exc:`NotImplementedError` exception. - - -.. method:: async_chat.discard_buffers() - - In emergencies this method will discard any data held in the input and/or - output buffers and the producer queue. - - -.. method:: async_chat.found_terminator() - - Called when the incoming data stream matches the termination condition set - by :meth:`set_terminator`. The default method, which must be overridden, - raises a :exc:`NotImplementedError` exception. The buffered input data - should be available via an instance attribute. - - -.. method:: async_chat.get_terminator() - - Returns the current terminator for the channel. - - -.. method:: async_chat.push(data) - - Pushes data on to the channel's queue to ensure its transmission. - This is all you need to do to have the channel write the data out to the - network, although it is possible to use your own producers in more complex - schemes to implement encryption and chunking, for example. - - -.. method:: async_chat.push_with_producer(producer) - - Takes a producer object and adds it to the producer queue associated with - the channel. When all currently pushed producers have been exhausted the - channel will consume this producer's data by calling its :meth:`more` - method and send the data to the remote endpoint. - - -.. method:: async_chat.set_terminator(term) - - Sets the terminating condition to be recognized on the channel. ``term`` - may be any of three types of value, corresponding to three different ways - to handle incoming protocol data. 
- - +-----------+---------------------------------------------+ - | term | Description | - +===========+=============================================+ - | *string* | Will call :meth:`found_terminator` when the | - | | string is found in the input stream | - +-----------+---------------------------------------------+ - | *integer* | Will call :meth:`found_terminator` when the | - | | indicated number of characters have been | - | | received | - +-----------+---------------------------------------------+ - | ``None`` | The channel continues to collect data | - | | forever | - +-----------+---------------------------------------------+ - - Note that any data following the terminator will be available for reading - by the channel after :meth:`found_terminator` is called. - - -.. _asynchat-example: - -asynchat Example ----------------- - -The following partial example shows how HTTP requests can be read with -:class:`async_chat`. A web server might create an -:class:`http_request_handler` object for each incoming client connection. -Notice that initially the channel terminator is set to match the blank line at -the end of the HTTP headers, and a flag indicates that the headers are being -read. - -Once the headers have been read, if the request is of type POST (indicating -that further data are present in the input stream) then the -``Content-Length:`` header is used to set a numeric terminator to read the -right amount of data from the channel. - -The :meth:`handle_request` method is called once all relevant input has been -marshalled, after setting the channel terminator to ``None`` to ensure that -any extraneous data sent by the web client are ignored. :: - - - import asynchat - - class http_request_handler(asynchat.async_chat): - - def __init__(self, sock, addr, sessions, log): - asynchat.async_chat.__init__(self, sock=sock) - self.addr = addr - self.sessions = sessions - self.ibuffer = [] - self.obuffer = b"" - self.set_terminator(b"\r\n\r\n") - self.reading_headers = True - self.handling = False - self.cgi_data = None - self.log = log - - def collect_incoming_data(self, data): - """Buffer the data""" - self.ibuffer.append(data) - - def found_terminator(self): - if self.reading_headers: - self.reading_headers = False - self.parse_headers(b"".join(self.ibuffer)) - self.ibuffer = [] - if self.op.upper() == b"POST": - clen = self.headers.getheader("content-length") - self.set_terminator(int(clen)) - else: - self.handling = True - self.set_terminator(None) - self.handle_request() - elif not self.handling: - self.set_terminator(None) # browsers sometimes over-send - self.cgi_data = parse(self.headers, b"".join(self.ibuffer)) - self.handling = True - self.ibuffer = [] - self.handle_request() diff --git a/Doc/library/asyncio-api-index.rst b/Doc/library/asyncio-api-index.rst index 54c1cd6582e494..ad475150fe7d91 100644 --- a/Doc/library/asyncio-api-index.rst +++ b/Doc/library/asyncio-api-index.rst @@ -57,7 +57,7 @@ await on multiple things with timeouts. - Monitor for completion. * - :func:`timeout` - - Run with a timeout. Useful in cases when `wait_for` is not suitable. + - Run with a timeout. Useful in cases when ``wait_for`` is not suitable. * - :func:`to_thread` - Asynchronously run a function in a separate OS thread. diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index 14f2c3533c9712..921a394a59fec7 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -149,7 +149,8 @@ adjusted:: Network logging can block the event loop. 
It is recommended to use -a separate thread for handling logs or use non-blocking IO. +a separate thread for handling logs or use non-blocking IO. For example, +see :ref:`blocking-handlers`. .. _asyncio-coroutine-not-scheduled: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 2f306b7edb8fe2..d0a1ed2b99e55d 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -877,9 +877,14 @@ TLS Upgrade Upgrade an existing transport-based connection to TLS. - Return a new transport instance, that the *protocol* must start using - immediately after the *await*. The *transport* instance passed to - the *start_tls* method should never be used again. + Create a TLS coder/decoder instance and insert it between the *transport* + and the *protocol*. The coder/decoder implements both *transport*-facing + protocol and *protocol*-facing transport. + + Return the created two-interface instance. After *await*, the *protocol* + must stop using the original *transport* and communicate with the returned + object only because the coder caches *protocol*-side data and sporadically + exchanges extra TLS session packets with *transport*. Parameters: @@ -1253,7 +1258,13 @@ Executing code in thread or process pools pool, cpu_bound) print('custom process pool', result) - asyncio.run(main()) + if __name__ == '__main__': + asyncio.run(main()) + + Note that the entry point guard (``if __name__ == '__main__'``) + is required for option 3 due to the peculiarities of :mod:`multiprocessing`, + which is used by :class:`~concurrent.futures.ProcessPoolExecutor`. + See :ref:`Safe importing of main module `. This method returns a :class:`asyncio.Future` object. diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst index 8e60877f0e4c7d..70cec9b2f90248 100644 --- a/Doc/library/asyncio-future.rst +++ b/Doc/library/asyncio-future.rst @@ -197,11 +197,6 @@ Future Object .. versionchanged:: 3.9 Added the *msg* parameter. - .. deprecated-removed:: 3.11 3.14 - *msg* parameter is ambiguous when multiple :meth:`cancel` - are called with different cancellation messages. - The argument will be removed. - .. method:: exception() Return the exception that was set on this Future. @@ -282,8 +277,3 @@ the Future has a result:: - :meth:`asyncio.Future.cancel` accepts an optional ``msg`` argument, but :func:`concurrent.futures.cancel` does not. - - .. deprecated-removed:: 3.11 3.14 - *msg* parameter is ambiguous when multiple :meth:`cancel` - are called with different cancellation messages. - The argument will be removed. diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst index bfc3e3090fdcdf..98c85015874689 100644 --- a/Doc/library/asyncio-policy.rst +++ b/Doc/library/asyncio-policy.rst @@ -88,12 +88,16 @@ The abstract event loop policy base class is defined as follows: This function is Unix specific. + .. deprecated:: 3.12 + .. method:: set_child_watcher(watcher) Set the current child process watcher to *watcher*. This function is Unix specific. + .. deprecated:: 3.12 + .. _asyncio-policy-builtin: @@ -158,12 +162,16 @@ implementation used by the asyncio event loop: Return the current child watcher for the current policy. + .. deprecated:: 3.12 + .. function:: set_child_watcher(watcher) Set the current child watcher to *watcher* for the current policy. *watcher* must implement methods defined in the :class:`AbstractChildWatcher` base class. + .. deprecated:: 3.12 + .. 
note:: Third-party event loops implementations might not support custom child watchers. For such event loops, using @@ -214,6 +222,9 @@ implementation used by the asyncio event loop: This method has to be called to ensure that underlying resources are cleaned-up. + .. deprecated:: 3.12 + + .. class:: ThreadedChildWatcher This implementation starts a new waiting thread for every subprocess spawn. @@ -245,6 +256,8 @@ implementation used by the asyncio event loop: .. versionadded:: 3.8 + .. deprecated:: 3.12 + .. class:: SafeChildWatcher This implementation uses active event loop from the main thread to handle @@ -257,6 +270,8 @@ implementation used by the asyncio event loop: This solution is as safe as :class:`MultiLoopChildWatcher` and has the same *O(N)* complexity but requires a running event loop in the main thread to work. + .. deprecated:: 3.12 + .. class:: FastChildWatcher This implementation reaps every terminated processes by calling @@ -269,6 +284,8 @@ implementation used by the asyncio event loop: This solution requires a running event loop in the main thread to work, as :class:`SafeChildWatcher`. + .. deprecated:: 3.12 + .. class:: PidfdChildWatcher This implementation polls process file descriptors (pidfds) to await child diff --git a/Doc/library/asyncio-protocol.rst b/Doc/library/asyncio-protocol.rst index 8b67f4b8957ef6..7bc906eaafc1f2 100644 --- a/Doc/library/asyncio-protocol.rst +++ b/Doc/library/asyncio-protocol.rst @@ -156,7 +156,8 @@ Base Transport will be received. After all buffered data is flushed, the protocol's :meth:`protocol.connection_lost() ` method will be called with - :const:`None` as its argument. + :const:`None` as its argument. The transport should not be + used once it is closed. .. method:: BaseTransport.is_closing() @@ -553,7 +554,7 @@ accept factories that return streaming protocols. a connection is open. However, :meth:`protocol.eof_received() ` - is called at most once. Once `eof_received()` is called, + is called at most once. Once ``eof_received()`` is called, ``data_received()`` is not called anymore. .. method:: Protocol.eof_received() diff --git a/Doc/library/asyncio-runner.rst b/Doc/library/asyncio-runner.rst index c43d664eba717f..b68b2570ef071e 100644 --- a/Doc/library/asyncio-runner.rst +++ b/Doc/library/asyncio-runner.rst @@ -22,7 +22,7 @@ to simplify async code usage for common wide-spread scenarios. Running an asyncio Program ========================== -.. function:: run(coro, *, debug=None) +.. function:: run(coro, *, debug=None, loop_factory=None) Execute the :term:`coroutine` *coro* and return the result. @@ -37,9 +37,11 @@ Running an asyncio Program debug mode explicitly. ``None`` is used to respect the global :ref:`asyncio-debug-mode` settings. - This function always creates a new event loop and closes it at - the end. It should be used as a main entry point for asyncio - programs, and should ideally only be called once. + If *loop_factory* is not ``None``, it is used to create a new event loop; + otherwise :func:`asyncio.new_event_loop` is used. The loop is closed at the end. + This function should be used as a main entry point for asyncio programs, + and should ideally only be called once. It is recommended to use + *loop_factory* to configure the event loop instead of policies. The executor is given a timeout duration of 5 minutes to shutdown. If the executor hasn't finished within that duration, a warning is @@ -62,6 +64,10 @@ Running an asyncio Program *debug* is ``None`` by default to respect the global debug mode settings. + .. 
versionchanged:: 3.12 + + Added *loop_factory* parameter. + Runner context manager ====================== diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index ce88d70d6607d9..d87e3c042c9977 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -183,7 +183,8 @@ StreamReader .. class:: StreamReader Represents a reader object that provides APIs to read data - from the IO stream. + from the IO stream. As an :term:`asynchronous iterable`, the + object supports the :keyword:`async for` statement. It is not recommended to instantiate *StreamReader* objects directly; use :func:`open_connection` and :func:`start_server` diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 28d0b21e8180b6..4274638c5e8625 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -175,7 +175,7 @@ their completion. * the :meth:`~asyncio.subprocess.Process.communicate` and :meth:`~asyncio.subprocess.Process.wait` methods don't have a - *timeout* parameter: use the :func:`wait_for` function; + *timeout* parameter: use the :func:`~asyncio.wait_for` function; * the :meth:`Process.wait() ` method is asynchronous, whereas :meth:`subprocess.Popen.wait` method diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index d922f614954fcd..631a5ddc1f6500 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -18,6 +18,10 @@ and Tasks. Coroutines ========== +**Source code:** :source:`Lib/asyncio/coroutines.py` + +---------------------------------------------------- + :term:`Coroutines ` declared with the async/await syntax is the preferred way of writing asyncio applications. For example, the following snippet of code prints "hello", waits 1 second, @@ -230,6 +234,10 @@ is :meth:`loop.run_in_executor`. Creating Tasks ============== +**Source code:** :source:`Lib/asyncio/tasks.py` + +----------------------------------------------- + .. function:: create_task(coro, *, name=None, context=None) Wrap the *coro* :ref:`coroutine ` into a :class:`Task` @@ -631,7 +639,7 @@ Timeouts Change the time the timeout will trigger. - If *when* is `None`, any current deadline will be removed, and the + If *when* is ``None``, any current deadline will be removed, and the context manager will wait indefinitely. If *when* is a float, it is set as the new deadline. @@ -867,17 +875,17 @@ Running in Threads # blocking_io complete at 19:50:54 # finished main at 19:50:54 - Directly calling `blocking_io()` in any coroutine would block the event loop + Directly calling ``blocking_io()`` in any coroutine would block the event loop for its duration, resulting in an additional 1 second of run time. Instead, - by using `asyncio.to_thread()`, we can run it in a separate thread without + by using ``asyncio.to_thread()``, we can run it in a separate thread without blocking the event loop. .. note:: - Due to the :term:`GIL`, `asyncio.to_thread()` can typically only be used + Due to the :term:`GIL`, ``asyncio.to_thread()`` can typically only be used to make IO-bound functions non-blocking. However, for extension modules that release the GIL or alternative Python implementations that don't - have one, `asyncio.to_thread()` can also be used for CPU-bound functions. + have one, ``asyncio.to_thread()`` can also be used for CPU-bound functions. .. versionadded:: 3.9 @@ -1144,10 +1152,8 @@ Task Object .. versionchanged:: 3.9 Added the *msg* parameter. - .. 
deprecated-removed:: 3.11 3.14 - *msg* parameter is ambiguous when multiple :meth:`cancel` - are called with different cancellation messages. - The argument will be removed. + .. versionchanged:: 3.11 + The ``msg`` parameter is propagated from cancelled task to its awaiter. .. _asyncio_example_task_cancel: diff --git a/Doc/library/asyncore.rst b/Doc/library/asyncore.rst deleted file mode 100644 index a3a4e90d05277e..00000000000000 --- a/Doc/library/asyncore.rst +++ /dev/null @@ -1,365 +0,0 @@ -:mod:`asyncore` --- Asynchronous socket handler -=============================================== - -.. module:: asyncore - :synopsis: A base class for developing asynchronous socket handling - services. - :deprecated: - -.. moduleauthor:: Sam Rushing -.. sectionauthor:: Christopher Petrilli -.. sectionauthor:: Steve Holden -.. heavily adapted from original documentation by Sam Rushing - -**Source code:** :source:`Lib/asyncore.py` - -.. deprecated-removed:: 3.6 3.12 - The :mod:`asyncore` module is deprecated - (see :pep:`PEP 594 <594#asyncore>` for details). - Please use :mod:`asyncio` instead. - --------------- - -.. note:: - - This module exists for backwards compatibility only. For new code we - recommend using :mod:`asyncio`. - -This module provides the basic infrastructure for writing asynchronous socket -service clients and servers. - -.. include:: ../includes/wasm-notavail.rst - -There are only two ways to have a program on a single processor do "more than -one thing at a time." Multi-threaded programming is the simplest and most -popular way to do it, but there is another very different technique, that lets -you have nearly all the advantages of multi-threading, without actually using -multiple threads. It's really only practical if your program is largely I/O -bound. If your program is processor bound, then pre-emptive scheduled threads -are probably what you really need. Network servers are rarely processor -bound, however. - -If your operating system supports the :c:func:`select` system call in its I/O -library (and nearly all do), then you can use it to juggle multiple -communication channels at once; doing other work while your I/O is taking -place in the "background." Although this strategy can seem strange and -complex, especially at first, it is in many ways easier to understand and -control than multi-threaded programming. The :mod:`asyncore` module solves -many of the difficult problems for you, making the task of building -sophisticated high-performance network servers and clients a snap. For -"conversational" applications and protocols the companion :mod:`asynchat` -module is invaluable. - -The basic idea behind both modules is to create one or more network -*channels*, instances of class :class:`asyncore.dispatcher` and -:class:`asynchat.async_chat`. Creating the channels adds them to a global -map, used by the :func:`loop` function if you do not provide it with your own -*map*. - -Once the initial channel(s) is(are) created, calling the :func:`loop` function -activates channel service, which continues until the last channel (including -any that have been added to the map during asynchronous service) is closed. - - -.. function:: loop([timeout[, use_poll[, map[,count]]]]) - - Enter a polling loop that terminates after count passes or all open - channels have been closed. All arguments are optional. The *count* - parameter defaults to ``None``, resulting in the loop terminating only when all - channels have been closed. 
The *timeout* argument sets the timeout - parameter for the appropriate :func:`~select.select` or :func:`~select.poll` - call, measured in seconds; the default is 30 seconds. The *use_poll* - parameter, if true, indicates that :func:`~select.poll` should be used in - preference to :func:`~select.select` (the default is ``False``). - - The *map* parameter is a dictionary whose items are the channels to watch. - As channels are closed they are deleted from their map. If *map* is - omitted, a global map is used. Channels (instances of - :class:`asyncore.dispatcher`, :class:`asynchat.async_chat` and subclasses - thereof) can freely be mixed in the map. - - -.. class:: dispatcher() - - The :class:`dispatcher` class is a thin wrapper around a low-level socket - object. To make it more useful, it has a few methods for event-handling - which are called from the asynchronous loop. Otherwise, it can be treated - as a normal non-blocking socket object. - - The firing of low-level events at certain times or in certain connection - states tells the asynchronous loop that certain higher-level events have - taken place. For example, if we have asked for a socket to connect to - another host, we know that the connection has been made when the socket - becomes writable for the first time (at this point you know that you may - write to it with the expectation of success). The implied higher-level - events are: - - +----------------------+----------------------------------------+ - | Event | Description | - +======================+========================================+ - | ``handle_connect()`` | Implied by the first read or write | - | | event | - +----------------------+----------------------------------------+ - | ``handle_close()`` | Implied by a read event with no data | - | | available | - +----------------------+----------------------------------------+ - | ``handle_accepted()``| Implied by a read event on a listening | - | | socket | - +----------------------+----------------------------------------+ - - During asynchronous processing, each mapped channel's :meth:`readable` and - :meth:`writable` methods are used to determine whether the channel's socket - should be added to the list of channels :c:func:`select`\ ed or - :c:func:`poll`\ ed for read and write events. - - Thus, the set of channel events is larger than the basic socket events. The - full set of methods that can be overridden in your subclass follows: - - - .. method:: handle_read() - - Called when the asynchronous loop detects that a :meth:`read` call on the - channel's socket will succeed. - - - .. method:: handle_write() - - Called when the asynchronous loop detects that a writable socket can be - written. Often this method will implement the necessary buffering for - performance. For example:: - - def handle_write(self): - sent = self.send(self.buffer) - self.buffer = self.buffer[sent:] - - - .. method:: handle_expt() - - Called when there is out of band (OOB) data for a socket connection. This - will almost never happen, as OOB is tenuously supported and rarely used. - - - .. method:: handle_connect() - - Called when the active opener's socket actually makes a connection. Might - send a "welcome" banner, or initiate a protocol negotiation with the - remote endpoint, for example. - - - .. method:: handle_close() - - Called when the socket is closed. - - - .. method:: handle_error() - - Called when an exception is raised and not otherwise handled. The default - version prints a condensed traceback. - - - .. 
method:: handle_accept() - - Called on listening channels (passive openers) when a connection can be - established with a new remote endpoint that has issued a :meth:`connect` - call for the local endpoint. Deprecated in version 3.2; use - :meth:`handle_accepted` instead. - - .. deprecated:: 3.2 - - - .. method:: handle_accepted(sock, addr) - - Called on listening channels (passive openers) when a connection has been - established with a new remote endpoint that has issued a :meth:`connect` - call for the local endpoint. *sock* is a *new* socket object usable to - send and receive data on the connection, and *addr* is the address - bound to the socket on the other end of the connection. - - .. versionadded:: 3.2 - - - .. method:: readable() - - Called each time around the asynchronous loop to determine whether a - channel's socket should be added to the list on which read events can - occur. The default method simply returns ``True``, indicating that by - default, all channels will be interested in read events. - - - .. method:: writable() - - Called each time around the asynchronous loop to determine whether a - channel's socket should be added to the list on which write events can - occur. The default method simply returns ``True``, indicating that by - default, all channels will be interested in write events. - - - In addition, each channel delegates or extends many of the socket methods. - Most of these are nearly identical to their socket partners. - - - .. method:: create_socket(family=socket.AF_INET, type=socket.SOCK_STREAM) - - This is identical to the creation of a normal socket, and will use the - same options for creation. Refer to the :mod:`socket` documentation for - information on creating sockets. - - .. versionchanged:: 3.3 - *family* and *type* arguments can be omitted. - - - .. method:: connect(address) - - As with the normal socket object, *address* is a tuple with the first - element the host to connect to, and the second the port number. - - - .. method:: send(data) - - Send *data* to the remote end-point of the socket. - - - .. method:: recv(buffer_size) - - Read at most *buffer_size* bytes from the socket's remote end-point. An - empty bytes object implies that the channel has been closed from the - other end. - - Note that :meth:`recv` may raise :exc:`BlockingIOError` , even though - :func:`select.select` or :func:`select.poll` has reported the socket - ready for reading. - - - .. method:: listen(backlog) - - Listen for connections made to the socket. The *backlog* argument - specifies the maximum number of queued connections and should be at least - 1; the maximum value is system-dependent (usually 5). - - - .. method:: bind(address) - - Bind the socket to *address*. The socket must not already be bound. (The - format of *address* depends on the address family --- refer to the - :mod:`socket` documentation for more information.) To mark - the socket as re-usable (setting the :const:`SO_REUSEADDR` option), call - the :class:`dispatcher` object's :meth:`set_reuse_addr` method. - - - .. method:: accept() - - Accept a connection. The socket must be bound to an address and listening - for connections. The return value can be either ``None`` or a pair - ``(conn, address)`` where *conn* is a *new* socket object usable to send - and receive data on the connection, and *address* is the address bound to - the socket on the other end of the connection. 
- When ``None`` is returned it means the connection didn't take place, in - which case the server should just ignore this event and keep listening - for further incoming connections. - - - .. method:: close() - - Close the socket. All future operations on the socket object will fail. - The remote end-point will receive no more data (after queued data is - flushed). Sockets are automatically closed when they are - garbage-collected. - - -.. class:: dispatcher_with_send() - - A :class:`dispatcher` subclass which adds simple buffered output capability, - useful for simple clients. For more sophisticated usage use - :class:`asynchat.async_chat`. - -.. class:: file_dispatcher() - - A file_dispatcher takes a file descriptor or :term:`file object` along - with an optional map argument and wraps it for use with the :c:func:`poll` - or :c:func:`loop` functions. If provided a file object or anything with a - :c:func:`fileno` method, that method will be called and passed to the - :class:`file_wrapper` constructor. - - .. availability:: Unix. - -.. class:: file_wrapper() - - A file_wrapper takes an integer file descriptor and calls :func:`os.dup` to - duplicate the handle so that the original handle may be closed independently - of the file_wrapper. This class implements sufficient methods to emulate a - socket for use by the :class:`file_dispatcher` class. - - .. availability:: Unix. - - -.. _asyncore-example-1: - -asyncore Example basic HTTP client ----------------------------------- - -Here is a very basic HTTP client that uses the :class:`dispatcher` class to -implement its socket handling:: - - import asyncore - - class HTTPClient(asyncore.dispatcher): - - def __init__(self, host, path): - asyncore.dispatcher.__init__(self) - self.create_socket() - self.connect( (host, 80) ) - self.buffer = bytes('GET %s HTTP/1.0\r\nHost: %s\r\n\r\n' % - (path, host), 'ascii') - - def handle_connect(self): - pass - - def handle_close(self): - self.close() - - def handle_read(self): - print(self.recv(8192)) - - def writable(self): - return (len(self.buffer) > 0) - - def handle_write(self): - sent = self.send(self.buffer) - self.buffer = self.buffer[sent:] - - - client = HTTPClient('www.python.org', '/') - asyncore.loop() - -.. _asyncore-example-2: - -asyncore Example basic echo server ----------------------------------- - -Here is a basic echo server that uses the :class:`dispatcher` class to accept -connections and dispatches the incoming connections to a handler:: - - import asyncore - - class EchoHandler(asyncore.dispatcher_with_send): - - def handle_read(self): - data = self.recv(8192) - if data: - self.send(data) - - class EchoServer(asyncore.dispatcher): - - def __init__(self, host, port): - asyncore.dispatcher.__init__(self) - self.create_socket() - self.set_reuse_addr() - self.bind((host, port)) - self.listen(5) - - def handle_accepted(self, sock, addr): - print('Incoming connection from %s' % repr(addr)) - handler = EchoHandler(sock) - - server = EchoServer('localhost', 8080) - asyncore.loop() diff --git a/Doc/library/bdb.rst b/Doc/library/bdb.rst index 7b74bbd652be38..d201dc963b5995 100644 --- a/Doc/library/bdb.rst +++ b/Doc/library/bdb.rst @@ -143,7 +143,7 @@ The :mod:`bdb` module also defines two classes: For real file names, the canonical form is an operating-system-dependent, :func:`case-normalized ` :func:`absolute path - `. A *filename* with angle brackets, such as `""` + `. A *filename* with angle brackets, such as ``""`` generated in interactive mode, is returned unchanged. .. 
method:: reset() diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst index 999892e95f4715..ae5a1598f84b44 100644 --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -206,7 +206,7 @@ Incremental (de)compression will be set to ``True``. Attempting to decompress data after the end of stream is reached - raises an `EOFError`. Any data found after the end of the + raises an :exc:`EOFError`. Any data found after the end of the stream is ignored and saved in the :attr:`~.unused_data` attribute. .. versionchanged:: 3.5 @@ -303,7 +303,7 @@ Using :class:`BZ2Compressor` for incremental compression: >>> out = out + comp.flush() The example above uses a very "nonrandom" stream of data -(a stream of `b"z"` chunks). Random data tends to compress poorly, +(a stream of ``b"z"`` chunks). Random data tends to compress poorly, while ordered, repetitive data usually yields a high compression ratio. Writing and reading a bzip2-compressed file in binary mode: diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 4a665f2254f8a6..8225236350d22e 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -189,7 +189,8 @@ wider range of codecs when working with binary files: .. note:: - Underlying encoded files are always opened in binary mode. + If *encoding* is not ``None``, then the + underlying encoded files are always opened in binary mode. No automatic conversion of ``'\n'`` is done on reading and writing. The *mode* argument may be any binary mode acceptable to the built-in :func:`open` function; the ``'b'`` is automatically added. diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index 95c9e50991423f..8106cc235e5a3c 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -152,7 +152,7 @@ And:: All threads enqueued to ``ThreadPoolExecutor`` will be joined before the interpreter can exit. Note that the exit handler which does this is - executed *before* any exit handlers added using `atexit`. This means + executed *before* any exit handlers added using ``atexit``. This means exceptions in the main thread must be caught and handled in order to signal threads to exit gracefully. For this reason, it is recommended that ``ThreadPoolExecutor`` not be used for long-running tasks. @@ -411,13 +411,13 @@ The :class:`Future` class encapsulates the asynchronous execution of a callable. tests. If the method returns ``False`` then the :class:`Future` was cancelled, - i.e. :meth:`Future.cancel` was called and returned `True`. Any threads + i.e. :meth:`Future.cancel` was called and returned ``True``. Any threads waiting on the :class:`Future` completing (i.e. through :func:`as_completed` or :func:`wait`) will be woken up. If the method returns ``True`` then the :class:`Future` was not cancelled and has been put in the running state, i.e. calls to - :meth:`Future.running` will return `True`. + :meth:`Future.running` will return ``True``. This method can only be called once and cannot be called after :meth:`Future.set_result` or :meth:`Future.set_exception` have been diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst index bf49f2bfbe1b43..a925a3dd4fb9c2 100644 --- a/Doc/library/configparser.rst +++ b/Doc/library/configparser.rst @@ -33,13 +33,17 @@ can be customized by end users easily. .. seealso:: + Module :mod:`tomllib` + TOML is a well-specified format for application configuration files. + It is specifically designed to be an improved version of INI. 
+ Module :mod:`shlex` - Support for creating Unix shell-like mini-languages which can be used as - an alternate format for application configuration files. + Support for creating Unix shell-like mini-languages which can also + be used for application configuration files. Module :mod:`json` - The json module implements a subset of JavaScript syntax which can also - be used for this purpose. + The ``json`` module implements a subset of JavaScript syntax which is + sometimes used for configuration, but does not support comments. .. testsetup:: diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst index 2d28fb35a9e316..1b55868c3aa62f 100644 --- a/Doc/library/contextlib.rst +++ b/Doc/library/contextlib.rst @@ -66,6 +66,8 @@ Functions and classes provided: # Code to release resource, e.g.: release_resource(resource) + The function can then be used like this:: + >>> with managed_resource(timeout=3600) as resource: ... # Resource is released at the end of this block, ... # even if code in the block raises an exception @@ -140,9 +142,9 @@ Functions and classes provided: finally: print(f'it took {time.monotonic() - now}s to run') - @timeit() - async def main(): - # ... async code ... + @timeit() + async def main(): + # ... async code ... When used as a decorator, a new generator instance is implicitly created on each function call. This allows the otherwise "one-shot" context managers @@ -249,15 +251,15 @@ Functions and classes provided: :ref:`asynchronous context managers `:: async def send_http(session=None): - if not session: - # If no http session, create it with aiohttp - cm = aiohttp.ClientSession() - else: - # Caller is responsible for closing the session - cm = nullcontext(session) + if not session: + # If no http session, create it with aiohttp + cm = aiohttp.ClientSession() + else: + # Caller is responsible for closing the session + cm = nullcontext(session) - async with cm as session: - # Send http requests with session + async with cm as session: + # Send http requests with session .. versionadded:: 3.7 @@ -396,6 +398,8 @@ Functions and classes provided: print('Finishing') return False + The class can then be used like this:: + >>> @mycontext() ... def function(): ... print('The bit in the middle') @@ -466,6 +470,8 @@ Functions and classes provided: print('Finishing') return False + The class can then be used like this:: + >>> @mycontext() ... async def function(): ... print('The bit in the middle') diff --git a/Doc/library/contextvars.rst b/Doc/library/contextvars.rst index be1dd0c9eb57e8..08a7c7d74eab97 100644 --- a/Doc/library/contextvars.rst +++ b/Doc/library/contextvars.rst @@ -110,7 +110,7 @@ Context Variables A read-only property. Set to the value the variable had before the :meth:`ContextVar.set` method call that created the token. - It points to :attr:`Token.MISSING` is the variable was not set + It points to :attr:`Token.MISSING` if the variable was not set before the call. .. attribute:: Token.MISSING diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst index dc35965be3e40d..866b180f4bc3b8 100644 --- a/Doc/library/copyreg.rst +++ b/Doc/library/copyreg.rst @@ -25,20 +25,17 @@ Such constructors may be factory functions or class instances. hence not valid as a constructor), raises :exc:`TypeError`. -.. function:: pickle(type, function, constructor=None) +.. function:: pickle(type, function, constructor_ob=None) Declares that *function* should be used as a "reduction" function for objects of type *type*. 
*function* should return either a string or a tuple - containing two or three elements. + containing two or three elements. See the :attr:`~pickle.Pickler.dispatch_table` + for more details on the interface of *function*. - The optional *constructor* parameter, if provided, is a callable object which - can be used to reconstruct the object when called with the tuple of arguments - returned by *function* at pickling time. A :exc:`TypeError` is raised if the - *constructor* is not callable. + The *constructor_ob* parameter is a legacy feature and is now ignored, but if + passed it must be a callable. - See the :mod:`pickle` module for more details on the interface - expected of *function* and *constructor*. Note that the - :attr:`~pickle.Pickler.dispatch_table` attribute of a pickler + Note that the :attr:`~pickle.Pickler.dispatch_table` attribute of a pickler object or subclass of :class:`pickle.Pickler` can also be used for declaring reduction functions. diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index 0cab95e1500a66..41f11505aa1030 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -420,7 +420,7 @@ Dialects support the following attributes: .. attribute:: Dialect.skipinitialspace - When :const:`True`, whitespace immediately following the *delimiter* is ignored. + When :const:`True`, spaces immediately following the *delimiter* are ignored. The default is :const:`False`. diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 4b6b26c991fd38..71e5545ffe47c6 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -6,6 +6,8 @@ .. moduleauthor:: Thomas Heller +**Source code:** :source:`Lib/ctypes` + -------------- :mod:`ctypes` is a foreign function library for Python. It provides C compatible @@ -359,7 +361,7 @@ from within *IDLE* or *PythonWin*:: >>> printf(b"%f bottles of beer\n", 42.5) Traceback (most recent call last): File "<stdin>", line 1, in <module> - ArgumentError: argument 2: exceptions.TypeError: Don't know how to convert parameter 2 + ArgumentError: argument 2: TypeError: Don't know how to convert parameter 2 >>> As has been mentioned before, all Python types except integers, strings, and @@ -371,6 +373,26 @@ that they can be converted to the required C data type:: 31 >>> +.. _ctypes-calling-variadic-functions: + +Calling variadic functions +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +On most platforms, calling variadic functions through ctypes is exactly the same +as calling functions with a fixed number of parameters. On some platforms, and in +particular ARM64 for Apple platforms, the calling convention for variadic functions +is different from that for regular functions. + +On those platforms it is required to specify the *argtypes* attribute for the +regular, non-variadic, function arguments: + +.. code-block:: python3 + + libc.printf.argtypes = [ctypes.c_char_p] + +Because specifying the attribute does not inhibit portability, it is advised to always +specify ``argtypes`` for all variadic functions. + ..
_ctypes-calling-functions-with-own-custom-data-types: @@ -422,7 +444,7 @@ prototype for a C function), and tries to convert the arguments to valid types:: >>> printf(b"%d %d %d", 1, 2, 3) Traceback (most recent call last): File "", line 1, in - ArgumentError: argument 2: exceptions.TypeError: wrong type + ArgumentError: argument 2: TypeError: wrong type >>> printf(b"%s %d %f\n", b"X", 2, 3) X 2 3.000000 13 @@ -487,7 +509,7 @@ single character Python bytes object into a C char:: >>> strchr(b"abcdef", b"def") Traceback (most recent call last): File "", line 1, in - ArgumentError: argument 2: exceptions.TypeError: one character string expected + ArgumentError: argument 2: TypeError: one character string expected >>> print(strchr(b"abcdef", b"x")) None >>> strchr(b"abcdef", b"d") @@ -1948,7 +1970,7 @@ Utility functions .. function:: GetLastError() Windows only: Returns the last error code set by Windows in the calling thread. - This function calls the Windows `GetLastError()` function directly, + This function calls the Windows ``GetLastError()`` function directly, it does not return the ctypes-private copy of the error code. .. function:: get_errno() diff --git a/Doc/library/curses.ascii.rst b/Doc/library/curses.ascii.rst index a69dbb2ac06572..e1d1171927c9e2 100644 --- a/Doc/library/curses.ascii.rst +++ b/Doc/library/curses.ascii.rst @@ -7,6 +7,8 @@ .. moduleauthor:: Eric S. Raymond .. sectionauthor:: Eric S. Raymond +**Source code:** :source:`Lib/curses/ascii.py` + -------------- The :mod:`curses.ascii` module supplies name constants for ASCII characters and diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index bb203c48f19ffb..f9f94b22f69e3d 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -9,6 +9,8 @@ .. sectionauthor:: Moshe Zadka .. sectionauthor:: Eric Raymond +**Source code:** :source:`Lib/curses` + -------------- The :mod:`curses` module provides an interface to the curses library, the @@ -275,7 +277,7 @@ The module :mod:`curses` defines the following functions: Change the definition of a color, taking the number of the color to be changed followed by three RGB values (for the amounts of red, green, and blue components). The value of *color_number* must be between ``0`` and - `COLORS - 1`. Each of *r*, *g*, *b*, must be a value between ``0`` and + ``COLORS - 1``. Each of *r*, *g*, *b*, must be a value between ``0`` and ``1000``. When :func:`init_color` is used, all occurrences of that color on the screen immediately change to the new definition. This function is a no-op on most terminals; it is active only if :func:`can_change_color` returns ``True``. diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 4364ac342471eb..847299649d1efd 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -81,7 +81,7 @@ Module contents @dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False) class C: - ... + ... The parameters to :func:`dataclass` are: @@ -191,7 +191,7 @@ Module contents .. versionchanged:: 3.11 If a field name is already included in the ``__slots__`` of a base class, it will not be included in the generated ``__slots__`` - to prevent `overriding them `_. + to prevent :ref:`overriding them `. Therefore, do not use ``__slots__`` to retrieve the field names of a dataclass. Use :func:`fields` instead. 
To be able to determine inherited slots, @@ -482,10 +482,10 @@ Module contents @dataclass class Point: - x: float - _: KW_ONLY - y: float - z: float + x: float + _: KW_ONLY + y: float + z: float p = Point(0, y=1.5, z=2.0) @@ -578,8 +578,8 @@ value is not provided when creating the class:: @dataclass class C: i: int - j: int = None - database: InitVar[DatabaseType] = None + j: int | None = None + database: InitVar[DatabaseType | None] = None def __post_init__(self, database): if self.j is None and database is not None: @@ -773,24 +773,24 @@ default value have the following special behaviors: :: class IntConversionDescriptor: - def __init__(self, *, default): - self._default = default + def __init__(self, *, default): + self._default = default - def __set_name__(self, owner, name): - self._name = "_" + name + def __set_name__(self, owner, name): + self._name = "_" + name - def __get__(self, obj, type): - if obj is None: - return self._default + def __get__(self, obj, type): + if obj is None: + return self._default - return getattr(obj, self._name, self._default) + return getattr(obj, self._name, self._default) - def __set__(self, obj, value): - setattr(obj, self._name, int(value)) + def __set__(self, obj, value): + setattr(obj, self._name, int(value)) @dataclass class InventoryItem: - quantity_on_hand: IntConversionDescriptor = IntConversionDescriptor(default=100) + quantity_on_hand: IntConversionDescriptor = IntConversionDescriptor(default=100) i = InventoryItem() print(i.quantity_on_hand) # 100 diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index c3a66a4674b10a..f7e2bb3f3c6de3 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1769,7 +1769,7 @@ Other constructor: ISO 8601 format, with the following exceptions: 1. Time zone offsets may have fractional seconds. - 2. The leading `T`, normally required in cases where there may be ambiguity between + 2. The leading ``T``, normally required in cases where there may be ambiguity between a date and a time, is not required. 3. Fractional seconds may have any number of digits (anything beyond 6 will be truncated). @@ -2265,7 +2265,7 @@ where historical changes have been made to civil time. two digits of ``offset.hours`` and ``offset.minutes`` respectively. .. versionchanged:: 3.6 - Name generated from ``offset=timedelta(0)`` is now plain `'UTC'`, not + Name generated from ``offset=timedelta(0)`` is now plain ``'UTC'``, not ``'UTC+00:00'``. diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index b7e836308fa8aa..260108136df7f1 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -576,11 +576,11 @@ Decimal objects Alternative constructor that only accepts instances of :class:`float` or :class:`int`. - Note `Decimal.from_float(0.1)` is not the same as `Decimal('0.1')`. + Note ``Decimal.from_float(0.1)`` is not the same as ``Decimal('0.1')``. Since 0.1 is not exactly representable in binary floating point, the value is stored as the nearest representable value which is - `0x1.999999999999ap-4`. That equivalent value in decimal is - `0.1000000000000000055511151231257827021181583404541015625`. + ``0x1.999999999999ap-4``. That equivalent value in decimal is + ``0.1000000000000000055511151231257827021181583404541015625``. .. note:: From Python 3.2 onwards, a :class:`Decimal` instance can also be constructed directly from a :class:`float`. @@ -1209,7 +1209,7 @@ In addition to the three supplied contexts, new contexts can be created with the .. method:: exp(x) - Returns `e ** x`. 
+ Returns ``e ** x``. .. method:: fma(x, y, z) diff --git a/Doc/library/difflib.rst b/Doc/library/difflib.rst index c5a279688a4491..5ee1f4a02c6816 100644 --- a/Doc/library/difflib.rst +++ b/Doc/library/difflib.rst @@ -145,8 +145,6 @@ diffs. For comparing directories and files, see also, the :mod:`filecmp` module. The arguments for this method are the same as those for the :meth:`make_file` method. - :file:`Tools/scripts/diff.py` is a command-line front-end to this class and - contains a good example of its use. .. function:: context_diff(a, b, fromfile='', tofile='', fromfiledate='', tofiledate='', n=3, lineterm='\n') @@ -240,8 +238,6 @@ diffs. For comparing directories and files, see also, the :mod:`filecmp` module. function :func:`IS_CHARACTER_JUNK`, which filters out whitespace characters (a blank or tab; it's a bad idea to include newline in this!). - :file:`Tools/scripts/ndiff.py` is a command-line front-end to this function. - >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(keepends=True), ... 'ore\ntree\nemu\n'.splitlines(keepends=True)) >>> print(''.join(diff), end="") @@ -759,7 +755,12 @@ A command-line interface to difflib ----------------------------------- This example shows how to use difflib to create a ``diff``-like utility. -It is also contained in the Python source distribution, as -:file:`Tools/scripts/diff.py`. -.. literalinclude:: ../../Tools/scripts/diff.py +.. literalinclude:: ../includes/diff.py + +ndiff example +------------- + +This example shows how to use :func:`difflib.ndiff`. + +.. literalinclude:: ../includes/ndiff.py diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 47e4bf60072170..30bbf95be63417 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -30,11 +30,17 @@ interpreter. Use 2 bytes for each instruction. Previously the number of bytes varied by instruction. + .. versionchanged:: 3.10 + The argument of jump, exception handling and loop instructions is now + the instruction offset rather than the byte offset. + .. versionchanged:: 3.11 Some instructions are accompanied by one or more inline cache entries, which take the form of :opcode:`CACHE` instructions. These instructions are hidden by default, but can be shown by passing ``show_caches=True`` to - any :mod:`dis` utility. + any :mod:`dis` utility. Furthermore, the interpreter now adapts the + bytecode to specialize it for different runtime conditions. The + adaptive bytecode can be shown by passing ``adaptive=True``. Example: Given the function :func:`myfunc`:: @@ -66,8 +72,8 @@ The bytecode analysis API allows pieces of Python code to be wrapped in a :class:`Bytecode` object that provides easy access to details of the compiled code. -.. class:: Bytecode(x, *, first_line=None, current_offset=None, show_caches=False) - +.. class:: Bytecode(x, *, first_line=None, current_offset=None,\ + show_caches=False, adaptive=False) Analyse the bytecode corresponding to a function, generator, asynchronous generator, coroutine, method, string of source code, or a code object (as @@ -86,6 +92,12 @@ code. disassembled code. Setting this means :meth:`.dis` will display a "current instruction" marker against the specified opcode. + If *show_caches* is ``True``, :meth:`.dis` will display inline cache + entries used by the interpreter to specialize the bytecode. + + If *adaptive* is ``True``, :meth:`.dis` will display specialized bytecode + that may be different from the original bytecode. + .. 
classmethod:: from_traceback(tb, *, show_caches=False) Construct a :class:`Bytecode` instance from the given traceback, setting @@ -113,7 +125,7 @@ code. This can now handle coroutine and asynchronous generator objects. .. versionchanged:: 3.11 - Added the ``show_caches`` parameter. + Added the *show_caches* and *adaptive* parameters. Example: @@ -168,7 +180,7 @@ operation is being performed, so the intermediate analysis object isn't useful: Added *file* parameter. -.. function:: dis(x=None, *, file=None, depth=None, show_caches=False) +.. function:: dis(x=None, *, file=None, depth=None, show_caches=False, adaptive=False) Disassemble the *x* object. *x* can denote either a module, a class, a method, a function, a generator, an asynchronous generator, a coroutine, @@ -189,6 +201,12 @@ operation is being performed, so the intermediate analysis object isn't useful: The maximal depth of recursion is limited by *depth* unless it is ``None``. ``depth=0`` means no recursion. + If *show_caches* is ``True``, this function will display inline cache + entries used by the interpreter to specialize the bytecode. + + If *adaptive* is ``True``, this function will display specialized bytecode + that may be different from the original bytecode. + .. versionchanged:: 3.4 Added *file* parameter. @@ -199,10 +217,10 @@ operation is being performed, so the intermediate analysis object isn't useful: This can now handle coroutine and asynchronous generator objects. .. versionchanged:: 3.11 - Added the ``show_caches`` parameter. + Added the *show_caches* and *adaptive* parameters. -.. function:: distb(tb=None, *, file=None, show_caches=False) +.. function:: distb(tb=None, *, file=None, show_caches=False, adaptive=False) Disassemble the top-of-stack function of a traceback, using the last traceback if none was passed. The instruction causing the exception is @@ -215,11 +233,11 @@ operation is being performed, so the intermediate analysis object isn't useful: Added *file* parameter. .. versionchanged:: 3.11 - Added the ``show_caches`` parameter. + Added the *show_caches* and *adaptive* parameters. -.. function:: disassemble(code, lasti=-1, *, file=None, show_caches=False) - disco(code, lasti=-1, *, file=None, show_caches=False) +.. function:: disassemble(code, lasti=-1, *, file=None, show_caches=False, adaptive=False) + disco(code, lasti=-1, *, file=None, show_caches=False, adaptive=False) Disassemble a code object, indicating the last instruction if *lasti* was provided. The output is divided in the following columns: @@ -242,10 +260,10 @@ operation is being performed, so the intermediate analysis object isn't useful: Added *file* parameter. .. versionchanged:: 3.11 - Added the ``show_caches`` parameter. + Added the *show_caches* and *adaptive* parameters. -.. function:: get_instructions(x, *, first_line=None, show_caches=False) +.. function:: get_instructions(x, *, first_line=None, show_caches=False, adaptive=False) Return an iterator over the instructions in the supplied function, method, source code string or code object. @@ -258,10 +276,12 @@ operation is being performed, so the intermediate analysis object isn't useful: source line information (if any) is taken directly from the disassembled code object. + The *show_caches* and *adaptive* parameters work as they do in :func:`dis`. + .. versionadded:: 3.4 .. versionchanged:: 3.11 - Added the ``show_caches`` parameter. + Added the *show_caches* and *adaptive* parameters. .. 
function:: findlinestarts(code) @@ -367,7 +387,7 @@ details of bytecode instructions as :class:`Instruction` instances: .. class:: Positions - In case the information is not available, some fields might be `None`. + In case the information is not available, some fields might be ``None``. .. data:: lineno .. data:: end_lineno @@ -393,6 +413,15 @@ The Python compiler currently generates the following bytecode instructions. Removes the top-of-stack (TOS) item. +.. opcode:: END_FOR + + Removes the top two values from the stack. + Equivalent to POP_TOP; POP_TOP. + Used to clean up at the end of loops, hence the name. + + .. versionadded:: 3.12 + + .. opcode:: COPY (i) Push the *i*-th item to the top of the stack. The item is not removed from its @@ -578,6 +607,15 @@ the original TOS1. .. versionadded:: 3.12 +.. opcode:: STOPITERATION_ERROR + + Handles a StopIteration raised in a generator or coroutine. + If TOS is an instance of :exc:`StopIteration`, or :exc:`StopAsyncIteration` + replace it with a :exc:`RuntimeError`. + + .. versionadded:: 3.12 + + .. opcode:: BEFORE_ASYNC_WITH Resolves ``__aenter__`` and ``__aexit__`` from the object on top of the @@ -1068,9 +1106,11 @@ iterations of the loop. TOS is an :term:`iterator`. Call its :meth:`~iterator.__next__` method. If this yields a new value, push it on the stack (leaving the iterator below - it). If the iterator indicates it is exhausted, TOS is popped, and the byte + it). If the iterator indicates it is exhausted then the byte code counter is incremented by *delta*. + .. versionchanged:: 3.12 + Up until 3.11 the iterator was popped when it was exhausted. .. opcode:: LOAD_GLOBAL (namei) diff --git a/Doc/library/email.compat32-message.rst b/Doc/library/email.compat32-message.rst index 4eaa9d588ca35e..5bef155a4af310 100644 --- a/Doc/library/email.compat32-message.rst +++ b/Doc/library/email.compat32-message.rst @@ -298,7 +298,7 @@ Here are the methods of the :class:`Message` class: In a model generated from bytes, any header values that (in contravention of the RFCs) contain non-ASCII bytes will, when retrieved through this interface, be represented as :class:`~email.header.Header` objects with - a charset of `unknown-8bit`. + a charset of ``unknown-8bit``. .. method:: __len__() diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst index 98527cea43da2a..00a954e0307ea6 100644 --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -153,7 +153,7 @@ headers. specified as ``-0000`` (indicating it is in UTC but contains no information about the source timezone), then :attr:`.datetime` will be a naive :class:`~datetime.datetime`. If a specific timezone offset is - found (including `+0000`), then :attr:`.datetime` will contain an aware + found (including ``+0000``), then :attr:`.datetime` will contain an aware ``datetime`` that uses :class:`datetime.timezone` to record the timezone offset. diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index 34f45e20bae9d9..d7f89cf96368b5 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -7,6 +7,8 @@ .. 
versionadded:: 3.4 +**Source code:** :source:`Lib/ensurepip` + -------------- The :mod:`ensurepip` package provides support for bootstrapping the ``pip`` diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index a30a7a4ca34300..74d9e67327629e 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -27,7 +27,8 @@ An enumeration: * is a set of symbolic names (members) bound to unique values -* can be iterated over to return its members in definition order +* can be iterated over to return its canonical (i.e. non-alias) members in + definition order * uses *call* syntax to return members by value * uses *index* syntax to return members by name @@ -92,6 +93,11 @@ Module Contents the bitwise operators without losing their :class:`IntFlag` membership. :class:`IntFlag` members are also subclasses of :class:`int`. (`Notes`_) + :class:`ReprEnum` + + Used by :class:`IntEnum`, :class:`StrEnum`, and :class:`IntFlag` + to keep the :class:`str() ` of the mixed-in type. + :class:`EnumCheck` An enumeration with the values ``CONTINUOUS``, ``NAMED_FLAGS``, and @@ -132,9 +138,20 @@ Module Contents Do not make ``obj`` a member. Can be used as a decorator. + :func:`global_enum` + + Modify the :class:`str() ` and :func:`repr` of an enum + to show its members as belonging to the module instead of its class. + Should only be used if the enum members will be exported to the + module global namespace. + + :func:`show_flag_values` + + Return a list of all power-of-two integers contained in a flag. + .. versionadded:: 3.6 ``Flag``, ``IntFlag``, ``auto`` -.. versionadded:: 3.11 ``StrEnum``, ``EnumCheck``, ``FlagBoundary``, ``property``, ``member``, ``nonmember`` +.. versionadded:: 3.11 ``StrEnum``, ``EnumCheck``, ``ReprEnum``, ``FlagBoundary``, ``property``, ``member``, ``nonmember``, ``global_enum``, ``show_flag_values`` --------------- @@ -226,8 +243,8 @@ Data Types Member values can be anything: :class:`int`, :class:`str`, etc.. If the exact value is unimportant you may use :class:`auto` instances and an - appropriate value will be chosen for you. Care must be taken if you mix - :class:`auto` with other values. + appropriate value will be chosen for you. See :class:`auto` for the + details. .. attribute:: Enum._ignore_ @@ -409,19 +426,23 @@ Data Types in most of the same places that a string can be used. The result of any string operation performed on or with a *StrEnum* member is not part of the enumeration. - .. note:: There are places in the stdlib that check for an exact :class:`str` - instead of a :class:`str` subclass (i.e. ``type(unknown) == str`` - instead of ``isinstance(str, unknown)``), and in those locations you - will need to use ``str(StrEnum.member)``. + .. note:: + + There are places in the stdlib that check for an exact :class:`str` + instead of a :class:`str` subclass (i.e. ``type(unknown) == str`` + instead of ``isinstance(unknown, str)``), and in those locations you + will need to use ``str(StrEnum.member)``. .. note:: Using :class:`auto` with :class:`StrEnum` results in the lower-cased member name as the value. - .. note:: :meth:`__str__` is :func:`str.__str__` to better support the - *replacement of existing constants* use-case. :meth:`__format__` is likewise - :func:`str.__format__` for that same reason. + .. note:: + + :meth:`~object.__str__` is :meth:`!str.__str__` to better support the + *replacement of existing constants* use-case. :meth:`~object.__format__` is likewise + :meth:`!str.__format__` for that same reason. .. versionadded:: 3.11 @@ -453,13 +474,17 @@ Data Types .. 
method:: __iter__(self): - Returns all contained members:: + Returns all contained non-alias members:: >>> list(Color.RED) [] >>> list(purple) [, ] + .. versionchanged:: 3.11 + + Aliases are no longer returned during iteration. + .. method:: __len__(self): Returns number of members in flag:: @@ -569,10 +594,30 @@ Data Types Using :class:`auto` with :class:`IntFlag` results in integers that are powers of two, starting with ``1``. - .. versionchanged:: 3.11 :meth:`__str__` is now :func:`int.__str__` to - better support the *replacement of existing constants* use-case. - :meth:`__format__` was already :func:`int.__format__` for that same reason. + .. versionchanged:: 3.11 + + :meth:`~object.__str__` is now :meth:`!int.__str__` to better support the + *replacement of existing constants* use-case. :meth:`~object.__format__` was + already :meth:`!int.__format__` for that same reason. + + Inversion of a :class:`!IntFlag` now returns a positive value that is the + union of all flags not in the given flag, rather than a negative value. + This matches the existing :class:`Flag` behavior. +.. class:: ReprEnum + + :class:`!ReprEum` uses the :meth:`repr() ` of :class:`Enum`, + but the :class:`str() ` of the mixed-in data type: + + * :meth:`!int.__str__` for :class:`IntEnum` and :class:`IntFlag` + * :meth:`!str.__str__` for :class:`StrEnum` + + Inherit from :class:`!ReprEnum` to keep the :class:`str() / :func:`format` + of the mixed-in data type instead of using the + :class:`Enum`-default :meth:`str() `. + + + .. versionadded:: 3.11 .. class:: EnumCheck @@ -748,7 +793,16 @@ Utilities and Decorators For *Enum* and *IntEnum* that appropriate value will be the last value plus one; for *Flag* and *IntFlag* it will be the first power-of-two greater than the last value; for *StrEnum* it will be the lower-cased version of the - member's name. + member's name. Care must be taken if mixing *auto()* with manually specified + values. + + *auto* instances are only resolved when at the top level of an assignment: + + * ``FIRST = auto()`` will work (auto() is replaced with ``1``); + * ``SECOND = auto(), -2`` will work (auto is replaced with ``2``, so ``2, -2`` is + used to create the ``SECOND`` enum member; + * ``THREE = [auto(), -3]`` will *not* work (``, -3`` is used to + create the ``THREE`` enum member) ``_generate_next_value_`` can be overridden to customize the values used by *auto*. @@ -808,6 +862,22 @@ Utilities and Decorators .. versionadded:: 3.11 +.. decorator:: global_enum + + A decorator to change the :class:`str() ` and :func:`repr` of an enum + to show its members as belonging to the module instead of its class. + Should only be used when the enum members are exported + to the module global namespace (see :class:`re.RegexFlag` for an example). + + + .. versionadded:: 3.11 + +.. function:: show_flag_values(value) + + Return a list of all power-of-two integers contained in a flag *value*. + + .. versionadded:: 3.11 + --------------- Notes diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index fc856277d67b2e..1217b817b4e843 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -934,21 +934,42 @@ their subgroups based on the types of the contained exceptions. .. method:: derive(excs) - Returns an exception group with the same :attr:`message`, - :attr:`__traceback__`, :attr:`__cause__`, :attr:`__context__` - and :attr:`__notes__` but which wraps the exceptions in ``excs``. 
+ Returns an exception group with the same :attr:`message`, but which + wraps the exceptions in ``excs``. This method is used by :meth:`subgroup` and :meth:`split`. A subclass needs to override it in order to make :meth:`subgroup` and :meth:`split` return instances of the subclass rather - than :exc:`ExceptionGroup`. :: + than :exc:`ExceptionGroup`. + + :meth:`subgroup` and :meth:`split` copy the :attr:`__traceback__`, + :attr:`__cause__`, :attr:`__context__` and :attr:`__notes__` fields from + the original exception group to the one returned by :meth:`derive`, so + these fields do not need to be updated by :meth:`derive`. :: >>> class MyGroup(ExceptionGroup): ... def derive(self, exc): ... return MyGroup(self.message, exc) ... - >>> MyGroup("eg", [ValueError(1), TypeError(2)]).split(TypeError) - (MyGroup('eg', [TypeError(2)]), MyGroup('eg', [ValueError(1)])) + >>> e = MyGroup("eg", [ValueError(1), TypeError(2)]) + >>> e.add_note("a note") + >>> e.__context__ = Exception("context") + >>> e.__cause__ = Exception("cause") + >>> try: + ... raise e + ... except Exception as e: + ... exc = e + ... + >>> match, rest = exc.split(ValueError) + >>> exc, exc.__context__, exc.__cause__, exc.__notes__ + (MyGroup('eg', [ValueError(1), TypeError(2)]), Exception('context'), Exception('cause'), ['a note']) + >>> match, match.__context__, match.__cause__, match.__notes__ + (MyGroup('eg', [ValueError(1)]), Exception('context'), Exception('cause'), ['a note']) + >>> rest, rest.__context__, rest.__cause__, rest.__notes__ + (MyGroup('eg', [TypeError(2)]), Exception('context'), Exception('cause'), ['a note']) + >>> exc.__traceback__ is match.__traceback__ is rest.__traceback__ + True + Note that :exc:`BaseExceptionGroup` defines :meth:`__new__`, so subclasses that need a different constructor signature need to @@ -965,6 +986,10 @@ their subgroups based on the types of the contained exceptions. def derive(self, excs): return Errors(excs, self.exit_code) + Like :exc:`ExceptionGroup`, any subclass of :exc:`BaseExceptionGroup` which + is also a subclass of :exc:`Exception` can only wrap instances of + :exc:`Exception`. + .. versionadded:: 3.11 diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index 3751800b3b1118..c46d88b2297aa1 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -99,7 +99,7 @@ another rational number, or from a string. ``typing.SupportsInt`` instance checks. .. versionchanged:: 3.12 - Space is allowed around the slash for string inputs: `Fraction('2 / 3')`. + Space is allowed around the slash for string inputs: ``Fraction('2 / 3')``. .. attribute:: numerator diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 26ee302d2eabc3..110e7e5d7fb9a7 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -82,7 +82,8 @@ are always available. They are listed here in alphabetical order. return True -.. awaitablefunction:: anext(async_iterator[, default]) +.. awaitablefunction:: anext(async_iterator) + anext(async_iterator, default) When awaited, return the next item from the given :term:`asynchronous iterator`, or *default* if given and the iterator is exhausted. @@ -139,7 +140,7 @@ are always available. They are listed here in alphabetical order. See also :func:`format` for more information. -.. class:: bool([x]) +.. class:: bool(x=False) Return a Boolean value, i.e. one of ``True`` or ``False``. *x* is converted using the standard :ref:`truth testing procedure `. If *x* is false @@ -172,7 +173,9 @@ are always available. 
They are listed here in alphabetical order. .. versionadded:: 3.7 .. _func-bytearray: -.. class:: bytearray([source[, encoding[, errors]]]) +.. class:: bytearray(source=b'') + bytearray(source, encoding) + bytearray(source, encoding, errors) :noindex: Return a new array of bytes. The :class:`bytearray` class is a mutable @@ -202,7 +205,9 @@ are always available. They are listed here in alphabetical order. .. _func-bytes: -.. class:: bytes([source[, encoding[, errors]]]) +.. class:: bytes(source=b'') + bytes(source, encoding) + bytes(source, encoding, errors) :noindex: Return a new "bytes" object which is an immutable sequence of integers in @@ -358,7 +363,8 @@ are always available. They are listed here in alphabetical order. support for top-level ``await``, ``async for``, and ``async with``. -.. class:: complex([real[, imag]]) +.. class:: complex(real=0, imag=0) + complex(string) Return a complex number with the value *real* + *imag*\*1j or convert a string or number to a complex number. If the first parameter is a string, it will @@ -413,7 +419,8 @@ are always available. They are listed here in alphabetical order. :class:`tuple` classes, as well as the :mod:`collections` module. -.. function:: dir([object]) +.. function:: dir() + dir(object) Without arguments, return the list of names in the current local scope. With an argument, attempt to return a list of valid attributes for that object. @@ -505,7 +512,7 @@ are always available. They are listed here in alphabetical order. .. _func-eval: -.. function:: eval(expression[, globals[, locals]]) +.. function:: eval(expression, /, globals=None, locals=None) The arguments are a string and optional globals and locals. If provided, *globals* must be a dictionary. If provided, *locals* can be any mapping @@ -556,7 +563,7 @@ are always available. They are listed here in alphabetical order. .. index:: builtin: exec -.. function:: exec(object[, globals[, locals]], *, closure=None) +.. function:: exec(object, globals=None, locals=None, /, *, closure=None) This function supports dynamic execution of Python code. *object* must be either a string or a code object. If it is a string, the string is parsed as @@ -629,7 +636,7 @@ are always available. They are listed here in alphabetical order. elements of *iterable* for which *function* returns false. -.. class:: float([x]) +.. class:: float(x=0.0) .. index:: single: NaN @@ -697,7 +704,7 @@ are always available. They are listed here in alphabetical order. single: __format__ single: string; format() (built-in function) -.. function:: format(value[, format_spec]) +.. function:: format(value, format_spec="") Convert a *value* to a "formatted" representation, as controlled by *format_spec*. The interpretation of *format_spec* will depend on the type @@ -720,7 +727,7 @@ are always available. They are listed here in alphabetical order. .. _func-frozenset: -.. class:: frozenset([iterable]) +.. class:: frozenset(iterable=set()) :noindex: Return a new :class:`frozenset` object, optionally with elements taken from @@ -732,7 +739,8 @@ are always available. They are listed here in alphabetical order. module. -.. function:: getattr(object, name[, default]) +.. function:: getattr(object, name) + getattr(object, name, default) Return the value of the named attribute of *object*. *name* must be a string. If the string is the name of one of the object's attributes, the result is the @@ -777,7 +785,8 @@ are always available. They are listed here in alphabetical order. 
truncates the return value based on the bit width of the host machine. See :meth:`__hash__` for details. -.. function:: help([object]) +.. function:: help() + help(request) Invoke the built-in help system. (This function is intended for interactive use.) If no argument is given, the interactive help system starts on the @@ -842,7 +851,8 @@ are always available. They are listed here in alphabetical order. .. audit-event:: builtins.id id id -.. function:: input([prompt]) +.. function:: input() + input(prompt) If the *prompt* argument is present, it is written to standard output without a trailing newline. The function then reads a line from input, converts it @@ -868,7 +878,7 @@ are always available. They are listed here in alphabetical order. with the result after successfully reading input. -.. class:: int([x]) +.. class:: int(x=0) int(x, base=10) Return an integer object constructed from a number or string *x*, or return @@ -951,7 +961,8 @@ are always available. They are listed here in alphabetical order. *classinfo* can be a :ref:`types-union`. -.. function:: iter(object[, sentinel]) +.. function:: iter(object) + iter(object, sentinel) Return an :term:`iterator` object. The first argument is interpreted very differently depending on the presence of the second argument. Without a @@ -991,7 +1002,8 @@ are always available. They are listed here in alphabetical order. .. _func-list: -.. class:: list([iterable]) +.. class:: list() + list(iterable) :noindex: Rather than being a function, :class:`list` is actually a mutable @@ -1009,18 +1021,19 @@ are always available. They are listed here in alphabetical order. The contents of this dictionary should not be modified; changes may not affect the values of local and free variables used by the interpreter. -.. function:: map(function, iterable, ...) +.. function:: map(function, iterable, *iterables) Return an iterator that applies *function* to every item of *iterable*, - yielding the results. If additional *iterable* arguments are passed, + yielding the results. If additional *iterables* arguments are passed, *function* must take that many arguments and is applied to the items from all iterables in parallel. With multiple iterables, the iterator stops when the shortest iterable is exhausted. For cases where the function inputs are already arranged into argument tuples, see :func:`itertools.starmap`\. -.. function:: max(iterable, *[, key, default]) - max(arg1, arg2, *args[, key]) +.. function:: max(iterable, *, key=None) + max(iterable, *, default, key=None) + max(arg1, arg2, *args, key=None) Return the largest item in an iterable or the largest of two or more arguments. @@ -1056,8 +1069,9 @@ are always available. They are listed here in alphabetical order. :ref:`typememoryview` for more information. -.. function:: min(iterable, *[, key, default]) - min(arg1, arg2, *args[, key]) +.. function:: min(iterable, *, key=None) + min(iterable, *, default, key=None) + min(arg1, arg2, *args, key=None) Return the smallest item in an iterable or the smallest of two or more arguments. @@ -1085,7 +1099,8 @@ are always available. They are listed here in alphabetical order. The *key* can be ``None``. -.. function:: next(iterator[, default]) +.. function:: next(iterator) + next(iterator, default) Retrieve the next item from the :term:`iterator` by calling its :meth:`~iterator.__next__` method. If *default* is given, it is returned @@ -1254,8 +1269,8 @@ are always available. They are listed here in alphabetical order. .. 
_open-newline-parameter: - *newline* controls how :term:`universal newlines` mode works (it only - applies to text mode). It can be ``None``, ``''``, ``'\n'``, ``'\r'``, and + *newline* determines how to parse newline characters from the stream. + It can be ``None``, ``''``, ``'\n'``, ``'\r'``, and ``'\r\n'``. It works as follows: * When reading input from the stream, if *newline* is ``None``, universal @@ -1364,7 +1379,7 @@ are always available. They are listed here in alphabetical order. returns ``8364``. This is the inverse of :func:`chr`. -.. function:: pow(base, exp[, mod]) +.. function:: pow(base, exp, mod=None) Return *base* to the power *exp*; if *mod* is present, return *base* to the power *exp*, modulo *mod* (computed more efficiently than @@ -1404,7 +1419,7 @@ are always available. They are listed here in alphabetical order. supported. -.. function:: print(*objects, sep=' ', end='\n', file=sys.stdout, flush=False) +.. function:: print(*objects, sep=' ', end='\n', file=None, flush=False) Print *objects* to the text stream *file*, separated by *sep* and followed by *end*. *sep*, *end*, *file*, and *flush*, if present, must be given as keyword @@ -1508,7 +1523,7 @@ are always available. They are listed here in alphabetical order. .. _func-range: .. class:: range(stop) - range(start, stop[, step]) + range(start, stop, step=1) :noindex: Rather than being a function, :class:`range` is actually an immutable @@ -1536,7 +1551,7 @@ are always available. They are listed here in alphabetical order. arguments starting at ``0``). -.. function:: round(number[, ndigits]) +.. function:: round(number, ndigits=None) Return *number* rounded to *ndigits* precision after the decimal point. If *ndigits* is omitted or is ``None``, it returns the @@ -1564,7 +1579,8 @@ are always available. They are listed here in alphabetical order. .. _func-set: -.. class:: set([iterable]) +.. class:: set() + set(iterable) :noindex: Return a new :class:`set` object, optionally with elements taken from @@ -1599,7 +1615,7 @@ are always available. They are listed here in alphabetical order. .. class:: slice(stop) - slice(start, stop[, step]) + slice(start, stop, step=1) Return a :term:`slice` object representing the set of indices specified by ``range(start, stop, step)``. The *start* and *step* arguments default to @@ -1716,21 +1732,22 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.8 The *start* parameter can be specified as a keyword argument. -.. class:: super([type[, object-or-type]]) +.. class:: super() + super(type, object_or_type=None) Return a proxy object that delegates method calls to a parent or sibling class of *type*. This is useful for accessing inherited methods that have been overridden in a class. - The *object-or-type* determines the :term:`method resolution order` + The *object_or_type* determines the :term:`method resolution order` to be searched. The search starts from the class right after the *type*. - For example, if :attr:`~class.__mro__` of *object-or-type* is + For example, if :attr:`~class.__mro__` of *object_or_type* is ``D -> B -> C -> A -> object`` and the value of *type* is ``B``, then :func:`super` searches ``C -> A -> object``. - The :attr:`~class.__mro__` attribute of the *object-or-type* lists the method + The :attr:`~class.__mro__` attribute of the *object_or_type* lists the method resolution search order used by both :func:`getattr` and :func:`super`. The attribute is dynamic and can change whenever the inheritance hierarchy is updated. 
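
As a quick illustration of the two-argument form of :func:`super` described in
the hunk above (the classes here are hypothetical and not part of the patch)::

   class A:
       def who(self):
           return "A"

   class B(A):
       def who(self):
           return "B"

   class C(A):
       def who(self):
           return "C"

   class D(B, C):
       pass

   d = D()
   # D.__mro__ is (D, B, C, A, object); with *type* set to B, the search
   # starts just after B in the MRO, so C.who is found.
   print(super(B, d).who())   # prints "C"
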
@@ -1786,7 +1803,8 @@ are always available. They are listed here in alphabetical order. .. _func-tuple: -.. class:: tuple([iterable]) +.. class:: tuple() + tuple(iterable) :noindex: Rather than being a function, :class:`tuple` is actually an immutable @@ -1834,7 +1852,8 @@ are always available. They are listed here in alphabetical order. Subclasses of :class:`type` which don't override ``type.__new__`` may no longer use the one-argument form to get the type of an object. -.. function:: vars([object]) +.. function:: vars() + vars(object) Return the :attr:`~object.__dict__` attribute for a module, class, instance, or any other object with a :attr:`~object.__dict__` attribute. diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 943a05c39d6840..2f0a9bd8be8815 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -150,7 +150,7 @@ The :mod:`functools` module defines the following functions: arguments to the function must be hashable. Distinct argument patterns may be considered to be distinct calls with - separate cache entries. For example, `f(a=1, b=2)` and `f(b=2, a=1)` + separate cache entries. For example, ``f(a=1, b=2)`` and ``f(b=2, a=1)`` differ in their keyword argument order and may have two separate cache entries. @@ -197,7 +197,7 @@ The :mod:`functools` module defines the following functions: The cache keeps references to the arguments and return values until they age out of the cache or until the cache is cleared. - If a method is cached, the `self` instance argument is included in the + If a method is cached, the ``self`` instance argument is included in the cache. See :ref:`faq-cache-method-calls` An `LRU (least recently used) cache diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index 386541acf510ac..8e47312fe77bf5 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -426,7 +426,7 @@ Constructor functions also accept the following tree hashing parameters: BLAKE2s, 0 in sequential mode). * *last_node*: boolean indicating whether the processed node is the last - one (`False` for sequential mode). + one (``False`` for sequential mode). .. figure:: hashlib-blake2-tree.png :alt: Explanation of tree mode parameters. diff --git a/Doc/library/http.cookiejar.rst b/Doc/library/http.cookiejar.rst index eb31315438f66a..87ef156a0bed57 100644 --- a/Doc/library/http.cookiejar.rst +++ b/Doc/library/http.cookiejar.rst @@ -61,7 +61,7 @@ The following classes are provided: responsible for storing and retrieving cookies from a file or database. -.. class:: FileCookieJar(filename, delayload=None, policy=None) +.. class:: FileCookieJar(filename=None, delayload=None, policy=None) *policy* is an object implementing the :class:`CookiePolicy` interface. For the other arguments, see the documentation for the corresponding attributes. @@ -71,6 +71,8 @@ The following classes are provided: :meth:`load` or :meth:`revert` method is called. Subclasses of this class are documented in section :ref:`file-cookie-jar-classes`. + This should not be initialized directly – use its subclasses below instead. + .. versionchanged:: 3.8 The filename parameter supports a :term:`path-like object`. @@ -317,7 +319,7 @@ FileCookieJar subclasses and co-operation with web browsers The following :class:`CookieJar` subclasses are provided for reading and writing. -.. class:: MozillaCookieJar(filename, delayload=None, policy=None) +.. 
class:: MozillaCookieJar(filename=None, delayload=None, policy=None) A :class:`FileCookieJar` that can load from and save cookies to disk in the Mozilla ``cookies.txt`` file format (which is also used by curl and the Lynx @@ -338,7 +340,7 @@ writing. Mozilla. -.. class:: LWPCookieJar(filename, delayload=None, policy=None) +.. class:: LWPCookieJar(filename=None, delayload=None, policy=None) A :class:`FileCookieJar` that can load from and save cookies to disk in format compatible with the libwww-perl library's ``Set-Cookie3`` file format. This is diff --git a/Doc/library/http.rst b/Doc/library/http.rst index 9d002dc33ccb8d..5e1912716e5319 100644 --- a/Doc/library/http.rst +++ b/Doc/library/http.rst @@ -171,16 +171,25 @@ Property Indicates that Details Usage:: >>> from http import HTTPMethod - >>> HTTMethod.GET - HTTMethod.GET - >>> HTTMethod.GET == 'GET' + >>> + >>> HTTPMethod.GET + + >>> HTTPMethod.GET == 'GET' True - >>> HTTMethod.GET.value + >>> HTTPMethod.GET.value 'GET' - >>> HTTMethod.GET.description - 'Transfer a current representation of the target resource.' + >>> HTTPMethod.GET.description + 'Retrieve the target.' >>> list(HTTPMethod) - [HTTPMethod.GET, HTTPMethod.HEAD, ...] + [, + , + , + , + , + , + , + , + ] .. _http-methods: diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 48f952daae12f5..81b6bf5373b495 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -392,8 +392,8 @@ provides three different variants: contents of the file are output. If the file's MIME type starts with ``text/`` the file is opened in text mode; otherwise binary mode is used. - For example usage, see the implementation of the :func:`test` function - invocation in the :mod:`http.server` module. + For example usage, see the implementation of the ``test`` function + in :source:`Lib/http/server.py`. .. versionchanged:: 3.7 Support of the ``'If-Modified-Since'`` header. diff --git a/Doc/library/imghdr.rst b/Doc/library/imghdr.rst index 318fe650776d2a..630fd7019f94de 100644 --- a/Doc/library/imghdr.rst +++ b/Doc/library/imghdr.rst @@ -21,8 +21,8 @@ The :mod:`imghdr` module defines the following function: .. function:: what(file, h=None) - Tests the image data contained in the file named by *file*, and returns a - string describing the image type. If optional *h* is provided, the *file* + Test the image data contained in the file named *file* and return a + string describing the image type. If *h* is provided, the *file* argument is ignored and *h* is assumed to contain the byte stream to test. .. versionchanged:: 3.6 diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index a1af7a754ba4ef..988d1a317f5960 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -13,21 +13,39 @@ **Source code:** :source:`Lib/importlib/metadata/__init__.py` -``importlib.metadata`` is a library that provides access to installed -package metadata, such as its entry points or its -top-level name. Built in part on Python's import system, this library +``importlib_metadata`` is a library that provides access to +the metadata of an installed `Distribution Package `_, +such as its entry points +or its top-level names (`Import Package `_\s, modules, if any). +Built in part on Python's import system, this library intends to replace similar functionality in the `entry point API`_ and `metadata API`_ of ``pkg_resources``. 
Along with :mod:`importlib.resources`, this package can eliminate the need to use the older and less efficient ``pkg_resources`` package. -By "installed package" we generally mean a third-party package installed into -Python's ``site-packages`` directory via tools such as `pip -`_. Specifically, -it means a package with either a discoverable ``dist-info`` or ``egg-info`` -directory, and metadata defined by :pep:`566` or its older specifications. -By default, package metadata can live on the file system or in zip archives on +``importlib_metadata`` operates on third-party *distribution packages* +installed into Python's ``site-packages`` directory via tools such as +`pip `_. +Specifically, it works with distributions with discoverable +``dist-info`` or ``egg-info`` directories, +and metadata defined by the `Core metadata specifications `_. + +.. important:: + + These are *not* necessarily equivalent to or correspond 1:1 with + the top-level *import package* names + that can be imported inside Python code. + One *distribution package* can contain multiple *import packages* + (and single modules), + and one top-level *import package* + may map to multiple *distribution packages* + if it is a namespace package. + You can use :ref:`package_distributions() ` + to get a mapping between them. + +By default, distribution metadata can live on the file system +or in zip archives on :data:`sys.path`. Through an extension mechanism, the metadata can live almost anywhere. @@ -37,12 +55,19 @@ anywhere. https://importlib-metadata.readthedocs.io/ The documentation for ``importlib_metadata``, which supplies a backport of ``importlib.metadata``. + This includes an `API reference + `__ + for this module's classes and functions, + as well as a `migration guide + `__ + for existing users of ``pkg_resources``. Overview ======== -Let's say you wanted to get the version string for a package you've installed +Let's say you wanted to get the version string for a +`Distribution Package `_ you've installed using ``pip``. We start by creating a virtual environment and installing something into it: @@ -151,11 +176,10 @@ for more information on entry points, their definition, and usage. The "selectable" entry points were introduced in ``importlib_metadata`` 3.6 and Python 3.10. Prior to those changes, ``entry_points`` accepted no parameters and always returned a dictionary of entry points, keyed -by group. For compatibility, if no parameters are passed to entry_points, -a ``SelectableGroups`` object is returned, implementing that dict -interface. In the future, calling ``entry_points`` with no parameters -will return an ``EntryPoints`` object. Users should rely on the selection -interface to retrieve entry points by group. +by group. With ``importlib_metadata`` 5.0 and Python 3.12, +``entry_points`` always returns an ``EntryPoints`` object. See +`backports.entry_points_selectable `_ +for compatibility options. .. _metadata: @@ -163,7 +187,8 @@ interface to retrieve entry points by group. 
Distribution metadata --------------------- -Every distribution includes some metadata, which you can extract using the +Every `Distribution Package `_ includes some metadata, +which you can extract using the ``metadata()`` function:: >>> wheel_metadata = metadata('wheel') # doctest: +SKIP @@ -186,7 +211,7 @@ all the metadata in a JSON-compatible form per :PEP:`566`:: The actual type of the object returned by ``metadata()`` is an implementation detail and should be accessed only through the interface described by the - `PackageMetadata protocol `. + `PackageMetadata protocol `_. .. versionchanged:: 3.10 The ``Description`` is now included in the metadata when presented @@ -201,7 +226,8 @@ all the metadata in a JSON-compatible form per :PEP:`566`:: Distribution versions --------------------- -The ``version()`` function is the quickest way to get a distribution's version +The ``version()`` function is the quickest way to get a +`Distribution Package `_'s version number, as a string:: >>> version('wheel') # doctest: +SKIP @@ -214,7 +240,8 @@ Distribution files ------------------ You can also get the full set of files contained within a distribution. The -``files()`` function takes a distribution package name and returns all of the +``files()`` function takes a `Distribution Package `_ name +and returns all of the files installed by this distribution. Each file object returned is a ``PackagePath``, a :class:`pathlib.PurePath` derived object with additional ``dist``, ``size``, and ``hash`` properties as indicated by the metadata. For example:: @@ -259,19 +286,24 @@ distribution is not known to have the metadata present. Distribution requirements ------------------------- -To get the full set of requirements for a distribution, use the ``requires()`` +To get the full set of requirements for a `Distribution Package `_, +use the ``requires()`` function:: >>> requires('wheel') # doctest: +SKIP ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"] -Package distributions ---------------------- +.. _package-distributions: +.. _import-distribution-package-mapping: + +Mapping import to distribution packages +--------------------------------------- -A convenience method to resolve the distribution or -distributions (in the case of a namespace package) for top-level -Python packages or modules:: +A convenience method to resolve the `Distribution Package `_ +name (or names, in the case of a namespace package) +that provide each importable top-level +Python module or `Import Package `_:: >>> packages_distributions() {'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...} @@ -285,7 +317,8 @@ Distributions While the above API is the most common and convenient usage, you can get all of that information from the ``Distribution`` class. A ``Distribution`` is an -abstract object that represents the metadata for a Python package. You can +abstract object that represents the metadata for +a Python `Distribution Package `_. You can get the ``Distribution`` instance:: >>> from importlib.metadata import distribution # doctest: +SKIP @@ -305,14 +338,16 @@ instance:: >>> dist.metadata['License'] # doctest: +SKIP 'MIT' -The full set of available metadata is not described here. See :pep:`566` -for additional details. +The full set of available metadata is not described here. +See the `Core metadata specifications `_ for additional details. 
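
As a quick sketch of the functions described above (assuming a distribution
named ``wheel`` is installed; any installed distribution name works)::

   from importlib.metadata import distribution, requires, version

   print(version("wheel"))          # version string of the installed distribution
   print(requires("wheel"))         # list of requirement strings, or None
   dist = distribution("wheel")     # the full Distribution object
   print(dist.metadata["Name"])     # any core metadata field can be read this way
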
Distribution Discovery ====================== -By default, this package provides built-in support for discovery of metadata for file system and zip file packages. This metadata finder search defaults to ``sys.path``, but varies slightly in how it interprets those values from how other import machinery does. In particular: +By default, this package provides built-in support for discovery of metadata +for file system and zip file `Distribution Package `_\s. +This metadata finder search defaults to ``sys.path``, but varies slightly in how it interprets those values from how other import machinery does. In particular: - ``importlib.metadata`` does not honor :class:`bytes` objects on ``sys.path``. - ``importlib.metadata`` will incidentally honor :py:class:`pathlib.Path` objects on ``sys.path`` even though such values will be ignored for imports. @@ -321,15 +356,18 @@ By default, this package provides built-in support for discovery of metadata for Extending the search algorithm ============================== -Because package metadata is not available through :data:`sys.path` searches, or -package loaders directly, the metadata for a package is found through import +Because `Distribution Package `_ metadata +is not available through :data:`sys.path` searches, or +package loaders directly, +the metadata for a distribution is found through import system :ref:`finders `. To find a distribution package's metadata, ``importlib.metadata`` queries the list of :term:`meta path finders ` on :data:`sys.meta_path`. -The default ``PathFinder`` for Python includes a hook that calls into -``importlib.metadata.MetadataPathFinder`` for finding distributions -loaded from typical file-system-based paths. +By default ``importlib_metadata`` installs a finder for distribution packages +found on the file system. +This finder doesn't actually find any *distributions*, +but it can find their metadata. The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the interface expected of finders by Python's import system. @@ -358,4 +396,3 @@ a custom finder, return instances of this derived ``Distribution`` in the .. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points .. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api -.. _`importlib_resources`: https://importlib-resources.readthedocs.io/en/latest/index.html diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index a7c067c06e8ec2..3fc1531c0cdf19 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -443,7 +443,7 @@ ABC hierarchy:: from the import. If the loader inserted a module and the load fails, it must be removed by the loader from :data:`sys.modules`; modules already in :data:`sys.modules` before the loader began execution should be left - alone (see :func:`importlib.util.module_for_loader`). + alone. The loader should set several attributes on the module (note that some of these attributes can change when a module is @@ -1326,58 +1326,6 @@ an :term:`importer`. .. versionadded:: 3.5 -.. decorator:: module_for_loader - - A :term:`decorator` for :meth:`importlib.abc.Loader.load_module` - to handle selecting the proper - module object to load with. The decorated method is expected to have a call - signature taking two positional arguments - (e.g. ``load_module(self, module)``) for which the second argument - will be the module **object** to be used by the loader. 
- Note that the decorator will not work on static methods because of the - assumption of two arguments. - - The decorated method will take in the **name** of the module to be loaded - as expected for a :term:`loader`. If the module is not found in - :data:`sys.modules` then a new one is constructed. Regardless of where the - module came from, :attr:`__loader__` set to **self** and :attr:`__package__` - is set based on what :meth:`importlib.abc.InspectLoader.is_package` returns - (if available). These attributes are set unconditionally to support - reloading. - - If an exception is raised by the decorated method and a module was added to - :data:`sys.modules`, then the module will be removed to prevent a partially - initialized module from being in left in :data:`sys.modules`. If the module - was already in :data:`sys.modules` then it is left alone. - - .. versionchanged:: 3.3 - :attr:`__loader__` and :attr:`__package__` are automatically set - (when possible). - - .. versionchanged:: 3.4 - Set :attr:`__name__`, :attr:`__loader__` :attr:`__package__` - unconditionally to support reloading. - - .. deprecated:: 3.4 - The import machinery now directly performs all the functionality - provided by this function. - -.. decorator:: set_loader - - A :term:`decorator` for :meth:`importlib.abc.Loader.load_module` - to set the :attr:`__loader__` - attribute on the returned module. If the attribute is already set the - decorator does nothing. It is assumed that the first positional argument to - the wrapped method (i.e. ``self``) is what :attr:`__loader__` should be set - to. - - .. versionchanged:: 3.4 - Set ``__loader__`` if set to ``None``, as if the attribute does not - exist. - - .. deprecated:: 3.4 - The import machinery takes care of this automatically. - .. function:: spec_from_loader(name, loader, *, origin=None, is_package=None) A factory function for creating a :class:`~importlib.machinery.ModuleSpec` diff --git a/Doc/library/index.rst b/Doc/library/index.rst index 7d2002b37df12c..d064b680f9aaa4 100644 --- a/Doc/library/index.rst +++ b/Doc/library/index.rst @@ -27,8 +27,8 @@ as a collection of packages, so it may be necessary to use the packaging tools provided with the operating system to obtain some or all of the optional components. -In addition to the standard library, there is a growing collection of -several thousand components (from individual programs and modules to +In addition to the standard library, there is an active collection of +hundreds of thousands of components (from individual programs and modules to packages and entire application development frameworks), available from the `Python Package Index `_. diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 700cd9122cd34c..9cb7a6f94e49cd 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -32,7 +32,7 @@ The :func:`getmembers` function retrieves the members of an object such as a class or module. The functions whose names begin with "is" are mainly provided as convenient choices for the second argument to :func:`getmembers`. They also help you determine when you can expect to find the following special -attributes: +attributes (see :ref:`import-mod-attrs` for module attributes): .. this function name is too big to fit in the ascii-art table below .. 
|coroutine-origin-link| replace:: :func:`sys.set_coroutine_origin_tracking_depth` @@ -40,11 +40,6 @@ attributes: +-----------+-------------------+---------------------------+ | Type | Attribute | Description | +===========+===================+===========================+ -| module | __doc__ | documentation string | -+-----------+-------------------+---------------------------+ -| | __file__ | filename (missing for | -| | | built-in modules) | -+-----------+-------------------+---------------------------+ | class | __doc__ | documentation string | +-----------+-------------------+---------------------------+ | | __name__ | name with which this | @@ -434,8 +429,10 @@ attributes: Return ``True`` if the type of object is a :class:`~types.MethodWrapperType`. - These are instances of :class:`~types.MethodWrapperType`, such as :meth:`~object().__str__`, - :meth:`~object().__eq__` and :meth:`~object().__repr__` + These are instances of :class:`~types.MethodWrapperType`, such as :meth:`~object.__str__`, + :meth:`~object.__eq__` and :meth:`~object.__repr__`. + + .. versionadded:: 3.11 .. function:: isroutine(object) @@ -1206,12 +1203,13 @@ is considered deprecated and may be removed in the future. number, start column offset, and end column offset associated with the instruction being executed by the frame this record corresponds to. -.. versionchanged:: 3.5 - Return a named tuple instead of a tuple. + .. versionchanged:: 3.5 + Return a :term:`named tuple` instead of a :class:`tuple`. + + .. versionchanged:: 3.11 + :class:`!FrameInfo` is now a class instance + (that is backwards compatible with the previous :term:`named tuple`). -.. versionchanged:: 3.11 - Changed the return object from a named tuple to a regular object (that is - backwards compatible with the previous named tuple). .. class:: Traceback @@ -1245,6 +1243,11 @@ is considered deprecated and may be removed in the future. the instruction being executed by the frame this traceback corresponds to. + .. versionchanged:: 3.11 + :class:`!Traceback` is now a class instance + (that is backwards compatible with the previous :term:`named tuple`). + + .. note:: Keeping references to frame objects, as found in the first element of the frame diff --git a/Doc/library/intro.rst b/Doc/library/intro.rst index 1020924038e91f..5a4c9b8b16ab3b 100644 --- a/Doc/library/intro.rst +++ b/Doc/library/intro.rst @@ -114,7 +114,7 @@ DOM APIs as well as limited networking capabilities with JavaScript's .. _WebAssembly: https://webassembly.org/ .. _Emscripten: https://emscripten.org/ -.. _Emscripten Networking: https://emscripten.org/docs/porting/networking.html> +.. _Emscripten Networking: https://emscripten.org/docs/porting/networking.html .. _WASI: https://wasi.dev/ .. _wasmtime: https://wasmtime.dev/ .. _Pyodide: https://pyodide.org/ diff --git a/Doc/library/io.rst b/Doc/library/io.rst index 8fd6b3537019aa..0968509fbafec2 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -1055,10 +1055,10 @@ Text I/O The initial value of the buffer can be set by providing *initial_value*. If newline translation is enabled, newlines will be encoded as if by :meth:`~TextIOBase.write`. The stream is positioned at the start of the - buffer which emulates opening an existing file in a `w+` mode, making it + buffer which emulates opening an existing file in a ``w+`` mode, making it ready for an immediate write from the beginning or for a write that - would overwrite the initial value. 
To emulate opening a file in an `a+` - mode ready for appending, use `f.seek(0, io.SEEK_END)` to reposition the + would overwrite the initial value. To emulate opening a file in an ``a+`` + mode ready for appending, use ``f.seek(0, io.SEEK_END)`` to reposition the stream at the end of the buffer. The *newline* argument works like that of :class:`TextIOWrapper`, diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index b83163560f4d9a..0b5978505a9672 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -10,6 +10,10 @@ .. testsetup:: from itertools import * + import collections + import math + import operator + import random -------------- @@ -48,6 +52,7 @@ Iterator Arguments Results Iterator Arguments Results Example ============================ ============================ ================================================= ============================================================= :func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15`` +:func:`batched` p, n [p0, p1, ..., p_n-1], ... ``batched('ABCDEFG', n=3) --> ABC DEF G`` :func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') --> A B C D E F`` :func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) --> A B C D E F`` :func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F`` @@ -132,10 +137,9 @@ loops that truncate the stream. There are a number of uses for the *func* argument. It can be set to :func:`min` for a running minimum, :func:`max` for a running maximum, or :func:`operator.mul` for a running product. Amortization tables can be - built by accumulating interest and applying payments. First-order - `recurrence relations `_ - can be modeled by supplying the initial value in the iterable and using only - the accumulated total in *func* argument:: + built by accumulating interest and applying payments: + + .. doctest:: >>> data = [3, 4, 6, 2, 1, 9, 0, 7, 5, 8] >>> list(accumulate(data, operator.mul)) # running product @@ -148,17 +152,6 @@ loops that truncate the stream. >>> list(accumulate(cashflows, lambda bal, pmt: bal*1.05 + pmt)) [1000, 960.0, 918.0, 873.9000000000001, 827.5950000000001] - # Chaotic recurrence relation https://en.wikipedia.org/wiki/Logistic_map - >>> logistic_map = lambda x, _: r * x * (1 - x) - >>> r = 3.8 - >>> x0 = 0.4 - >>> inputs = repeat(x0, 36) # only the initial value is used - >>> [format(x, '.2f') for x in accumulate(inputs, logistic_map)] - ['0.40', '0.91', '0.30', '0.81', '0.60', '0.92', '0.29', '0.79', '0.63', - '0.88', '0.39', '0.90', '0.33', '0.84', '0.52', '0.95', '0.18', '0.57', - '0.93', '0.25', '0.71', '0.79', '0.63', '0.88', '0.39', '0.91', '0.32', - '0.83', '0.54', '0.95', '0.20', '0.60', '0.91', '0.30', '0.80', '0.60'] - See :func:`functools.reduce` for a similar function that returns only the final accumulated value. @@ -170,6 +163,44 @@ loops that truncate the stream. .. versionchanged:: 3.8 Added the optional *initial* parameter. + +.. function:: batched(iterable, n) + + Batch data from the *iterable* into lists of length *n*. The last + batch may be shorter than *n*. + + Loops over the input iterable and accumulates data into lists up to + size *n*. The input is consumed lazily, just enough to fill a list. + The result is yielded as soon as the batch is full or when the input + iterable is exhausted: + + .. 
doctest:: + + >>> flattened_data = ['roses', 'red', 'violets', 'blue', 'sugar', 'sweet'] + >>> unflattened = list(batched(flattened_data, 2)) + >>> unflattened + [['roses', 'red'], ['violets', 'blue'], ['sugar', 'sweet']] + + >>> for batch in batched('ABCDEFG', 3): + ... print(batch) + ... + ['A', 'B', 'C'] + ['D', 'E', 'F'] + ['G'] + + Roughly equivalent to:: + + def batched(iterable, n): + # batched('ABCDEFG', 3) --> ABC DEF G + if n < 1: + raise ValueError('n must be at least one') + it = iter(iterable) + while (batch := list(islice(it, n))): + yield batch + + .. versionadded:: 3.12 + + .. function:: chain(*iterables) Make an iterator that returns elements from the first iterable until it is @@ -202,10 +233,10 @@ loops that truncate the stream. The combination tuples are emitted in lexicographic ordering according to the order of the input *iterable*. So, if the input *iterable* is sorted, - the combination tuples will be produced in sorted order. + the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their - value. So if the input elements are unique, there will be no repeat + value. So if the input elements are unique, there will be no repeated values in each combination. Roughly equivalent to:: @@ -251,7 +282,7 @@ loops that truncate the stream. The combination tuples are emitted in lexicographic ordering according to the order of the input *iterable*. So, if the input *iterable* is sorted, - the combination tuples will be produced in sorted order. + the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their value. So if the input elements are unique, the generated combinations @@ -410,14 +441,17 @@ loops that truncate the stream. class groupby: # [k for k, g in groupby('AAAABBBCCDAABBB')] --> A B C D A B # [list(g) for k, g in groupby('AAAABBBCCD')] --> AAAA BBB CC D + def __init__(self, iterable, key=None): if key is None: key = lambda x: x self.keyfunc = key self.it = iter(iterable) self.tgtkey = self.currkey = self.currvalue = object() + def __iter__(self): return self + def __next__(self): self.id = object() while self.currkey == self.tgtkey: @@ -425,6 +459,7 @@ loops that truncate the stream. self.currkey = self.keyfunc(self.currvalue) self.tgtkey = self.currkey return (self.currkey, self._grouper(self.tgtkey, self.id)) + def _grouper(self, tgtkey, id): while self.id is id and self.currkey == tgtkey: yield self.currvalue @@ -443,10 +478,17 @@ loops that truncate the stream. Afterward, elements are returned consecutively unless *step* is set higher than one which results in items being skipped. If *stop* is ``None``, then iteration continues until the iterator is exhausted, if at all; otherwise, it stops at the - specified position. Unlike regular slicing, :func:`islice` does not support - negative values for *start*, *stop*, or *step*. Can be used to extract related - fields from data where the internal structure has been flattened (for example, a - multi-line report may list a name field on every third line). Roughly equivalent to:: + specified position. + + If *start* is ``None``, then iteration starts at zero. If *step* is ``None``, + then the step defaults to one. + + Unlike regular slicing, :func:`islice` does not support negative values for + *start*, *stop*, or *step*. Can be used to extract related fields from + data where the internal structure has been flattened (for example, a + multi-line report may list a name field on every third line). 
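As a hedged sketch of the "every third line" use mentioned above (the flattened report data below is made up purely for illustration)::

    >>> from itertools import islice
    >>> report = ['Alice', '42', 'admin', 'Bob', '17', 'guest', 'Carol', '33', 'staff']
    >>> list(islice(report, 0, None, 3))   # name field appears on every third line
    ['Alice', 'Bob', 'Carol']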
+ + Roughly equivalent to:: def islice(iterable, *args): # islice('ABCDEFG', 2) --> A B @@ -473,8 +515,6 @@ loops that truncate the stream. for i, element in zip(range(i + 1, stop), iterable): pass - If *start* is ``None``, then iteration starts at zero. If *step* is ``None``, - then the step defaults to one. .. function:: pairwise(iterable) @@ -503,13 +543,13 @@ loops that truncate the stream. of the *iterable* and all possible full-length permutations are generated. - The permutation tuples are emitted in lexicographic ordering according to + The permutation tuples are emitted in lexicographic order according to the order of the input *iterable*. So, if the input *iterable* is sorted, - the combination tuples will be produced in sorted order. + the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their - value. So if the input elements are unique, there will be no repeat - values in each permutation. + value. So if the input elements are unique, there will be no repeated + values within a permutation. Roughly equivalent to:: @@ -589,9 +629,7 @@ loops that truncate the stream. .. function:: repeat(object[, times]) Make an iterator that returns *object* over and over again. Runs indefinitely - unless the *times* argument is specified. Used as argument to :func:`map` for - invariant parameters to the called function. Also used with :func:`zip` to - create an invariant part of a tuple record. + unless the *times* argument is specified. Roughly equivalent to:: @@ -605,7 +643,9 @@ loops that truncate the stream. yield object A common use for *repeat* is to supply a stream of constant values to *map* - or *zip*:: + or *zip*: + + .. doctest:: >>> list(map(pow, range(10), repeat(2))) [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] @@ -614,9 +654,12 @@ loops that truncate the stream. Make an iterator that computes the function using arguments obtained from the iterable. Used instead of :func:`map` when argument parameters are already - grouped in tuples from a single iterable (the data has been "pre-zipped"). The - difference between :func:`map` and :func:`starmap` parallels the distinction - between ``function(a,b)`` and ``function(*c)``. Roughly equivalent to:: + grouped in tuples from a single iterable (when the data has been + "pre-zipped"). + + The difference between :func:`map` and :func:`starmap` parallels the + distinction between ``function(a,b)`` and ``function(*c)``. Roughly + equivalent to:: def starmap(function, iterable): # starmap(pow, [(2,5), (3,2), (10,3)]) --> 32 9 1000 @@ -644,9 +687,7 @@ loops that truncate the stream. The following Python code helps explain what *tee* does (although the actual implementation is more complex and uses only a single underlying - :abbr:`FIFO (first-in, first-out)` queue). - - Roughly equivalent to:: + :abbr:`FIFO (first-in, first-out)` queue):: def tee(iterable, n=2): it = iter(iterable) @@ -663,7 +704,7 @@ loops that truncate the stream. yield mydeque.popleft() return tuple(gen(d) for d in deques) - Once :func:`tee` has made a split, the original *iterable* should not be + Once a :func:`tee` has been created, the original *iterable* should not be used anywhere else; otherwise, the *iterable* could get advanced without the tee objects being informed. @@ -717,14 +758,28 @@ Itertools Recipes This section shows recipes for creating an extended toolset using the existing itertools as building blocks. +The primary purpose of the itertools recipes is educational. 
The recipes show +various ways of thinking about individual tools — for example, that +``chain.from_iterable`` is related to the concept of flattening. The recipes +also give ideas about ways that the tools can be combined — for example, how +``compress()`` and ``range()`` can work together. The recipes also show patterns +for using itertools with the :mod:`operator` and :mod:`collections` modules as +well as with the built-in itertools such as ``map()``, ``filter()``, +``reversed()``, and ``enumerate()``. + +A secondary purpose of the recipes is to serve as an incubator. The +``accumulate()``, ``compress()``, and ``pairwise()`` itertools started out as +recipes. Currently, the ``iter_index()`` recipe is being tested to see +whether it proves its worth. + Substantially all of these recipes and many, many others can be installed from the `more-itertools project `_ found on the Python Package Index:: python -m pip install more-itertools -The extended tools offer the same high performance as the underlying toolset. -The superior memory performance is kept by processing elements one at a time +Many of the recipes offer the same high performance as the underlying toolset. +Superior memory performance is kept by processing elements one at a time rather than bringing the whole iterable into memory all at once. Code volume is kept small by linking the tools together in a functional style which helps eliminate temporary variables. High speed is retained by preferring @@ -775,10 +830,7 @@ which incur interpreter overhead. return sum(map(pred, iterable)) def pad_none(iterable): - """Returns the sequence elements and then returns None indefinitely. - - Useful for emulating the behavior of the built-in map() function. - """ + "Returns the sequence elements and then returns None indefinitely." return chain(iterable, repeat(None)) def ncycles(iterable, n): @@ -812,15 +864,36 @@ which incur interpreter overhead. for k in range(len(roots) + 1) ] + def iter_index(iterable, value, start=0): + "Return indices where a value occurs in a sequence or iterable." + # iter_index('AABCADEAF', 'A') --> 0 1 4 7 + try: + seq_index = iterable.index + except AttributeError: + # Slow path for general iterables + it = islice(iterable, start, None) + for i, element in enumerate(it, start): + if element is value or element == value: + yield i + else: + # Fast path for sequences + i = start - 1 + try: + while True: + yield (i := seq_index(value, i+1)) + except ValueError: + pass + def sieve(n): - "Primes less than n" - # sieve(30) --> 2 3 5 7 11 13 17 19 23 29 - data = bytearray([1]) * n - data[:2] = 0, 0 - limit = math.isqrt(n) + 1 - for p in compress(range(limit), data): - data[p+p : n : p] = bytearray(len(range(p+p, n, p))) - return compress(count(), data) + "Primes less than n" + # sieve(30) --> 2 3 5 7 11 13 17 19 23 29 + data = bytearray((0, 1)) * (n // 2) + data[:3] = 0, 0, 0 + limit = math.isqrt(n) + 1 + for p in compress(range(limit), data): + data[p*p : n : p+p] = bytes(len(range(p*p, n, p+p))) + data[2] = 1 + return iter_index(data, 1) if n > 2 else iter([]) def flatten(list_of_lists): "Flatten one level of nesting" @@ -931,16 +1004,19 @@ which incur interpreter overhead. # unique_everseen('AAAABBBCCDAABBB') --> A B C D # unique_everseen('ABBCcAD', str.lower) --> A B C D seen = set() - seen_add = seen.add if key is None: for element in filterfalse(seen.__contains__, iterable): - seen_add(element) + seen.add(element) yield element + # Note: The steps shown above are intended to demonstrate + # filterfalse(). 
For order preserving deduplication, + # a better solution is: + # yield from dict.fromkeys(iterable) else: for element in iterable: k = key(element) if k not in seen: - seen_add(k) + seen.add(k) yield element def unique_justseen(iterable, key=None): @@ -985,31 +1061,6 @@ which incur interpreter overhead. # first_true([a,b], x, f) --> a if f(a) else b if f(b) else x return next(filter(pred, iterable), default) - def random_product(*args, repeat=1): - "Random selection from itertools.product(*args, **kwds)" - pools = [tuple(pool) for pool in args] * repeat - return tuple(map(random.choice, pools)) - - def random_permutation(iterable, r=None): - "Random selection from itertools.permutations(iterable, r)" - pool = tuple(iterable) - r = len(pool) if r is None else r - return tuple(random.sample(pool, r)) - - def random_combination(iterable, r): - "Random selection from itertools.combinations(iterable, r)" - pool = tuple(iterable) - n = len(pool) - indices = sorted(random.sample(range(n), r)) - return tuple(pool[i] for i in indices) - - def random_combination_with_replacement(iterable, r): - "Random selection from itertools.combinations_with_replacement(iterable, r)" - pool = tuple(iterable) - n = len(pool) - indices = sorted(random.choices(range(n), k=r)) - return tuple(pool[i] for i in indices) - def nth_combination(iterable, r, index): "Equivalent to list(combinations(iterable, r))[index]" pool = tuple(iterable) @@ -1166,10 +1217,31 @@ which incur interpreter overhead. >>> all(factored(x) == expanded(x) for x in range(-10, 11)) True + >>> list(iter_index('AABCADEAF', 'A')) + [0, 1, 4, 7] + >>> list(iter_index('AABCADEAF', 'B')) + [2] + >>> list(iter_index('AABCADEAF', 'X')) + [] + >>> list(iter_index('', 'X')) + [] + >>> list(iter_index('AABCADEAF', 'A', 1)) + [1, 4, 7] + >>> list(iter_index(iter('AABCADEAF'), 'A', 1)) + [1, 4, 7] + >>> list(iter_index('AABCADEAF', 'A', 2)) + [4, 7] + >>> list(iter_index(iter('AABCADEAF'), 'A', 2)) + [4, 7] + >>> list(iter_index('AABCADEAF', 'A', 10)) + [] + >>> list(iter_index(iter('AABCADEAF'), 'A', 10)) + [] + >>> list(sieve(30)) [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] - >>> small_primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59] - >>> all(list(sieve(n)) == [p for p in small_primes if p < n] for n in range(60)) + >>> small_primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97] + >>> all(list(sieve(n)) == [p for p in small_primes if p < n] for n in range(101)) True >>> len(list(sieve(100))) 25 diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index 5bb09b68ca138c..1e46b0f3bf5b03 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -147,12 +147,12 @@ The :mod:`locale` module defines the following exception and functions: | ``CHAR_MAX`` | Nothing is specified in this locale. | +--------------+-----------------------------------------+ - The function sets temporarily the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` + The function temporarily sets the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` locale or the ``LC_MONETARY`` locale if locales are different and numeric or monetary strings are non-ASCII. This temporary change affects other threads. .. versionchanged:: 3.7 - The function now sets temporarily the ``LC_CTYPE`` locale to the + The function now temporarily sets the ``LC_CTYPE`` locale to the ``LC_NUMERIC`` locale in some cases. 
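To make the grouping-aware formatting touched by this locale diff concrete, a minimal sketch, assuming the ``en_US.UTF-8`` locale is installed (exact output depends on the platform's locale data)::

    >>> import locale
    >>> locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    'en_US.UTF-8'
    >>> locale.format_string('%.2f', 1234567.891, grouping=True)
    '1,234,567.89'
    >>> locale.currency(1234567.891, grouping=True)
    '$1,234,567.89'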
@@ -227,16 +227,18 @@ The :mod:`locale` module defines the following exception and functions: Get a regular expression that can be used with the regex function to recognize a positive response to a yes/no question. - .. note:: - - The expression is in the syntax suitable for the :c:func:`regex` function - from the C library, which might differ from the syntax used in :mod:`re`. - .. data:: NOEXPR Get a regular expression that can be used with the regex(3) function to recognize a negative response to a yes/no question. + .. note:: + + The regular expressions for :const:`YESEXPR` and + :const:`NOEXPR` use syntax suitable for the + :c:func:`regex` function from the C library, which might + differ from the syntax used in :mod:`re`. + .. data:: CRNCYSTR Get the currency symbol, preceded by "-" if the symbol should appear before @@ -399,7 +401,7 @@ The :mod:`locale` module defines the following exception and functions: Formats a number *val* according to the current :const:`LC_NUMERIC` setting. The format follows the conventions of the ``%`` operator. For floating point - values, the decimal point is modified if appropriate. If *grouping* is true, + values, the decimal point is modified if appropriate. If *grouping* is ``True``, also takes the grouping into account. If *monetary* is true, the conversion uses monetary thousands separator and @@ -417,12 +419,14 @@ The :mod:`locale` module defines the following exception and functions: Formats a number *val* according to the current :const:`LC_MONETARY` settings. The returned string includes the currency symbol if *symbol* is true, which is - the default. If *grouping* is true (which is not the default), grouping is done - with the value. If *international* is true (which is not the default), the + the default. If *grouping* is ``True`` (which is not the default), grouping is done + with the value. If *international* is ``True`` (which is not the default), the international currency symbol is used. - Note that this function will not work with the 'C' locale, so you have to set a - locale via :func:`setlocale` first. + .. note:: + + This function will not work with the 'C' locale, so you have to set a + locale via :func:`setlocale` first. .. function:: str(float) @@ -609,4 +613,3 @@ applications that link with additional C libraries which internally invoke :c:func:`gettext` or :c:func:`dcgettext`. For these applications, it may be necessary to bind the text domain, so that the libraries can properly locate their message catalogs. - diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst index f3b26e726aafc7..d4429d3d0a4f73 100644 --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -650,6 +650,17 @@ supports sending logging messages to a remote or local Unix syslog. Closes the socket to the remote host. + .. method:: createSocket() + + Tries to create a socket and, if it's not a datagram socket, connect it + to the other end. This method is called during handler initialization, + but it's not regarded as an error if the other end isn't listening at + this point - the method will be called again when emitting an event, + if there is no socket at that point. + + .. versionadded:: 3.11 .. 
method:: emit(record) diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst index f7aaa0cb30c2a8..a9311f2a03563f 100644 --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -258,7 +258,7 @@ Compressing and decompressing data in memory will be set to ``True``. Attempting to decompress data after the end of stream is reached - raises an `EOFError`. Any data found after the end of the + raises an :exc:`EOFError`. Any data found after the end of the stream is ignored and saved in the :attr:`~.unused_data` attribute. .. versionchanged:: 3.5 diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index dab115acdc207c..5516084780673f 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -2954,6 +2954,8 @@ Global variables However, global variables which are just module level constants cause no problems. +.. _multiprocessing-safe-main-import: + Safe importing of main module Make sure that the main module can be safely imported by a new Python diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 6d52a03ba95704..50e089653fe71b 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -266,6 +266,15 @@ the :mod:`glob` module.) Accepts a :term:`path-like object`. +.. function:: isjunction(path) + + Return ``True`` if *path* refers to an :func:`existing ` directory + entry that is a junction. Always return ``False`` if junctions are not + supported on the current platform. + + .. versionadded:: 3.12 + + .. function:: islink(path) Return ``True`` if *path* refers to an :func:`existing ` directory diff --git a/Doc/library/os.rst b/Doc/library/os.rst index cb06dc690b4d07..775aa32df99a46 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -304,8 +304,8 @@ process and user. .. function:: getenv(key, default=None) - Return the value of the environment variable *key* if it exists, or - *default* if it doesn't. *key*, *default* and the result are str. Note that + Return the value of the environment variable *key* as a string if it exists, or + *default* if it doesn't. *key* is a string. Note that since :func:`getenv` uses :data:`os.environ`, the mapping of :func:`getenv` is similarly also captured on import, and the function may not reflect future environment changes. @@ -319,8 +319,8 @@ process and user. .. function:: getenvb(key, default=None) - Return the value of the environment variable *key* if it exists, or - *default* if it doesn't. *key*, *default* and the result are bytes. Note that + Return the value of the environment variable *key* as bytes if it exists, or + *default* if it doesn't. *key* must be bytes. Note that since :func:`getenvb` uses :data:`os.environb`, the mapping of :func:`getenvb` is similarly also captured on import, and the function may not reflect future environment changes. @@ -590,6 +590,44 @@ process and user. See the documentation for :func:`getgroups` for cases where it may not return the same group list set by calling setgroups(). +.. function:: setns(fd, nstype=0) + + Reassociate the current thread with a Linux namespace. + See the :manpage:`setns(2)` and :manpage:`namespaces(7)` man pages for more + details. + + If *fd* refers to a :file:`/proc/{pid}/ns/` link, ``setns()`` reassociates the + calling thread with the namespace associated with that link, + and *nstype* may be set to one of the + :ref:`CLONE_NEW* constants ` + to impose constraints on the operation + (``0`` means no constraints). 
+ + Since Linux 5.8, *fd* may refer to a PID file descriptor obtained from + :func:`~os.pidfd_open`. In this case, ``setns()`` reassociates the calling thread + into one or more of the same namespaces as the thread referred to by *fd*. + This is subject to any constraints imposed by *nstype*, + which is a bit mask combining one or more of the + :ref:`CLONE_NEW* constants `, + e.g. ``setns(fd, os.CLONE_NEWUTS | os.CLONE_NEWPID)``. + The caller's memberships in unspecified namespaces are left unchanged. + + *fd* can be any object with a :meth:`~io.IOBase.fileno` method, or a raw file descriptor. + + This example reassociates the thread with the ``init`` process's network namespace:: + + fd = os.open("/proc/1/ns/net", os.O_RDONLY) + os.setns(fd, os.CLONE_NEWNET) + os.close(fd) + + .. availability:: Linux >= 3.0 with glibc >= 2.14. + + .. versionadded:: 3.12 + + .. seealso:: + + The :func:`~os.unshare` function. + .. function:: setpgrp() Call the system call :c:func:`setpgrp` or ``setpgrp(0, 0)`` depending on @@ -756,6 +794,49 @@ process and user. The function is now always available and is also available on Windows. +.. function:: unshare(flags) + + Disassociate parts of the process execution context, and move them into a + newly created namespace. + See the :manpage:`unshare(2)` + man page for more details. + The *flags* argument is a bit mask, combining zero or more of the + :ref:`CLONE_* constants `, + that specifies which parts of the execution context should be + unshared from their existing associations and moved to a new namespace. + If the *flags* argument is ``0``, no changes are made to the calling process's + execution context. + + .. availability:: Linux >= 2.6.16. + + .. versionadded:: 3.12 + + .. seealso:: + + The :func:`~os.setns` function. + +.. _os-unshare-clone-flags: + +Flags to the :func:`unshare` function, if the implementation supports them. +See :manpage:`unshare(2)` in the Linux manual +for their exact effect and availability. + +.. data:: CLONE_FILES + CLONE_FS + CLONE_NEWCGROUP + CLONE_NEWIPC + CLONE_NEWNET + CLONE_NEWNS + CLONE_NEWPID + CLONE_NEWTIME + CLONE_NEWUSER + CLONE_NEWUTS + CLONE_SIGHAND + CLONE_SYSVSEM + CLONE_THREAD + CLONE_VM + + .. _os-newstreams: File Object Creation @@ -2340,7 +2421,7 @@ features: .. function:: remove(path, *, dir_fd=None) Remove (delete) the file *path*. If *path* is a directory, an - :exc:`IsADirectoryError` is raised. Use :func:`rmdir` to remove directories. + :exc:`OSError` is raised. Use :func:`rmdir` to remove directories. If the file does not exist, a :exc:`FileNotFoundError` is raised. This function can support :ref:`paths relative to directory descriptors @@ -2657,6 +2738,17 @@ features: This method can raise :exc:`OSError`, such as :exc:`PermissionError`, but :exc:`FileNotFoundError` is caught and not raised. + .. method:: is_junction() + + Return ``True`` if this entry is a junction (even if broken); + return ``False`` if the entry points to a regular directory, any kind + of file, a symlink, or if it doesn't exist anymore. + + The result is cached on the ``os.DirEntry`` object. Call + :func:`os.path.isjunction` to fetch up-to-date information. + + .. versionadded:: 3.12 + .. method:: stat(*, follow_symlinks=True) Return a :class:`stat_result` object for this entry. This method @@ -2679,8 +2771,8 @@ features: Note that there is a nice correspondence between several attributes and methods of ``os.DirEntry`` and of :class:`pathlib.Path`. 
In particular, the ``name`` attribute has the same - meaning, as do the ``is_dir()``, ``is_file()``, ``is_symlink()`` - and ``stat()`` methods. + meaning, as do the ``is_dir()``, ``is_file()``, ``is_symlink()``, + ``is_junction()``, and ``stat()`` methods. .. versionadded:: 3.5 @@ -3194,7 +3286,7 @@ features: system records access and modification times; see :func:`~os.stat`. The best way to preserve exact times is to use the *st_atime_ns* and *st_mtime_ns* fields from the :func:`os.stat` result object with the *ns* parameter to - `utime`. + :func:`utime`. This function can support :ref:`specifying a file descriptor `, :ref:`paths relative to directory descriptors ` and :ref:`not @@ -3222,7 +3314,8 @@ features: filenames)``. *dirpath* is a string, the path to the directory. *dirnames* is a list of the - names of the subdirectories in *dirpath* (excluding ``'.'`` and ``'..'``). + names of the subdirectories in *dirpath* (including symlinks to directories, + and excluding ``'.'`` and ``'..'``). *filenames* is a list of the names of the non-directory files in *dirpath*. Note that the names in the lists contain no path components. To get a full path (which begins with *top*) to a file or directory in *dirpath*, do @@ -4094,7 +4187,7 @@ written in Python, such as a mail server's external command delivery program. library :c:data:`POSIX_SPAWN_RESETIDS` flag. If the *setsid* argument is ``True``, it will create a new session ID - for `posix_spawn`. *setsid* requires :c:data:`POSIX_SPAWN_SETSID` + for ``posix_spawn``. *setsid* requires :c:data:`POSIX_SPAWN_SETSID` or :c:data:`POSIX_SPAWN_SETSID_NP` flag. Otherwise, :exc:`NotImplementedError` is raised. diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 1375ce1aef953a..6537637f33c70e 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -391,7 +391,7 @@ Pure paths provide the following methods and properties: If you want to walk an arbitrary filesystem path upwards, it is recommended to first call :meth:`Path.resolve` so as to resolve - symlinks and eliminate `".."` components. + symlinks and eliminate ``".."`` components. .. data:: PurePath.name @@ -564,10 +564,10 @@ Pure paths provide the following methods and properties: True -.. method:: PurePath.relative_to(*other) +.. method:: PurePath.relative_to(*other, walk_up=False) Compute a version of this path relative to the path represented by - *other*. If it's impossible, ValueError is raised:: + *other*. If it's impossible, :exc:`ValueError` is raised:: >>> p = PurePosixPath('/etc/passwd') >>> p.relative_to('/') @@ -577,11 +577,33 @@ Pure paths provide the following methods and properties: >>> p.relative_to('/usr') Traceback (most recent call last): File "", line 1, in - File "pathlib.py", line 694, in relative_to - .format(str(self), str(formatted))) - ValueError: '/etc/passwd' is not in the subpath of '/usr' OR one path is relative and the other absolute. + File "pathlib.py", line 941, in relative_to + raise ValueError(error_message.format(str(self), str(formatted))) + ValueError: '/etc/passwd' is not in the subpath of '/usr' OR one path is relative and the other is absolute. + +When *walk_up* is False (the default), the path must start with *other*. + When the argument is True, ``..`` entries may be added to form the + relative path. 
In all other cases, such as the paths referencing + different drives, :exc:`ValueError` is raised.:: + + >>> p.relative_to('/usr', walk_up=True) + PurePosixPath('../etc/passwd') + >>> p.relative_to('foo', walk_up=True) + Traceback (most recent call last): + File "", line 1, in + File "pathlib.py", line 941, in relative_to + raise ValueError(error_message.format(str(self), str(formatted))) + ValueError: '/etc/passwd' is not on the same drive as 'foo' OR one path is relative and the other is absolute. - NOTE: This function is part of :class:`PurePath` and works with strings. It does not check or access the underlying file structure. + .. warning:: + This function is part of :class:`PurePath` and works with strings. + It does not check or access the underlying file structure. + This can impact the *walk_up* option as it assumes that no symlinks + are present in the path; call :meth:`~Path.resolve` first if + necessary to resolve symlinks. + + .. versionadded:: 3.12 + The *walk_up* argument (old behavior is the same as ``walk_up=False``). .. method:: PurePath.with_name(name) @@ -869,6 +891,14 @@ call fails (for example because the path doesn't exist). other errors (such as permission errors) are propagated. +.. method:: Path.is_junction() + + Return ``True`` if the path points to a junction, and ``False`` for any other + type of file. Currently only Windows supports junctions. + + .. versionadded:: 3.12 + + .. method:: Path.is_mount() Return ``True`` if the path is a :dfn:`mount point`: a point in a @@ -1164,6 +1194,8 @@ call fails (for example because the path doesn't exist). relative to the current working directory, *not* the directory of the Path object. + It is implemented in terms of :func:`os.rename` and gives the same guarantees. + .. versionchanged:: 3.8 Added return value, return the new Path instance. diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index dc2d871b47d5ef..a0c9f63ab9f957 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -168,16 +168,20 @@ Cross Platform containing six attributes: :attr:`system`, :attr:`node`, :attr:`release`, :attr:`version`, :attr:`machine`, and :attr:`processor`. - Note that this adds a sixth attribute (:attr:`processor`) not present - in the :func:`os.uname` result. Also, the attribute names are different - for the first two attributes; :func:`os.uname` names them - :attr:`sysname` and :attr:`nodename`. + :attr:`processor` is resolved late, on demand. + + Note: the first two attribute names differ from the names presented by + :func:`os.uname`, where they are named :attr:`sysname` and + :attr:`nodename`. Entries which cannot be determined are set to ``''``. .. versionchanged:: 3.3 Result changed from a tuple to a :func:`~collections.namedtuple`. + .. versionchanged:: 3.9 + :attr:`processor` is resolved late instead of immediately. + Java Platform ------------- diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 7e527a9b401ac6..669204ba65b77e 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -582,6 +582,37 @@ Simulation of arrival times and service deliveries for a multiserver queue:: Recipes ------- +These recipes show how to efficiently make random selections +from the combinatoric iterators in the :mod:`itertools` module: + +.. 
testcode:: + import random + + def random_product(*args, repeat=1): + "Random selection from itertools.product(*args, **kwds)" + pools = [tuple(pool) for pool in args] * repeat + return tuple(map(random.choice, pools)) + + def random_permutation(iterable, r=None): + "Random selection from itertools.permutations(iterable, r)" + pool = tuple(iterable) + r = len(pool) if r is None else r + return tuple(random.sample(pool, r)) + + def random_combination(iterable, r): + "Random selection from itertools.combinations(iterable, r)" + pool = tuple(iterable) + n = len(pool) + indices = sorted(random.sample(range(n), r)) + return tuple(pool[i] for i in indices) + + def random_combination_with_replacement(iterable, r): + "Random selection from itertools.combinations_with_replacement(iterable, r)" + pool = tuple(iterable) + n = len(pool) + indices = sorted(random.choices(range(n), k=r)) + return tuple(pool[i] for i in indices) + The default :func:`.random` returns multiples of 2⁻⁵³ in the range *0.0 ≤ x < 1.0*. All such numbers are evenly spaced and are exactly representable as Python floats. However, many other representable diff --git a/Doc/library/re.rst b/Doc/library/re.rst index 5b304f717b07fa..0034b46fb1ced2 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -29,7 +29,7 @@ a literal backslash, one might have to write ``'\\\\'`` as the pattern string, because the regular expression must be ``\\``, and each backslash must be expressed as ``\\`` inside a regular Python string literal. Also, please note that any invalid escape sequences in Python's -usage of the backslash in string literals now generate a :exc:`DeprecationWarning` +usage of the backslash in string literals now generate a :exc:`SyntaxWarning` and in the future this will become a :exc:`SyntaxError`. This behaviour will happen even if it is a valid escape sequence for a regular expression. @@ -483,6 +483,9 @@ The special characters are: some fixed length. Patterns which start with negative lookbehind assertions may match at the beginning of the string being searched. +.. _re-conditional-expression: +.. index:: single: (?(; in regular expressions + ``(?(id/name)yes-pattern|no-pattern)`` Will try to match with ``yes-pattern`` if the group with given *id* or *name* exists, and with ``no-pattern`` if it doesn't. ``no-pattern`` is diff --git a/Doc/library/runpy.rst b/Doc/library/runpy.rst index 26a4f1435214e8..501f4ddf5a3e3f 100644 --- a/Doc/library/runpy.rst +++ b/Doc/library/runpy.rst @@ -93,6 +93,11 @@ The :mod:`runpy` module provides two functions: run this way, as well as ensuring the real module name is always accessible as ``__spec__.name``. + .. versionchanged:: 3.12 + The setting of ``__cached__``, ``__loader__``, and + ``__package__`` are deprecated. See + :class:`~importlib.machinery.ModuleSpec` for alternatives. + .. function:: run_path(path_name, init_globals=None, run_name=None) .. index:: @@ -163,6 +168,10 @@ The :mod:`runpy` module provides two functions: case where ``__main__`` is imported from a valid sys.path entry rather than being executed directly. + .. versionchanged:: 3.12 + The setting of ``__cached__``, ``__loader__``, and + ``__package__`` are deprecated. + .. seealso:: :pep:`338` -- Executing modules as scripts diff --git a/Doc/library/secrets.rst b/Doc/library/secrets.rst index dc8e5f46fb581e..4405dfc0535973 100644 --- a/Doc/library/secrets.rst +++ b/Doc/library/secrets.rst @@ -128,7 +128,9 @@ Other functions .. 
function:: compare_digest(a, b) - Return ``True`` if strings *a* and *b* are equal, otherwise ``False``, + Return ``True`` if strings or + :term:`bytes-like objects ` + *a* and *b* are equal, otherwise ``False``, using a "constant-time compare" to reduce the risk of `timing attacks `_. See :func:`hmac.compare_digest` for additional details. diff --git a/Doc/library/select.rst b/Doc/library/select.rst index a8df81f5bd1f07..2890706bab729c 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -61,7 +61,7 @@ The module defines the following: events. *sizehint* informs epoll about the expected number of events to be - registered. It must be positive, or `-1` to use the default. It is only + registered. It must be positive, or ``-1`` to use the default. It is only used on older systems where :c:func:`epoll_create1` is not available; otherwise it has no effect (though its value is still checked). diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 2269f50cbaff99..523d1ac5001360 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -4,6 +4,8 @@ .. module:: signal :synopsis: Set handlers for asynchronous events. +**Source code:** :source:`Lib/signal.py` + -------------- This module provides mechanisms to use signal handlers in Python. @@ -362,9 +364,9 @@ The :mod:`signal` module defines the following functions: .. function:: strsignal(signalnum) - Return the system description of the signal *signalnum*, such as - "Interrupt", "Segmentation fault", etc. Returns :const:`None` if the signal - is not recognized. + Returns the description of signal *signalnum*, such as "Interrupt" + for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no + description. Raises :exc:`ValueError` if *signalnum* is invalid. .. versionadded:: 3.8 diff --git a/Doc/library/sndhdr.rst b/Doc/library/sndhdr.rst index e1dbe4a1a34483..fa9323e18dc348 100644 --- a/Doc/library/sndhdr.rst +++ b/Doc/library/sndhdr.rst @@ -54,3 +54,51 @@ be the sample size in bits or ``'A'`` for A-LAW or ``'U'`` for u-LAW. .. versionchanged:: 3.5 Result changed from a tuple to a namedtuple. +The following sound header types are recognized, as listed below with the return value +from :func:`whathdr`: and :func:`what`: + ++------------+------------------------------------+ +| Value | Sound header format | ++============+====================================+ +| ``'aifc'`` | Compressed Audio Interchange Files | ++------------+------------------------------------+ +| ``'aiff'`` | Audio Interchange Files | ++------------+------------------------------------+ +| ``'au'`` | Au Files | ++------------+------------------------------------+ +| ``'hcom'`` | HCOM Files | ++------------+------------------------------------+ +| ``'sndt'`` | Sndtool Sound Files | ++------------+------------------------------------+ +| ``'voc'`` | Creative Labs Audio Files | ++------------+------------------------------------+ +| ``'wav'`` | Waveform Audio File Format Files | ++------------+------------------------------------+ +| ``'8svx'`` | 8-Bit Sampled Voice Files | ++------------+------------------------------------+ +| ``'sb'`` | Signed Byte Audio Data Files | ++------------+------------------------------------+ +| ``'ub'`` | UB Files | ++------------+------------------------------------+ +| ``'ul'`` | uLAW Audio Files | ++------------+------------------------------------+ + +.. data:: tests + + A list of functions performing the individual tests. Each function takes two + arguments: the byte-stream and an open file-like object. 
When :func:`what` is + called with a byte-stream, the file-like object will be ``None``. + + The test function should return a string describing the sound type if the test + succeeded, or ``None`` if it failed. + +Example: + +.. code-block:: pycon + + >>> import sndhdr + >>> sndhdr.what('bass.wav') + 'wav' + >>> sndhdr.whathdr('bass.wav') + 'wav' + diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index ee0c68e3a70779..c946407ea1d83f 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -189,8 +189,11 @@ created. Socket addresses are represented as follows: ``(ifname, proto[, pkttype[, hatype[, addr]]])`` where: - *ifname* - String specifying the device name. - - *proto* - An in network-byte-order integer specifying the Ethernet - protocol number. + - *proto* - The Ethernet protocol number. + May be :data:`ETH_P_ALL` to capture all protocols, + one of the :ref:`ETHERTYPE_* constants ` + or any other Ethernet protocol number. + Value must be in network-byte-order. - *pkttype* - Optional integer specifying the packet type: - ``PACKET_HOST`` (the default) - Packet addressed to the local host. @@ -508,6 +511,19 @@ Constants .. availability:: Linux >= 2.2. +.. data:: ETH_P_ALL + + :data:`!ETH_P_ALL` can be used in the :class:`~socket.socket` + constructor as *proto* for the :const:`AF_PACKET` family in order to + capture every packet, regardless of protocol. + + For more information, see the :manpage:`packet(7)` manpage. + + .. availability:: Linux. + + .. versionadded:: 3.12 + + .. data:: AF_RDS PF_RDS SOL_RDS @@ -638,6 +654,22 @@ Constants .. versionadded:: 3.12 +.. _socket-ethernet-types: + +.. data:: ETHERTYPE_ARP + ETHERTYPE_IP + ETHERTYPE_IPV6 + ETHERTYPE_VLAN + + `IEEE 802.3 protocol number + `_. + constants. + + .. availability:: Linux, FreeBSD, macOS. + + .. versionadded:: 3.12 + + Functions ^^^^^^^^^ @@ -689,7 +721,7 @@ The following functions all create :ref:`socket objects `. When :const:`SOCK_NONBLOCK` or :const:`SOCK_CLOEXEC` bit flags are applied to *type* they are cleared, and :attr:`socket.type` will not reflect them. They are still passed - to the underlying system `socket()` call. Therefore, + to the underlying system ``socket()`` call. Therefore, :: diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst index 70d56a1dad94f2..26785395ec0312 100644 --- a/Doc/library/socketserver.rst +++ b/Doc/library/socketserver.rst @@ -177,8 +177,7 @@ expensive or inappropriate for the service) is to maintain an explicit table of partially finished requests and to use :mod:`selectors` to decide which request to work on next (or whether to handle a new incoming request). This is particularly important for stream services where each client can potentially be -connected for a long time (if threads or subprocesses cannot be used). See -:mod:`asyncore` for another way to manage this. +connected for a long time (if threads or subprocesses cannot be used). .. XXX should data and methods be intermingled, or separate? how should the distinction between class and instance variables be drawn? diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 26a085877ec112..0dac2312b2feb1 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -239,6 +239,7 @@ inserted data and retrieved values from it in multiple ways. * :ref:`sqlite3-adapters` * :ref:`sqlite3-converters` * :ref:`sqlite3-connection-context-manager` + * :ref:`sqlite3-howto-row-factory` * :ref:`sqlite3-explanation` for in-depth background on transaction control. 
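Circling back to the new :data:`ETH_P_ALL` and ``ETHERTYPE_*`` constants documented in the socket changes above, a minimal sketch of a raw capture socket; Linux only, requires ``CAP_NET_RAW``, and the ``"eth0"`` interface name is an assumption::

    import socket

    # ETH_P_ALL selects every protocol; AF_PACKET expects the protocol
    # number in network byte order, hence the htons() call.
    with socket.socket(socket.AF_PACKET, socket.SOCK_RAW,
                       socket.htons(socket.ETH_P_ALL)) as s:
        s.bind(("eth0", 0))                      # assumed interface name
        frame, address = s.recvfrom(65535)
        print(len(frame), "bytes from", address)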
@@ -258,7 +259,8 @@ Module functions .. function:: connect(database, timeout=5.0, detect_types=0, \ isolation_level="DEFERRED", check_same_thread=True, \ factory=sqlite3.Connection, cached_statements=128, \ - uri=False) + uri=False, \*, \ + autocommit=sqlite3.LEGACY_TRANSACTION_CONTROL) Open a connection to an SQLite database. @@ -290,11 +292,12 @@ Module functions By default (``0``), type detection is disabled. :param isolation_level: - The :attr:`~Connection.isolation_level` of the connection, - controlling whether and how transactions are implicitly opened. + See :attr:`Connection.isolation_level` and + :ref:`sqlite3-transaction-control-isolation-level` for more information. Can be ``"DEFERRED"`` (default), ``"EXCLUSIVE"`` or ``"IMMEDIATE"``; or ``None`` to disable opening transactions implicitly. - See :ref:`sqlite3-controlling-transactions` for more. + Has no effect unless :attr:`Connection.autocommit` is set to + :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL` (the default). :type isolation_level: str | None :param bool check_same_thread: @@ -321,6 +324,14 @@ Module functions The query string allows passing parameters to SQLite, enabling various :ref:`sqlite3-uri-tricks`. + :param autocommit: + See :attr:`Connection.autocommit` and + :ref:`sqlite3-transaction-control-autocommit` for more information. + *autocommit* currently defaults to + :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL`. + The default will change to ``False`` in a future Python release. + :type autocommit: bool + :rtype: Connection .. audit-event:: sqlite3.connect database sqlite3.connect @@ -335,6 +346,9 @@ Module functions .. versionadded:: 3.10 The ``sqlite3.connect/handle`` auditing event. + .. versionadded:: 3.12 + The *autocommit* parameter. + .. function:: complete_statement(statement) Return ``True`` if the string *statement* appears to contain @@ -418,6 +432,12 @@ Module functions Module constants ^^^^^^^^^^^^^^^^ +.. data:: LEGACY_TRANSACTION_CONTROL + + Set :attr:`~Connection.autocommit` to this constant to select + old style (pre-Python 3.12) transaction control behaviour. + See :ref:`sqlite3-transaction-control-isolation-level` for more information. + .. data:: PARSE_COLNAMES Pass this flag value to the *detect_types* parameter of @@ -478,9 +498,10 @@ Module constants .. note:: - The :mod:`!sqlite3` module supports both ``qmark`` and ``numeric`` DB-API - parameter styles, because that is what the underlying SQLite library - supports. However, the DB-API does not allow multiple values for + The :mod:`!sqlite3` module supports ``qmark``, ``numeric``, + and ``named`` DB-API parameter styles, + because that is what the underlying SQLite library supports. + However, the DB-API does not allow multiple values for the ``paramstyle`` attribute. .. data:: sqlite_version @@ -577,7 +598,7 @@ Connection objects supplied, this must be a callable returning an instance of :class:`Cursor` or its subclasses. - .. method:: blobopen(table, column, row, /, \*, readonly=False, name="main") + .. method:: blobopen(table, column, row, /, *, readonly=False, name="main") Open a :class:`Blob` handle to an existing :abbr:`BLOB (Binary Large OBject)`. @@ -615,18 +636,27 @@ Connection objects .. method:: commit() Commit any pending transaction to the database. - If there is no open transaction, this method is a no-op. + If :attr:`autocommit` is ``True``, or there is no open transaction, + this method does nothing. 
+ If :attr:`!autocommit` is ``False``, a new transaction is implicitly + opened if a pending transaction was committed by this method. .. method:: rollback() Roll back to the start of any pending transaction. - If there is no open transaction, this method is a no-op. + If :attr:`autocommit` is ``True``, or there is no open transaction, + this method does nothing. + If :attr:`!autocommit` is ``False``, a new transaction is implicitly + opened if a pending transaction was rolled back by this method. .. method:: close() Close the database connection. - Any pending transaction is not committed implicitly; - make sure to :meth:`commit` before closing + If :attr:`autocommit` is ``False``, + any pending transaction is implicitly rolled back. + If :attr:`!autocommit` is ``True`` or :data:`LEGACY_TRANSACTION_CONTROL`, + no implicit transaction control is executed. + Make sure to :meth:`commit` before closing to avoid losing pending changes. .. method:: execute(sql, parameters=(), /) @@ -647,7 +677,7 @@ Connection objects :meth:`~Cursor.executescript` on it with the given *sql_script*. Return the new cursor object. - .. method:: create_function(name, narg, func, \*, deterministic=False) + .. method:: create_function(name, narg, func, *, deterministic=False) Create or remove a user-defined SQL function. @@ -1040,7 +1070,7 @@ Connection objects con.close() - .. method:: backup(target, \*, pages=-1, progress=None, name="main", sleep=0.250) + .. method:: backup(target, *, pages=-1, progress=None, name="main", sleep=0.250) Create a backup of an SQLite database. @@ -1169,7 +1199,7 @@ Connection objects .. _SQLite limit category: https://www.sqlite.org/c3ref/c_limit_attached.html - .. method:: serialize(\*, name="main") + .. method:: serialize(*, name="main") Serialize a database into a :class:`bytes` object. For an ordinary on-disk database file, the serialization is just a copy of the @@ -1191,7 +1221,7 @@ Connection objects .. versionadded:: 3.11 - .. method:: deserialize(data, /, \*, name="main") + .. method:: deserialize(data, /, *, name="main") Deserialize a :meth:`serialized ` database into a :class:`Connection`. @@ -1223,6 +1253,38 @@ Connection objects .. versionadded:: 3.11 + .. attribute:: autocommit + + This attribute controls :pep:`249`-compliant transaction behaviour. + :attr:`!autocommit` has three allowed values: + + * ``False``: Select :pep:`249`-compliant transaction behaviour, + implying that :mod:`!sqlite3` ensures a transaction is always open. + Use :meth:`commit` and :meth:`rollback` to close transactions. + + This is the recommended value of :attr:`!autocommit`. + + * ``True``: Use SQLite's `autocommit mode`_. + :meth:`commit` and :meth:`rollback` have no effect in this mode. + + * :data:`LEGACY_TRANSACTION_CONTROL`: + Pre-Python 3.12 (non-:pep:`249`-compliant) transaction control. + See :attr:`isolation_level` for more details. + + This is currently the default value of :attr:`!autocommit`. + + Changing :attr:`!autocommit` to ``False`` will open a new transaction, + and changing it to ``True`` will commit any pending transaction. + + See :ref:`sqlite3-transaction-control-autocommit` for more details. + + .. note:: + + The :attr:`isolation_level` attribute has no effect unless + :attr:`autocommit` is :data:`LEGACY_TRANSACTION_CONTROL`. + + .. versionadded:: 3.12 + .. attribute:: in_transaction This read-only attribute corresponds to the low-level SQLite @@ -1235,44 +1297,34 @@ Connection objects .. 
attribute:: isolation_level - This attribute controls the :ref:`transaction handling - ` performed by :mod:`!sqlite3`. + Controls the :ref:`legacy transaction handling mode + ` of :mod:`!sqlite3`. If set to ``None``, transactions are never implicitly opened. If set to one of ``"DEFERRED"``, ``"IMMEDIATE"``, or ``"EXCLUSIVE"``, corresponding to the underlying `SQLite transaction behaviour`_, - implicit :ref:`transaction management - ` is performed. + :ref:`implicit transaction management + ` is performed. If not overridden by the *isolation_level* parameter of :func:`connect`, the default is ``""``, which is an alias for ``"DEFERRED"``. - .. attribute:: row_factory - - A callable that accepts two arguments, - a :class:`Cursor` object and the raw row results as a :class:`tuple`, - and returns a custom object representing an SQLite row. - - Example: + .. note:: - .. doctest:: + Using :attr:`autocommit` to control transaction handling is + recommended over using :attr:`!isolation_level`. + :attr:`!isolation_level` has no effect unless :attr:`autocommit` is + set to :data:`LEGACY_TRANSACTION_CONTROL` (the default). - >>> def dict_factory(cursor, row): - ... col_names = [col[0] for col in cursor.description] - ... return {key: value for key, value in zip(col_names, row)} - >>> con = sqlite3.connect(":memory:") - >>> con.row_factory = dict_factory - >>> for row in con.execute("SELECT 1 AS a, 2 AS b"): - ... print(row) - {'a': 1, 'b': 2} + .. attribute:: row_factory - If returning a tuple doesn't suffice and you want name-based access to - columns, you should consider setting :attr:`row_factory` to the - highly optimized :class:`sqlite3.Row` type. :class:`Row` provides both - index-based and case-insensitive name-based access to columns with almost no - memory overhead. It will probably be better than your own custom - dictionary-based approach or even a db_row based solution. + The initial :attr:`~Cursor.row_factory` + for :class:`Cursor` objects created from this connection. + Assigning to this attribute does not affect the :attr:`!row_factory` + of existing cursors belonging to this connection, only new ones. + Is ``None`` by default, + meaning each row is returned as a :class:`tuple`. - .. XXX what's a db_row-based solution? + See :ref:`sqlite3-howto-row-factory` for more details. .. attribute:: text_factory @@ -1374,7 +1426,9 @@ Cursor objects :meth:`executescript` if you want to execute multiple SQL statements with one call. - If :attr:`~Connection.isolation_level` is not ``None``, + If :attr:`~Connection.autocommit` is + :data:`LEGACY_TRANSACTION_CONTROL`, + :attr:`~Connection.isolation_level` is not ``None``, *sql* is an ``INSERT``, ``UPDATE``, ``DELETE``, or ``REPLACE`` statement, and there is no open transaction, a transaction is implicitly opened before executing *sql*. @@ -1402,7 +1456,9 @@ Cursor objects .. method:: executescript(sql_script, /) Execute the SQL statements in *sql_script*. - If there is a pending transaction, + If the :attr:`~Connection.autocommit` is + :data:`LEGACY_TRANSACTION_CONTROL` + and there is a pending transaction, an implicit ``COMMIT`` statement is executed first. No other implicit transaction control is performed; any transaction control must be added to *sql_script*. @@ -1425,7 +1481,7 @@ Cursor objects .. method:: fetchone() - If :attr:`~Connection.row_factory` is ``None``, + If :attr:`~Cursor.row_factory` is ``None``, return the next row query result set as a :class:`tuple`. Else, pass it to the row factory and return its result. 
Return ``None`` if no more data is available. @@ -1519,6 +1575,22 @@ Cursor objects including :abbr:`CTE (Common Table Expression)` queries. It is only updated by the :meth:`execute` and :meth:`executemany` methods. + .. attribute:: row_factory + + Control how a row fetched from this :class:`!Cursor` is represented. + If ``None``, a row is represented as a :class:`tuple`. + Can be set to the included :class:`sqlite3.Row`; + or a :term:`callable` that accepts two arguments, + a :class:`Cursor` object and the :class:`!tuple` of row values, + and returns a custom object representing an SQLite row. + + Defaults to what :attr:`Connection.row_factory` was set to + when the :class:`!Cursor` was created. + Assigning to this attribute does not affect + :attr:`Connection.row_factory` of the parent connection. + + See :ref:`sqlite3-howto-row-factory` for more details. + .. The sqlite3.Row example used to be a how-to. It has now been incorporated into the Row reference. We keep the anchor here in order not to break @@ -1537,7 +1609,10 @@ Row objects It supports iteration, equality testing, :func:`len`, and :term:`mapping` access by column name and index. - Two row objects compare equal if have equal columns and equal members. + Two :class:`!Row` objects compare equal + if they have identical column names and values. + + See :ref:`sqlite3-howto-row-factory` for more details. .. method:: keys @@ -1548,21 +1623,6 @@ Row objects .. versionchanged:: 3.5 Added support of slicing. - Example: - - .. doctest:: - - >>> con = sqlite3.connect(":memory:") - >>> con.row_factory = sqlite3.Row - >>> res = con.execute("SELECT 'Earth' AS name, 6378 AS radius") - >>> row = res.fetchone() - >>> row.keys() - ['name', 'radius'] - >>> row[0], row["name"] # Access by index and name. - ('Earth', 'Earth') - >>> row["RADIUS"] # Column names are case-insensitive. - 6378 - .. _sqlite3-blob-objects: @@ -1833,8 +1893,13 @@ The deprecated default adapters and converters consist of: Command-line interface ^^^^^^^^^^^^^^^^^^^^^^ -The :mod:`!sqlite3` module can be invoked as a script +The :mod:`!sqlite3` module can be invoked as a script, +using the interpreter's :option:`-m` switch, in order to provide a simple SQLite shell. +The argument signature is as follows:: + + python -m sqlite3 [-h] [-v] [filename] [sql] + Type ``.quit`` or CTRL-D to exit the shell. .. program:: python -m sqlite3 [-h] [-v] [filename] [sql] @@ -2106,20 +2171,39 @@ This section shows recipes for common adapters and converters. def convert_date(val): """Convert ISO 8601 date to datetime.date object.""" - return datetime.date.fromisoformat(val) + return datetime.date.fromisoformat(val.decode()) def convert_datetime(val): """Convert ISO 8601 datetime to datetime.datetime object.""" - return datetime.datetime.fromisoformat(val) + return datetime.datetime.fromisoformat(val.decode()) def convert_timestamp(val): """Convert Unix epoch timestamp to datetime.datetime object.""" - return datetime.datetime.fromtimestamp(val) + return datetime.datetime.fromtimestamp(int(val)) sqlite3.register_converter("date", convert_date) sqlite3.register_converter("datetime", convert_datetime) sqlite3.register_converter("timestamp", convert_timestamp) +.. 
testcode:: + :hide: + + dt = datetime.datetime(2019, 5, 18, 15, 17, 8, 123456) + + assert adapt_date_iso(dt.date()) == "2019-05-18" + assert convert_date(b"2019-05-18") == dt.date() + + assert adapt_datetime_iso(dt) == "2019-05-18T15:17:08.123456" + assert convert_datetime(b"2019-05-18T15:17:08.123456") == dt + + # Using current time as fromtimestamp() returns local date/time. + # Droping microseconds as adapt_datetime_epoch truncates fractional second part. + now = datetime.datetime.now().replace(microsecond=0) + current_timestamp = int(now.timestamp()) + + assert adapt_datetime_epoch(now) == current_timestamp + assert convert_timestamp(str(current_timestamp).encode()) == now + .. _sqlite3-connection-shortcuts: @@ -2177,9 +2261,12 @@ the transaction is committed. If this commit fails, or if the body of the ``with`` statement raises an uncaught exception, the transaction is rolled back. +If :attr:`~Connection.autocommit` is ``False``, +a new transaction is implicitly opened after committing or rolling back. If there is no open transaction upon leaving the body of the ``with`` statement, -the context manager is a no-op. +or if :attr:`~Connection.autocommit` is ``True``, +the context manager does nothing. .. note:: @@ -2259,18 +2346,167 @@ can be found in the `SQLite URI documentation`_. .. _SQLite URI documentation: https://www.sqlite.org/uri.html +.. _sqlite3-howto-row-factory: + +How to create and use row factories +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, :mod:`!sqlite3` represents each row as a :class:`tuple`. +If a :class:`!tuple` does not suit your needs, +you can use the :class:`sqlite3.Row` class +or a custom :attr:`~Cursor.row_factory`. + +While :attr:`!row_factory` exists as an attribute both on the +:class:`Cursor` and the :class:`Connection`, +it is recommended to set :class:`Connection.row_factory`, +so all cursors created from the connection will use the same row factory. + +:class:`!Row` provides indexed and case-insensitive named access to columns, +with minimal memory overhead and performance impact over a :class:`!tuple`. +To use :class:`!Row` as a row factory, +assign it to the :attr:`!row_factory` attribute: + +.. doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = sqlite3.Row + +Queries now return :class:`!Row` objects: + +.. doctest:: + + >>> res = con.execute("SELECT 'Earth' AS name, 6378 AS radius") + >>> row = res.fetchone() + >>> row.keys() + ['name', 'radius'] + >>> row[0] # Access by index. + 'Earth' + >>> row["name"] # Access by name. + 'Earth' + >>> row["RADIUS"] # Column names are case-insensitive. + 6378 + +You can create a custom :attr:`~Cursor.row_factory` +that returns each row as a :class:`dict`, with column names mapped to values: + +.. testcode:: + + def dict_factory(cursor, row): + fields = [column[0] for column in cursor.description] + return {key: value for key, value in zip(fields, row)} + +Using it, queries now return a :class:`!dict` instead of a :class:`!tuple`: + +.. doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = dict_factory + >>> for row in con.execute("SELECT 1 AS a, 2 AS b"): + ... print(row) + {'a': 1, 'b': 2} + +The following row factory returns a :term:`named tuple`: + +.. testcode:: + + from collections import namedtuple + + def namedtuple_factory(cursor, row): + fields = [column[0] for column in cursor.description] + cls = namedtuple("Row", fields) + return cls._make(row) + +:func:`!namedtuple_factory` can be used as follows: + +.. 
doctest:: + + >>> con = sqlite3.connect(":memory:") + >>> con.row_factory = namedtuple_factory + >>> cur = con.execute("SELECT 1 AS a, 2 AS b") + >>> row = cur.fetchone() + >>> row + Row(a=1, b=2) + >>> row[0] # Indexed access. + 1 + >>> row.b # Attribute access. + 2 + +With some adjustments, the above recipe can be adapted to use a +:class:`~dataclasses.dataclass`, or any other custom class, +instead of a :class:`~collections.namedtuple`. + + .. _sqlite3-explanation: Explanation ----------- +.. _sqlite3-transaction-control: .. _sqlite3-controlling-transactions: Transaction control ^^^^^^^^^^^^^^^^^^^ -The :mod:`!sqlite3` module does not adhere to the transaction handling recommended -by :pep:`249`. +:mod:`!sqlite3` offers multiple methods of controlling whether, +when and how database transactions are opened and closed. +:ref:`sqlite3-transaction-control-autocommit` is recommended, +while :ref:`sqlite3-transaction-control-isolation-level` +retains the pre-Python 3.12 behaviour. + +.. _sqlite3-transaction-control-autocommit: + +Transaction control via the ``autocommit`` attribute +"""""""""""""""""""""""""""""""""""""""""""""""""""" + +The recommended way of controlling transaction behaviour is through +the :attr:`Connection.autocommit` attribute, +which should preferrably be set using the *autocommit* parameter +of :func:`connect`. + +It is suggested to set *autocommit* to ``False``, +which implies :pep:`249`-compliant transaction control. +This means: + +* :mod:`!sqlite3` ensures that a transaction is always open, + so :meth:`Connection.commit` and :meth:`Connection.rollback` + will implicitly open a new transaction immediately after closing + the pending one. + :mod:`!sqlite3` uses ``BEGIN DEFERRED`` statements when opening transactions. +* Transactions should be committed explicitly using :meth:`!commit`. +* Transactions should be rolled back explicitly using :meth:`!rollback`. +* An implicit rollback is performed if the database is + :meth:`~Connection.close`-ed with pending changes. + +Set *autocommit* to ``True`` to enable SQLite's `autocommit mode`_. +In this mode, :meth:`Connection.commit` and :meth:`Connection.rollback` +have no effect. +Note that SQLite's autocommit mode is distinct from +the :pep:`249`-compliant :attr:`Connection.autocommit` attribute; +use :attr:`Connection.in_transaction` to query +the low-level SQLite autocommit mode. + +Set *autocommit* to :data:`LEGACY_TRANSACTION_CONTROL` +to leave transaction control behaviour to the +:attr:`Connection.isolation_level` attribute. +See :ref:`sqlite3-transaction-control-isolation-level` for more information. + + +.. _sqlite3-transaction-control-isolation-level: + +Transaction control via the ``isolation_level`` attribute +""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +.. note:: + + The recommended way of controlling transactions is via the + :attr:`~Connection.autocommit` attribute. + See :ref:`sqlite3-transaction-control-autocommit`. + +If :attr:`Connection.autocommit` is set to +:data:`LEGACY_TRANSACTION_CONTROL` (the default), +transaction behaviour is controlled using +the :attr:`Connection.isolation_level` attribute. +Otherwise, :attr:`!isolation_level` has no effect. If the connection attribute :attr:`~Connection.isolation_level` is not ``None``, @@ -2301,6 +2537,10 @@ regardless of the value of :attr:`~Connection.isolation_level`. :mod:`!sqlite3` used to implicitly commit an open transaction before DDL statements. This is no longer the case. +.. 
versionchanged:: 3.12 + The recommended way of controlling transactions is now via the + :attr:`~Connection.autocommit` attribute. + .. _autocommit mode: https://www.sqlite.org/lang_transaction.html#implicit_versus_explicit_transactions diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index f5d5a7c4d28205..08824feeb3958f 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -74,13 +74,10 @@ Functions, Constants, and Exceptions Socket creation ^^^^^^^^^^^^^^^ -Since Python 3.2 and 2.7.9, it is recommended to use the -:meth:`SSLContext.wrap_socket` of an :class:`SSLContext` instance to wrap -sockets as :class:`SSLSocket` objects. The helper functions +Instances of :class:`SSLSocket` must be created using the +:meth:`SSLContext.wrap_socket` method. The helper function :func:`create_default_context` returns a new context with secure default -settings. The old :func:`wrap_socket` function is deprecated since it is -both inefficient and has no support for server name indication (SNI) and -hostname matching. +settings. Client socket example with default context and IPv4/IPv6 dual stack:: @@ -369,10 +366,10 @@ Certificate handling Given the address ``addr`` of an SSL-protected server, as a (*hostname*, *port-number*) pair, fetches the server's certificate, and returns it as a PEM-encoded string. If ``ssl_version`` is specified, uses that version of - the SSL protocol to attempt to connect to the server. If ``ca_certs`` is + the SSL protocol to attempt to connect to the server. If *ca_certs* is specified, it should be a file containing a list of root certificates, the - same format as used for the same parameter in - :meth:`SSLContext.wrap_socket`. The call will attempt to validate the + same format as used for the *cafile* parameter in + :meth:`SSLContext.load_verify_locations`. The call will attempt to validate the server certificate against that set of root certificates, and will fail if the validation attempt fails. A timeout can be specified with the ``timeout`` parameter. @@ -451,33 +448,6 @@ Certificate handling .. versionadded:: 3.4 -.. function:: wrap_socket(sock, keyfile=None, certfile=None, \ - server_side=False, cert_reqs=CERT_NONE, ssl_version=PROTOCOL_TLS, \ - ca_certs=None, do_handshake_on_connect=True, \ - suppress_ragged_eofs=True, ciphers=None) - - Takes an instance ``sock`` of :class:`socket.socket`, and returns an instance - of :class:`ssl.SSLSocket`, a subtype of :class:`socket.socket`, which wraps - the underlying socket in an SSL context. ``sock`` must be a - :data:`~socket.SOCK_STREAM` socket; other socket types are unsupported. - - Internally, function creates a :class:`SSLContext` with protocol - *ssl_version* and :attr:`SSLContext.options` set to *cert_reqs*. If - parameters *keyfile*, *certfile*, *ca_certs* or *ciphers* are set, then - the values are passed to :meth:`SSLContext.load_cert_chain`, - :meth:`SSLContext.load_verify_locations`, and - :meth:`SSLContext.set_ciphers`. - - The arguments *server_side*, *do_handshake_on_connect*, and - *suppress_ragged_eofs* have the same meaning as - :meth:`SSLContext.wrap_socket`. - - .. deprecated:: 3.7 - - Since Python 3.2 and 2.7.9, it is recommended to use the - :meth:`SSLContext.wrap_socket` instead of :func:`wrap_socket`. The - top-level function is limited and creates an insecure client socket - without server name indication or hostname matching. Constants ^^^^^^^^^ @@ -488,8 +458,8 @@ Constants .. 
data:: CERT_NONE - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. Except for :const:`PROTOCOL_TLS_CLIENT`, + Possible value for :attr:`SSLContext.verify_mode`. + Except for :const:`PROTOCOL_TLS_CLIENT`, it is the default mode. With client-side sockets, just about any cert is accepted. Validation errors, such as untrusted or expired cert, are ignored and do not abort the TLS/SSL handshake. @@ -501,8 +471,8 @@ Constants .. data:: CERT_OPTIONAL - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. In client mode, :const:`CERT_OPTIONAL` + Possible value for :attr:`SSLContext.verify_mode`. + In client mode, :const:`CERT_OPTIONAL` has the same meaning as :const:`CERT_REQUIRED`. It is recommended to use :const:`CERT_REQUIRED` for client-side sockets instead. @@ -513,13 +483,12 @@ Constants the TLS handshake. Use of this setting requires a valid set of CA certificates to - be passed, either to :meth:`SSLContext.load_verify_locations` or as a - value of the ``ca_certs`` parameter to :func:`wrap_socket`. + be passed to :meth:`SSLContext.load_verify_locations`. .. data:: CERT_REQUIRED - Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` - parameter to :func:`wrap_socket`. In this mode, certificates are + Possible value for :attr:`SSLContext.verify_mode`. + In this mode, certificates are required from the other side of the socket connection; an :class:`SSLError` will be raised if no certificate is provided, or if its validation fails. This mode is **not** sufficient to verify a certificate in client mode as @@ -533,8 +502,7 @@ Constants the client must provide a valid and trusted certificate. Use of this setting requires a valid set of CA certificates to - be passed, either to :meth:`SSLContext.load_verify_locations` or as a - value of the ``ca_certs`` parameter to :func:`wrap_socket`. + be passed to :meth:`SSLContext.load_verify_locations`. .. class:: VerifyMode @@ -839,6 +807,22 @@ Constants .. versionadded:: 3.10 +.. data:: OP_ENABLE_KTLS + + Enable the use of the kernel TLS. To benefit from the feature, OpenSSL must + have been compiled with support for it, and the negotiated cipher suites and + extensions must be supported by it (a list of supported ones may vary by + platform and kernel version). + + Note that with enabled kernel TLS some cryptographic operations are + performed by the kernel directly and not via any available OpenSSL + Providers. This might be undesirable if, for example, the application + requires all cryptographic operations to be performed by the FIPS provider. + + This option is only available with OpenSSL 3.0.0 and later. + + .. versionadded:: 3.12 + .. data:: HAS_ALPN Whether the OpenSSL library has built-in support for the *Application-Layer @@ -1327,10 +1311,7 @@ SSL sockets also have the following additional methods and attributes: .. attribute:: SSLSocket.context - The :class:`SSLContext` object this SSL socket is tied to. If the SSL - socket was created using the deprecated :func:`wrap_socket` function - (rather than :meth:`SSLContext.wrap_socket`), this is a custom context - object created for this SSL socket. + The :class:`SSLContext` object this SSL socket is tied to. .. 
versionadded:: 3.2 @@ -2086,7 +2067,7 @@ Combined key and certificate Often the private key is stored in the same file as the certificate; in this case, only the ``certfile`` parameter to :meth:`SSLContext.load_cert_chain` -and :func:`wrap_socket` needs to be passed. If the private key is stored +needs to be passed. If the private key is stored with the certificate, it should come before the first certificate in the certificate chain:: diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index c3f9c1f5239e8b..88a887960edb58 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -839,7 +839,7 @@ of applications in statistics. The relative likelihood is computed as the probability of a sample occurring in a narrow range divided by the width of the range (hence the word "density"). Since the likelihood is relative to other points, - its value can be greater than `1.0`. + its value can be greater than ``1.0``. .. method:: NormalDist.cdf(x) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 2952c50787ad5d..785b76a11f2f38 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -353,7 +353,7 @@ Notes: The numeric literals accepted include the digits ``0`` to ``9`` or any Unicode equivalent (code points with the ``Nd`` property). - See https://www.unicode.org/Public/15.0.0/ucd/extracted/DerivedNumericType.txt + See `the Unicode Standard `_ for a complete list of code points with the ``Nd`` property. @@ -1522,7 +1522,7 @@ multiple fragments. printable string representation of *object*. For string objects, this is the string itself. If *object* does not have a :meth:`~object.__str__` method, then :func:`str` falls back to returning - :meth:`repr(object) `. + :func:`repr(object) `. .. index:: single: buffer protocol; str (built-in class) @@ -1597,8 +1597,9 @@ expression support in the :mod:`re` module). lowercase, :meth:`lower` would do nothing to ``'ß'``; :meth:`casefold` converts it to ``"ss"``. - The casefolding algorithm is described in section 3.13 of the Unicode - Standard. + The casefolding algorithm is + `described in section 3.13 of the Unicode Standard + `__. .. versionadded:: 3.3 @@ -1617,6 +1618,9 @@ expression support in the :mod:`re` module). range [*start*, *end*]. Optional arguments *start* and *end* are interpreted as in slice notation. + If *sub* is empty, returns the number of empty strings between characters + which is the length of the string plus one. + .. method:: str.encode(encoding="utf-8", errors="strict") @@ -1754,7 +1758,8 @@ expression support in the :mod:`re` module). one character, ``False`` otherwise. Alphabetic characters are those characters defined in the Unicode character database as "Letter", i.e., those with general category property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is different - from the "Alphabetic" property defined in the Unicode Standard. + from the `Alphabetic property defined in the Unicode Standard + `_. .. method:: str.isascii() @@ -1791,7 +1796,7 @@ expression support in the :mod:`re` module). Return ``True`` if the string is a valid identifier according to the language definition, section :ref:`identifiers`. - Call :func:`keyword.iskeyword` to test whether string ``s`` is a reserved + :func:`keyword.iskeyword` can be used to test whether string ``s`` is a reserved identifier, such as :keyword:`def` and :keyword:`class`. Example: @@ -1888,8 +1893,9 @@ expression support in the :mod:`re` module). 
Return a copy of the string with all the cased characters [4]_ converted to lowercase. - The lowercasing algorithm used is described in section 3.13 of the Unicode - Standard. + The lowercasing algorithm used is + `described in section 3.13 of the Unicode Standard + `__. .. method:: str.lstrip([chars]) @@ -2233,8 +2239,9 @@ expression support in the :mod:`re` module). character(s) is not "Lu" (Letter, uppercase), but e.g. "Lt" (Letter, titlecase). - The uppercasing algorithm used is described in section 3.13 of the Unicode - Standard. + The uppercasing algorithm used is + `described in section 3.13 of the Unicode Standard + `__. .. method:: str.zfill(width) @@ -2698,6 +2705,9 @@ arbitrary binary data. The subsequence to search for may be any :term:`bytes-like object` or an integer in the range 0 to 255. + If *sub* is empty, returns the number of empty slices between characters + which is the length of the bytes object plus one. + .. versionchanged:: 3.3 Also accept an integer in the range 0 to 255 as the subsequence. @@ -4370,11 +4380,9 @@ type, the :dfn:`dictionary`. (For other containers see the built-in A dictionary's keys are *almost* arbitrary values. Values that are not :term:`hashable`, that is, values containing lists, dictionaries or other mutable types (that are compared by value rather than by object identity) may -not be used as keys. Numeric types used for keys obey the normal rules for -numeric comparison: if two numbers compare equal (such as ``1`` and ``1.0``) -then they can be used interchangeably to index the same dictionary entry. (Note -however, that since computers store floating-point numbers as approximations it -is usually unwise to use them as dictionary keys.) +not be used as keys. +Values that compare equal (such as ``1``, ``1.0``, and ``True``) +can be used interchangeably to index the same dictionary entry. .. class:: dict(**kwargs) dict(mapping, **kwargs) diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index d12a5732fa4a0d..50d70731f77523 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -12,21 +12,25 @@ -------------- -This module performs conversions between Python values and C structs represented -as Python :class:`bytes` objects. This can be used in handling binary data -stored in files or from network connections, among other sources. It uses -:ref:`struct-format-strings` as compact descriptions of the layout of the C -structs and the intended conversion to/from Python values. +This module converts between Python values and C structs represented +as Python :class:`bytes` objects. Compact :ref:`format strings ` +describe the intended conversions to/from Python values. +The module's functions and objects can be used for two largely +distinct applications, data exchange with external sources (files or +network connections), or data transfer between the Python application +and the C layer. .. note:: - By default, the result of packing a given C struct includes pad bytes in - order to maintain proper alignment for the C types involved; similarly, - alignment is taken into account when unpacking. This behavior is chosen so - that the bytes of a packed struct correspond exactly to the layout in memory - of the corresponding C struct. To handle platform-independent data formats - or omit implicit pad bytes, use ``standard`` size and alignment instead of - ``native`` size and alignment: see :ref:`struct-alignment` for details. + When no prefix character is given, native mode is the default. 
It + packs or unpacks data based on the platform and compiler on which + the Python interpreter was built. + The result of packing a given C struct includes pad bytes which + maintain proper alignment for the C types involved; similarly, + alignment is taken into account when unpacking. In contrast, when + communicating data between external sources, the programmer is + responsible for defining byte ordering and padding between elements. + See :ref:`struct-alignment` for details. Several :mod:`struct` functions (and methods of :class:`Struct`) take a *buffer* argument. This refers to objects that implement the :ref:`bufferobjects` and @@ -102,10 +106,13 @@ The module defines the following exception and functions: Format Strings -------------- -Format strings are the mechanism used to specify the expected layout when -packing and unpacking data. They are built up from :ref:`format-characters`, -which specify the type of data being packed/unpacked. In addition, there are -special characters for controlling the :ref:`struct-alignment`. +Format strings describe the data layout when +packing and unpacking data. They are built up from :ref:`format characters`, +which specify the type of data being packed/unpacked. In addition, +special characters control the :ref:`byte order, size and alignment`. +Each format string consists of an optional prefix character which +describes the overall properties of the data and one or more format +characters which describe the actual data values and padding. .. _struct-alignment: @@ -116,6 +123,11 @@ Byte Order, Size, and Alignment By default, C types are represented in the machine's native format and byte order, and properly aligned by skipping pad bytes if necessary (according to the rules used by the C compiler). +This behavior is chosen so +that the bytes of a packed struct correspond exactly to the memory layout +of the corresponding C struct. +Whether to use native byte ordering +and padding or standard formats depends on the application. .. index:: single: @ (at); in struct format strings @@ -144,12 +156,10 @@ following table: If the first character is not one of these, ``'@'`` is assumed. -Native byte order is big-endian or little-endian, depending on the host -system. For example, Intel x86 and AMD64 (x86-64) are little-endian; -IBM z and most legacy architectures are big-endian; -and ARM, RISC-V and IBM Power feature switchable endianness -(bi-endian, though the former two are nearly always little-endian in practice). -Use ``sys.byteorder`` to check the endianness of your system. +Native byte order is big-endian or little-endian, depending on the +host system. For example, Intel x86, AMD64 (x86-64), and Apple M1 are +little-endian; IBM z and many legacy architectures are big-endian. +Use :data:`sys.byteorder` to check the endianness of your system. Native size and alignment are determined using the C compiler's ``sizeof`` expression. This is always combined with native byte order. @@ -194,7 +204,7 @@ platform-dependent. 
+--------+--------------------------+--------------------+----------------+------------+ | Format | C Type | Python type | Standard size | Notes | +========+==========================+====================+================+============+ -| ``x`` | pad byte | no value | | | +| ``x`` | pad byte | no value | | \(7) | +--------+--------------------------+--------------------+----------------+------------+ | ``c`` | :c:expr:`char` | bytes of length 1 | 1 | | +--------+--------------------------+--------------------+----------------+------------+ @@ -231,9 +241,9 @@ platform-dependent. +--------+--------------------------+--------------------+----------------+------------+ | ``d`` | :c:expr:`double` | float | 8 | \(4) | +--------+--------------------------+--------------------+----------------+------------+ -| ``s`` | :c:expr:`char[]` | bytes | | | +| ``s`` | :c:expr:`char[]` | bytes | | \(9) | +--------+--------------------------+--------------------+----------------+------------+ -| ``p`` | :c:expr:`char[]` | bytes | | | +| ``p`` | :c:expr:`char[]` | bytes | | \(8) | +--------+--------------------------+--------------------+----------------+------------+ | ``P`` | :c:expr:`void \*` | integer | | \(5) | +--------+--------------------------+--------------------+----------------+------------+ @@ -291,6 +301,34 @@ Notes: operations. See the Wikipedia page on the `half-precision floating-point format `_ for more information. +(7) + When packing, ``'x'`` inserts one NUL byte. + +(8) + The ``'p'`` format character encodes a "Pascal string", meaning a short + variable-length string stored in a *fixed number of bytes*, given by the count. + The first byte stored is the length of the string, or 255, whichever is + smaller. The bytes of the string follow. If the string passed in to + :func:`pack` is too long (longer than the count minus 1), only the leading + ``count-1`` bytes of the string are stored. If the string is shorter than + ``count-1``, it is padded with null bytes so that exactly count bytes in all + are used. Note that for :func:`unpack`, the ``'p'`` format character consumes + ``count`` bytes, but that the string returned can never contain more than 255 + bytes. + +(9) + For the ``'s'`` format character, the count is interpreted as the length of the + bytes, not a repeat count like for the other format characters; for example, + ``'10s'`` means a single 10-byte string mapping to or from a single + Python byte string, while ``'10c'`` means 10 + separate one byte character elements (e.g., ``cccccccccc``) mapping + to or from ten different Python byte objects. (See :ref:`struct-examples` + for a concrete demonstration of the difference.) + If a count is not given, it defaults to 1. For packing, the string is + truncated or padded with null bytes as appropriate to make it fit. For + unpacking, the resulting bytes object always has exactly the specified number + of bytes. As a special case, ``'0s'`` means a single, empty string (while + ``'0c'`` means 0 characters). A format character may be preceded by an integral repeat count. For example, the format string ``'4h'`` means exactly the same as ``'hhhh'``. @@ -298,15 +336,6 @@ the format string ``'4h'`` means exactly the same as ``'hhhh'``. Whitespace characters between formats are ignored; a count and its format must not contain whitespace though. 
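For instance, the ``'p'`` ("Pascal string") handling described in note (8)
can be sketched with a short doctest; the example uses native mode, which is
the default when no prefix character is given::

    >>> from struct import pack, unpack
    >>> pack('4p', b'abcdef')    # count is 4: one length byte plus at most 3 data bytes
    b'\x03abc'
    >>> unpack('4p', b'\x03abc')
    (b'abc',)
    >>> pack('6p', b'ab')        # shorter strings are padded with NUL bytes
    b'\x02ab\x00\x00\x00'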
-For the ``'s'`` format character, the count is interpreted as the length of the -bytes, not a repeat count like for the other format characters; for example, -``'10s'`` means a single 10-byte string, while ``'10c'`` means 10 characters. -If a count is not given, it defaults to 1. For packing, the string is -truncated or padded with null bytes as appropriate to make it fit. For -unpacking, the resulting bytes object always has exactly the specified number -of bytes. As a special case, ``'0s'`` means a single, empty string (while -``'0c'`` means 0 characters). - When packing a value ``x`` using one of the integer formats (``'b'``, ``'B'``, ``'h'``, ``'H'``, ``'i'``, ``'I'``, ``'l'``, ``'L'``, ``'q'``, ``'Q'``), if ``x`` is outside the valid range for that format @@ -316,17 +345,6 @@ then :exc:`struct.error` is raised. Previously, some of the integer formats wrapped out-of-range values and raised :exc:`DeprecationWarning` instead of :exc:`struct.error`. -The ``'p'`` format character encodes a "Pascal string", meaning a short -variable-length string stored in a *fixed number of bytes*, given by the count. -The first byte stored is the length of the string, or 255, whichever is -smaller. The bytes of the string follow. If the string passed in to -:func:`pack` is too long (longer than the count minus 1), only the leading -``count-1`` bytes of the string are stored. If the string is shorter than -``count-1``, it is padded with null bytes so that exactly count bytes in all -are used. Note that for :func:`unpack`, the ``'p'`` format character consumes -``count`` bytes, but that the string returned can never contain more than 255 -bytes. - .. index:: single: ? (question mark); in struct format strings For the ``'?'`` format character, the return value is either :const:`True` or @@ -342,18 +360,36 @@ Examples ^^^^^^^^ .. note:: - All examples assume a native byte order, size, and alignment with a - big-endian machine. + Native byte order examples (designated by the ``'@'`` format prefix or + lack of any prefix character) may not match what the reader's + machine produces as + that depends on the platform and compiler. 
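One way to see which native order applies on a given machine is to compare a
native pack against an explicit big-endian one; the output below assumes a
little-endian build, so it may differ on your system::

    >>> import struct, sys
    >>> sys.byteorder
    'little'
    >>> struct.pack('@h', 1)    # native byte order and size
    b'\x01\x00'
    >>> struct.pack('>h', 1)    # standard size, big-endian
    b'\x00\x01'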
+ +Pack and unpack integers of three different sizes, using big endian +ordering:: + + >>> from struct import * + >>> pack(">bhl", 1, 2, 3) + b'\x01\x00\x02\x00\x00\x00\x03' + >>> unpack('>bhl', b'\x01\x00\x02\x00\x00\x00\x03' + (1, 2, 3) + >>> calcsize('>bhl') + 7 -A basic example of packing/unpacking three integers:: +Attempt to pack an integer which is too large for the defined field:: - >>> from struct import * - >>> pack('hhl', 1, 2, 3) - b'\x00\x01\x00\x02\x00\x00\x00\x03' - >>> unpack('hhl', b'\x00\x01\x00\x02\x00\x00\x00\x03') - (1, 2, 3) - >>> calcsize('hhl') - 8 + >>> pack(">h", 99999) + Traceback (most recent call last): + File "", line 1, in + struct.error: 'h' format requires -32768 <= number <= 32767 + +Demonstrate the difference between ``'s'`` and ``'c'`` format +characters:: + + >>> pack("@ccc", b'1', b'2', b'3') + b'123' + >>> pack("@3s", b'123') + b'123' Unpacked fields can be named by assigning them to variables or by wrapping the result in a named tuple:: @@ -366,35 +402,132 @@ the result in a named tuple:: >>> Student._make(unpack('<10sHHb', record)) Student(name=b'raymond ', serialnum=4658, school=264, gradelevel=8) -The ordering of format characters may have an impact on size since the padding -needed to satisfy alignment requirements is different:: - - >>> pack('ci', b'*', 0x12131415) - b'*\x00\x00\x00\x12\x13\x14\x15' - >>> pack('ic', 0x12131415, b'*') - b'\x12\x13\x14\x15*' - >>> calcsize('ci') +The ordering of format characters may have an impact on size in native +mode since padding is implicit. In standard mode, the user is +responsible for inserting any desired padding. +Note in +the first ``pack`` call below that three NUL bytes were added after the +packed ``'#'`` to align the following integer on a four-byte boundary. +In this example, the output was produced on a little endian machine:: + + >>> pack('@ci', b'#', 0x12131415) + b'#\x00\x00\x00\x15\x14\x13\x12' + >>> pack('@ic', 0x12131415, b'#') + b'\x15\x14\x13\x12#' + >>> calcsize('@ci') 8 - >>> calcsize('ic') + >>> calcsize('@ic') 5 -The following format ``'llh0l'`` specifies two pad bytes at the end, assuming -longs are aligned on 4-byte boundaries:: +The following format ``'llh0l'`` results in two pad bytes being added +at the end, assuming the platform's longs are aligned on 4-byte boundaries:: - >>> pack('llh0l', 1, 2, 3) + >>> pack('@llh0l', 1, 2, 3) b'\x00\x00\x00\x01\x00\x00\x00\x02\x00\x03\x00\x00' -This only works when native size and alignment are in effect; standard size and -alignment does not enforce any alignment. - .. seealso:: Module :mod:`array` Packed binary storage of homogeneous data. - Module :mod:`xdrlib` - Packing and unpacking of XDR data. + Module :mod:`json` + JSON encoder and decoder. + + Module :mod:`pickle` + Python object serialization. + + +.. _applications: + +Applications +------------ + +Two main applications for the :mod:`struct` module exist, data +interchange between Python and C code within an application or another +application compiled using the same compiler (:ref:`native formats`), and +data interchange between applications using agreed upon data layout +(:ref:`standard formats`). Generally speaking, the format strings +constructed for these two domains are distinct. + + +.. _struct-native-formats: + +Native Formats +^^^^^^^^^^^^^^ + +When constructing format strings which mimic native layouts, the +compiler and machine architecture determine byte ordering and padding. 
+In such cases, the ``@`` format character should be used to specify +native byte ordering and data sizes. Internal pad bytes are normally inserted +automatically. It is possible that a zero-repeat format code will be +needed at the end of a format string to round up to the correct +byte boundary for proper alignment of consective chunks of data. + +Consider these two simple examples (on a 64-bit, little-endian +machine):: + + >>> calcsize('@lhl') + 24 + >>> calcsize('@llh') + 18 + +Data is not padded to an 8-byte boundary at the end of the second +format string without the use of extra padding. A zero-repeat format +code solves that problem:: + + >>> calcsize('@llh0l') + 24 + +The ``'x'`` format code can be used to specify the repeat, but for +native formats it is better to use a zero-repeat format like ``'0l'``. + +By default, native byte ordering and alignment is used, but it is +better to be explicit and use the ``'@'`` prefix character. + + +.. _struct-standard-formats: + +Standard Formats +^^^^^^^^^^^^^^^^ + +When exchanging data beyond your process such as networking or storage, +be precise. Specify the exact byte order, size, and alignment. Do +not assume they match the native order of a particular machine. +For example, network byte order is big-endian, while many popular CPUs +are little-endian. By defining this explicitly, the user need not +care about the specifics of the platform their code is running on. +The first character should typically be ``<`` or ``>`` +(or ``!``). Padding is the responsibility of the programmer. The +zero-repeat format character won't work. Instead, the user must +explicitly add ``'x'`` pad bytes where needed. Revisiting the +examples from the previous section, we have:: + + >>> calcsize('>> pack('>> calcsize('@llh') + 18 + >>> pack('@llh', 1, 2, 3) == pack('>> calcsize('>> calcsize('@llh0l') + 24 + >>> pack('@llh0l', 1, 2, 3) == pack('>> calcsize('>> calcsize('@llh0l') + 12 + >>> pack('@llh0l', 1, 2, 3) == pack('` (:option:`configure + If Python is :ref:`built in debug mode ` (:option:`configure --with-pydebug option <--with-pydebug>`), it also performs some expensive internal consistency checks. @@ -349,7 +358,7 @@ always available. files to (and read them from) a parallel directory tree rooted at this directory, rather than from ``__pycache__`` directories in the source code tree. Any ``__pycache__`` directories in the source code tree will be ignored - and new `.pyc` files written within the pycache prefix. Thus if you use + and new ``.pyc`` files written within the pycache prefix. Thus if you use :mod:`compileall` as a pre-build step, you must ensure you run it with the same pycache prefix (if any) that you will use at runtime. @@ -874,7 +883,7 @@ always available. .. function:: get_asyncgen_hooks() Returns an *asyncgen_hooks* object, which is similar to a - :class:`~collections.namedtuple` of the form `(firstiter, finalizer)`, + :class:`~collections.namedtuple` of the form ``(firstiter, finalizer)``, where *firstiter* and *finalizer* are expected to be either ``None`` or functions which take an :term:`asynchronous generator iterator` as an argument, and are used to schedule finalization of an asynchronous @@ -1178,7 +1187,7 @@ always available. string, which means the current working directory. To not prepend this potentially unsafe path, use the :option:`-P` command - line option or the :envvar:`PYTHONSAFEPATH` environment variable? + line option or the :envvar:`PYTHONSAFEPATH` environment variable. 
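As a small illustration (a sketch assuming Python 3.11 or later, where
:data:`sys.flags` gained a ``safe_path`` field), a program can check whether
that protection is in effect before relying on the first entry::

    import sys

    if sys.flags.safe_path:
        # -P or PYTHONSAFEPATH was used; no potentially unsafe entry was prepended
        print("safe path mode is enabled")
    else:
        print("sys.path starts with:", repr(sys.path[0]))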
A program is free to modify this list for its own purposes. Only strings should be added to :data:`sys.path`; all other data types are @@ -1555,6 +1564,38 @@ always available. This function has been added on a provisional basis (see :pep:`411` for details.) Use it only for debugging purposes. +.. function:: activate_stack_trampoline(backend, /) + + Activate the stack profiler trampoline *backend*. + The only supported backend is ``"perf"``. + + .. availability:: Linux. + + .. versionadded:: 3.12 + + .. seealso:: + + * :ref:`perf_profiling` + * https://perf.wiki.kernel.org + +.. function:: deactivate_stack_trampoline() + + Deactivate the current stack profiler trampoline backend. + + If no stack profiler is activated, this function has no effect. + + .. availability:: Linux. + + .. versionadded:: 3.12 + +.. function:: is_stack_trampoline_active() + + Return ``True`` if a stack profiler trampoline is active. + + .. availability:: Linux. + + .. versionadded:: 3.12 + .. function:: _enablelegacywindowsfsencoding() Changes the :term:`filesystem encoding and error handler` to 'mbcs' and diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc index 1a99f0518d1b47..2739d5bfc1dfa2 100644 --- a/Doc/library/token-list.inc +++ b/Doc/library/token-list.inc @@ -1,4 +1,4 @@ -.. Auto-generated by Tools/scripts/generate_token.py +.. Auto-generated by Tools/build/generate_token.py .. data:: ENDMARKER .. data:: NAME diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index f63d61eb1ea38d..94c9cb11f02d6d 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -105,7 +105,7 @@ A type alias is defined by assigning the type to the alias. In this example, def scale(scalar: float, vector: Vector) -> Vector: return [scalar * num for num in vector] - # typechecks; a list of floats qualifies as a Vector. + # passes type checking; a list of floats qualifies as a Vector. new_vector = scale(2.0, [1.0, -4.2, 5.4]) Type aliases are useful for simplifying complex type signatures. For example:: @@ -147,10 +147,10 @@ of the original type. This is useful in helping catch logical errors:: def get_user_name(user_id: UserId) -> str: ... - # typechecks + # passes type checking user_a = get_user_name(UserId(42351)) - # does not typecheck; an int is not a UserId + # fails type checking; an int is not a UserId user_b = get_user_name(-1) You may still perform all ``int`` operations on a variable of type ``UserId``, @@ -176,7 +176,7 @@ It is invalid to create a subtype of ``Derived``:: UserId = NewType('UserId', int) - # Fails at runtime and does not typecheck + # Fails at runtime and does not pass type checking class AdminUserId(UserId): pass However, it is possible to create a :class:`NewType` based on a 'derived' ``NewType``:: @@ -248,7 +248,7 @@ respectively. .. versionchanged:: 3.10 ``Callable`` now supports :class:`ParamSpec` and :data:`Concatenate`. - See :pep:`612` for more information. + See :pep:`612` for more details. .. seealso:: The documentation for :class:`ParamSpec` and :class:`Concatenate` provides @@ -319,7 +319,7 @@ single type parameter ``T`` . This also makes ``T`` valid as a type within the class body. 
The :class:`Generic` base class defines :meth:`~object.__class_getitem__` so -that ``LoggedVar[t]`` is valid as a type:: +that ``LoggedVar[T]`` is valid as a type:: from collections.abc import Iterable @@ -463,12 +463,12 @@ value of type :data:`Any` and assign it to any variable:: s = a # OK def foo(item: Any) -> int: - # Typechecks; 'item' could be any type, + # Passes type checking; 'item' could be any type, # and that type might have a 'bar' method item.bar() ... -Notice that no typechecking is performed when assigning a value of type +Notice that no type checking is performed when assigning a value of type :data:`Any` to a more precise type. For example, the static type checker did not report an error when assigning ``a`` to ``s`` even though ``s`` was declared to be of type :class:`str` and receives an :class:`int` value at @@ -500,20 +500,20 @@ reject almost all operations on it, and assigning it to a variable (or using it as a return value) of a more specialized type is a type error. For example:: def hash_a(item: object) -> int: - # Fails; an object does not have a 'magic' method. + # Fails type checking; an object does not have a 'magic' method. item.magic() ... def hash_b(item: Any) -> int: - # Typechecks + # Passes type checking item.magic() ... - # Typechecks, since ints and strs are subclasses of object + # Passes type checking, since ints and strs are subclasses of object hash_a(42) hash_a("foo") - # Typechecks, since Any is compatible with all types + # Passes type checking, since Any is compatible with all types hash_b(42) hash_b("foo") @@ -723,7 +723,7 @@ These can be used as types in annotations and do not support ``[]``. of the ``cls`` parameter. - Annotating an :meth:`~object.__enter__` method which returns self. - For more information, see :pep:`673`. + See :pep:`673` for more details. .. versionadded:: 3.11 @@ -760,8 +760,8 @@ These can be used as types in annotations using ``[]``, each having a unique syn is equivalent to ``Tuple[Any, ...]``, and in turn to :class:`tuple`. .. deprecated:: 3.9 - :class:`builtins.tuple ` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`builtins.tuple ` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. data:: Union @@ -849,12 +849,12 @@ These can be used as types in annotations using ``[]``, each having a unique syn respectively. .. deprecated:: 3.9 - :class:`collections.abc.Callable` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.abc.Callable` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. versionchanged:: 3.10 ``Callable`` now supports :class:`ParamSpec` and :data:`Concatenate`. - See :pep:`612` for more information. + See :pep:`612` for more details. .. seealso:: The documentation for :class:`ParamSpec` and :class:`Concatenate` provide @@ -957,8 +957,8 @@ These can be used as types in annotations using ``[]``, each having a unique syn .. versionadded:: 3.5.2 .. deprecated:: 3.9 - :class:`builtins.type ` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`builtins.type ` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. data:: Literal @@ -1040,8 +1040,7 @@ These can be used as types in annotations using ``[]``, each having a unique syn Special typing constructs that mark individual keys of a :class:`TypedDict` as either required or non-required respectively. 
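For example (a brief sketch; the ``Movie`` class and its keys are invented
for illustration)::

    from typing import TypedDict, Required, NotRequired

    class Movie(TypedDict, total=False):
        title: Required[str]      # must be present even though total=False
        year: NotRequired[int]    # may be omitted

    m: Movie = {"title": "Blade Runner"}  # accepted by a type checker; "year" is optional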
- For more information, see :class:`TypedDict` and - :pep:`655` ("Marking individual TypedDict items as required or potentially missing"). + See :class:`TypedDict` and :pep:`655` for more details. .. versionadded:: 3.11 @@ -1192,8 +1191,7 @@ These can be used as types in annotations using ``[]``, each having a unique syn is not a subtype of the former, since ``list`` is invariant. The responsibility of writing type-safe type guards is left to the user. - ``TypeGuard`` also works with type variables. For more information, see - :pep:`647` (User-Defined Type Guards). + ``TypeGuard`` also works with type variables. See :pep:`647` for more details. .. versionadded:: 3.10 @@ -1341,7 +1339,7 @@ These are not used in annotations. They are building blocks for creating generic ``Unpack[Ts]``.) Type variable tuples must *always* be unpacked. This helps distinguish type - variable types from normal type variables:: + variable tuples from normal type variables:: x: Ts # Not valid x: tuple[Ts] # Not valid @@ -1353,7 +1351,7 @@ These are not used in annotations. They are building blocks for creating generic Shape = TypeVarTuple('Shape') class Array(Generic[*Shape]): def __getitem__(self, key: tuple[*Shape]) -> float: ... - def __abs__(self) -> Array[*Shape]: ... + def __abs__(self) -> "Array[*Shape]": ... def get_shape(self) -> tuple[*Shape]: ... Type variable tuples can be happily combined with normal type variables:: @@ -1393,7 +1391,7 @@ These are not used in annotations. They are building blocks for creating generic to ``call_soon`` match the types of the (positional) arguments of ``callback``. - For more details on type variable tuples, see :pep:`646`. + See :pep:`646` for more details on type variable tuples. .. versionadded:: 3.11 @@ -1556,7 +1554,7 @@ These are not used in annotations. They are building blocks for creating generic func(C()) # Passes static type check - See :pep:`544` for details. Protocol classes decorated with + See :pep:`544` for more details. Protocol classes decorated with :func:`runtime_checkable` (described later) act as simple-minded runtime protocols that check only the presence of given attributes, ignoring their type signatures. @@ -1898,8 +1896,8 @@ Corresponding to built-in types ... .. deprecated:: 3.9 - :class:`builtins.dict ` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`builtins.dict ` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: List(list, MutableSequence[T]) @@ -1919,8 +1917,8 @@ Corresponding to built-in types return [item for item in vector if item > 0] .. deprecated:: 3.9 - :class:`builtins.list ` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`builtins.list ` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Set(set, MutableSet[T]) @@ -1929,16 +1927,17 @@ Corresponding to built-in types to use an abstract collection type such as :class:`AbstractSet`. .. deprecated:: 3.9 - :class:`builtins.set ` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`builtins.set ` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: FrozenSet(frozenset, AbstractSet[T_co]) A generic version of :class:`builtins.frozenset `. .. deprecated:: 3.9 - :class:`builtins.frozenset ` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`builtins.frozenset ` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. 
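As a short sketch of what "now supports subscripting" means in practice,
the builtin containers can be used directly in annotations instead of the
deprecated aliases::

    def unique_words(lines: list[str]) -> frozenset[str]:
        words: set[str] = set()
        for line in lines:
            words.update(line.split())
        return frozenset(words)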
.. note:: :data:`Tuple` is a special form. @@ -1952,8 +1951,8 @@ Corresponding to types in :mod:`collections` .. versionadded:: 3.5.2 .. deprecated:: 3.9 - :class:`collections.defaultdict` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.defaultdict` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: OrderedDict(collections.OrderedDict, MutableMapping[KT, VT]) @@ -1962,8 +1961,8 @@ Corresponding to types in :mod:`collections` .. versionadded:: 3.7.2 .. deprecated:: 3.9 - :class:`collections.OrderedDict` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.OrderedDict` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: ChainMap(collections.ChainMap, MutableMapping[KT, VT]) @@ -1973,8 +1972,8 @@ Corresponding to types in :mod:`collections` .. versionadded:: 3.6.1 .. deprecated:: 3.9 - :class:`collections.ChainMap` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.ChainMap` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Counter(collections.Counter, Dict[T, int]) @@ -1984,8 +1983,8 @@ Corresponding to types in :mod:`collections` .. versionadded:: 3.6.1 .. deprecated:: 3.9 - :class:`collections.Counter` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.Counter` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Deque(deque, MutableSequence[T]) @@ -1995,8 +1994,8 @@ Corresponding to types in :mod:`collections` .. versionadded:: 3.6.1 .. deprecated:: 3.9 - :class:`collections.deque` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.deque` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. Other concrete types """""""""""""""""""" @@ -2010,7 +2009,7 @@ Other concrete types represent the types of I/O streams such as returned by :func:`open`. - .. deprecated-removed:: 3.8 3.12 + .. deprecated-removed:: 3.8 3.13 The ``typing.io`` namespace is deprecated and will be removed. These types should be directly imported from ``typing`` instead. @@ -2024,7 +2023,7 @@ Other concrete types ``Pattern[str]``, ``Pattern[bytes]``, ``Match[str]``, or ``Match[bytes]``. - .. deprecated-removed:: 3.8 3.12 + .. deprecated-removed:: 3.8 3.13 The ``typing.re`` namespace is deprecated and will be removed. These types should be directly imported from ``typing`` instead. @@ -2058,13 +2057,13 @@ Abstract Base Classes Corresponding to collections in :mod:`collections.abc` """""""""""""""""""""""""""""""""""""""""""""""""""""" -.. class:: AbstractSet(Sized, Collection[T_co]) +.. class:: AbstractSet(Collection[T_co]) A generic version of :class:`collections.abc.Set`. .. deprecated:: 3.9 - :class:`collections.abc.Set` now supports ``[]``. See :pep:`585` and - :ref:`types-genericalias`. + :class:`collections.abc.Set` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: ByteString(Sequence[int]) @@ -2077,8 +2076,8 @@ Corresponding to collections in :mod:`collections.abc` annotate arguments of any of the types mentioned above. .. deprecated:: 3.9 - :class:`collections.abc.ByteString` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.ByteString` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. 
class:: Collection(Sized, Iterable[T_co], Container[T_co]) @@ -2087,34 +2086,34 @@ Corresponding to collections in :mod:`collections.abc` .. versionadded:: 3.6.0 .. deprecated:: 3.9 - :class:`collections.abc.Collection` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Collection` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Container(Generic[T_co]) A generic version of :class:`collections.abc.Container`. .. deprecated:: 3.9 - :class:`collections.abc.Container` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Container` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. -.. class:: ItemsView(MappingView, Generic[KT_co, VT_co]) +.. class:: ItemsView(MappingView, AbstractSet[tuple[KT_co, VT_co]]) A generic version of :class:`collections.abc.ItemsView`. .. deprecated:: 3.9 - :class:`collections.abc.ItemsView` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.ItemsView` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. -.. class:: KeysView(MappingView[KT_co], AbstractSet[KT_co]) +.. class:: KeysView(MappingView, AbstractSet[KT_co]) A generic version of :class:`collections.abc.KeysView`. .. deprecated:: 3.9 - :class:`collections.abc.KeysView` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.KeysView` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. -.. class:: Mapping(Sized, Collection[KT], Generic[VT_co]) +.. class:: Mapping(Collection[KT], Generic[KT, VT_co]) A generic version of :class:`collections.abc.Mapping`. This type can be used as follows:: @@ -2123,56 +2122,58 @@ Corresponding to collections in :mod:`collections.abc` return word_list[word] .. deprecated:: 3.9 - :class:`collections.abc.Mapping` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Mapping` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. -.. class:: MappingView(Sized, Iterable[T_co]) +.. class:: MappingView(Sized) A generic version of :class:`collections.abc.MappingView`. .. deprecated:: 3.9 - :class:`collections.abc.MappingView` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.MappingView` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: MutableMapping(Mapping[KT, VT]) A generic version of :class:`collections.abc.MutableMapping`. .. deprecated:: 3.9 - :class:`collections.abc.MutableMapping` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`collections.abc.MutableMapping` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: MutableSequence(Sequence[T]) A generic version of :class:`collections.abc.MutableSequence`. .. deprecated:: 3.9 - :class:`collections.abc.MutableSequence` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`collections.abc.MutableSequence` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: MutableSet(AbstractSet[T]) A generic version of :class:`collections.abc.MutableSet`. .. deprecated:: 3.9 - :class:`collections.abc.MutableSet` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. 
+ :class:`collections.abc.MutableSet` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Sequence(Reversible[T_co], Collection[T_co]) A generic version of :class:`collections.abc.Sequence`. .. deprecated:: 3.9 - :class:`collections.abc.Sequence` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Sequence` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. -.. class:: ValuesView(MappingView[VT_co]) +.. class:: ValuesView(MappingView, Collection[_VT_co]) A generic version of :class:`collections.abc.ValuesView`. .. deprecated:: 3.9 - :class:`collections.abc.ValuesView` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.ValuesView` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. Corresponding to other types in :mod:`collections.abc` """""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2182,16 +2183,16 @@ Corresponding to other types in :mod:`collections.abc` A generic version of :class:`collections.abc.Iterable`. .. deprecated:: 3.9 - :class:`collections.abc.Iterable` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Iterable` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Iterator(Iterable[T_co]) A generic version of :class:`collections.abc.Iterator`. .. deprecated:: 3.9 - :class:`collections.abc.Iterator` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Iterator` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]) @@ -2225,8 +2226,8 @@ Corresponding to other types in :mod:`collections.abc` start += 1 .. deprecated:: 3.9 - :class:`collections.abc.Generator` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Generator` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Hashable @@ -2240,8 +2241,8 @@ Corresponding to other types in :mod:`collections.abc` A generic version of :class:`collections.abc.Reversible`. .. deprecated:: 3.9 - :class:`collections.abc.Reversible` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Reversible` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Sized @@ -2268,8 +2269,8 @@ Asynchronous programming .. versionadded:: 3.5.3 .. deprecated:: 3.9 - :class:`collections.abc.Coroutine` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Coroutine` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra]) @@ -2305,8 +2306,9 @@ Asynchronous programming .. versionadded:: 3.6.1 .. deprecated:: 3.9 - :class:`collections.abc.AsyncGenerator` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`collections.abc.AsyncGenerator` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: AsyncIterable(Generic[T_co]) @@ -2315,8 +2317,8 @@ Asynchronous programming .. versionadded:: 3.5.2 .. deprecated:: 3.9 - :class:`collections.abc.AsyncIterable` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. 
+ :class:`collections.abc.AsyncIterable` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: AsyncIterator(AsyncIterable[T_co]) @@ -2325,8 +2327,8 @@ Asynchronous programming .. versionadded:: 3.5.2 .. deprecated:: 3.9 - :class:`collections.abc.AsyncIterator` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.AsyncIterator` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: Awaitable(Generic[T_co]) @@ -2335,8 +2337,8 @@ Asynchronous programming .. versionadded:: 3.5.2 .. deprecated:: 3.9 - :class:`collections.abc.Awaitable` now supports ``[]``. See :pep:`585` - and :ref:`types-genericalias`. + :class:`collections.abc.Awaitable` now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. Context manager types @@ -2350,8 +2352,9 @@ Context manager types .. versionadded:: 3.6.0 .. deprecated:: 3.9 - :class:`contextlib.AbstractContextManager` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`contextlib.AbstractContextManager` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. .. class:: AsyncContextManager(Generic[T_co]) @@ -2361,8 +2364,9 @@ Context manager types .. versionadded:: 3.6.2 .. deprecated:: 3.9 - :class:`contextlib.AbstractAsyncContextManager` now supports ``[]``. See - :pep:`585` and :ref:`types-genericalias`. + :class:`contextlib.AbstractAsyncContextManager` + now supports subscripting (``[]``). + See :pep:`585` and :ref:`types-genericalias`. Protocols --------- @@ -2632,7 +2636,7 @@ Functions and decorators def process(response): - See :pep:`484` for details and comparison with other typing semantics. + See :pep:`484` for more details and comparison with other typing semantics. .. versionchanged:: 3.11 Overloaded functions can now be introspected at runtime using @@ -2864,7 +2868,7 @@ convenience. This is subject to change, and not all deprecations are listed. +----------------------------------+---------------+-------------------+----------------+ | Feature | Deprecated in | Projected removal | PEP/issue | +==================================+===============+===================+================+ -| ``typing.io`` and ``typing.re`` | 3.8 | 3.12 | :issue:`38291` | +| ``typing.io`` and ``typing.re`` | 3.8 | 3.13 | :issue:`38291` | | submodules | | | | +----------------------------------+---------------+-------------------+----------------+ | ``typing`` versions of standard | 3.9 | Undecided | :pep:`585` | diff --git a/Doc/library/unittest.mock-examples.rst b/Doc/library/unittest.mock-examples.rst index 054efa81266326..f9a207bad6903f 100644 --- a/Doc/library/unittest.mock-examples.rst +++ b/Doc/library/unittest.mock-examples.rst @@ -1116,7 +1116,7 @@ on first use). That aside there is a way to use ``mock`` to affect the results of an import. Importing fetches an *object* from the :data:`sys.modules` dictionary. Note that it fetches an *object*, which need not be a module. Importing a module for the -first time results in a module object being put in `sys.modules`, so usually +first time results in a module object being put in ``sys.modules``, so usually when you import something you get a module back. This need not be the case however. 
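For illustration, a minimal sketch of that behaviour (the module name
``fake_mod`` and the ``NotAModule`` class are hypothetical)::

   import sys
   from unittest import mock

   class NotAModule:
       value = 42

   # ``import`` fetches whatever object is stored under the name in
   # sys.modules -- it does not have to be an actual module object.
   with mock.patch.dict(sys.modules, {"fake_mod": NotAModule()}):
       import fake_mod
       print(fake_mod.value)   # prints 42

Because :func:`unittest.mock.patch.dict` restores ``sys.modules`` when the
``with`` block exits, the fake entry does not leak into later imports.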
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst index 1478b34bc95514..96b396510794b4 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -113,7 +113,8 @@ or on combining URL components into a URL string. +------------------+-------+-------------------------+------------------------+ | :attr:`path` | 2 | Hierarchical path | empty string | +------------------+-------+-------------------------+------------------------+ - | :attr:`params` | 3 | No longer used | always an empty string | + | :attr:`params` | 3 | Parameters for last | empty string | + | | | path element | | +------------------+-------+-------------------------+------------------------+ | :attr:`query` | 4 | Query component | empty string | +------------------+-------+-------------------------+------------------------+ diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 06810612acba54..adc6cd339ac157 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -15,14 +15,22 @@ -------------- -The :mod:`venv` module provides support for creating lightweight "virtual -environments" with their own site directories, optionally isolated from system -site directories. Each virtual environment has its own Python binary (which -matches the version of the binary that was used to create this environment) and -can have its own independent set of installed Python packages in its site -directories. +.. _venv-def: +.. _venv-intro: + +The :mod:`!venv` module supports creating lightweight "virtual environments", +each with their own independent set of Python packages installed in +their :mod:`site` directories. +A virtual environment is created on top of an existing +Python installation, known as the virtual environment's "base" Python, and may +optionally be isolated from the packages in the base environment, +so only those explicitly installed in the virtual environment are available. + +When used from within a virtual environment, common installation tools such as +`pip`_ will install Python packages into a virtual environment +without needing to be told to do so explicitly. -See :pep:`405` for more information about Python virtual environments. +See :pep:`405` for more background on Python virtual environments. .. seealso:: @@ -36,54 +44,90 @@ Creating virtual environments .. include:: /using/venv-create.inc +.. _venv-explanation: -.. _venv-def: +How venvs work +-------------- -.. note:: A virtual environment is a Python environment such that the Python - interpreter, libraries and scripts installed into it are isolated from those - installed in other virtual environments, and (by default) any libraries - installed in a "system" Python, i.e., one which is installed as part of your - operating system. - - A virtual environment is a directory tree which contains Python executable - files and other files which indicate that it is a virtual environment. - - Common installation tools such as setuptools_ and pip_ work as - expected with virtual environments. In other words, when a virtual - environment is active, they install Python packages into the virtual - environment without needing to be told to do so explicitly. 
-
-   When a virtual environment is active (i.e., the virtual environment's Python
-   interpreter is running), the attributes :attr:`sys.prefix` and
-   :attr:`sys.exec_prefix` point to the base directory of the virtual
-   environment, whereas :attr:`sys.base_prefix` and
-   :attr:`sys.base_exec_prefix` point to the non-virtual environment Python
-   installation which was used to create the virtual environment. If a virtual
-   environment is not active, then :attr:`sys.prefix` is the same as
-   :attr:`sys.base_prefix` and :attr:`sys.exec_prefix` is the same as
-   :attr:`sys.base_exec_prefix` (they all point to a non-virtual environment
-   Python installation).
-
-   When a virtual environment is active, any options that change the
-   installation path will be ignored from all ``setuptools`` configuration
-   files to prevent projects being inadvertently installed outside of the
-   virtual environment.
-
-   When working in a command shell, users can make a virtual environment active
-   by running an ``activate`` script in the virtual environment's executables
-   directory (the precise filename and command to use the file is
-   shell-dependent), which prepends the virtual environment's directory for
-   executables to the ``PATH`` environment variable for the running shell. There
-   should be no need in other circumstances to activate a virtual
-   environment; scripts installed into virtual environments have a "shebang"
-   line which points to the virtual environment's Python interpreter. This means
-   that the script will run with that interpreter regardless of the value of
-   ``PATH``. On Windows, "shebang" line processing is supported if you have the
-   Python Launcher for Windows installed (this was added to Python in 3.3 - see
-   :pep:`397` for more details). Thus, double-clicking an installed script in a
-   Windows Explorer window should run the script with the correct interpreter
-   without there needing to be any reference to its virtual environment in
-   ``PATH``.
+When a Python interpreter is running from a virtual environment,
+:data:`sys.prefix` and :data:`sys.exec_prefix`
+point to the directories of the virtual environment,
+whereas :data:`sys.base_prefix` and :data:`sys.base_exec_prefix`
+point to those of the base Python used to create the environment.
+It is sufficient to check
+``sys.prefix != sys.base_prefix`` to determine if the current interpreter is
+running from a virtual environment.
+
+A virtual environment may be "activated" using a script in its binary directory
+(``bin`` on POSIX; ``Scripts`` on Windows).
+This will prepend that directory to your :envvar:`!PATH`, so that running
+:program:`!python` will invoke the environment's Python interpreter
+and you can run installed scripts without having to use their full path.
+The invocation of the activation script is platform-specific
+(:samp:`{<venv>}` must be replaced by the path to the directory
+containing the virtual environment):
+
++-------------+------------+--------------------------------------------------+
+| Platform    | Shell      | Command to activate virtual environment          |
++=============+============+==================================================+
+| POSIX       | bash/zsh   | :samp:`$ source {<venv>}/bin/activate`           |
+|             +------------+--------------------------------------------------+
+|             | fish       | :samp:`$ source {<venv>}/bin/activate.fish`      |
+|             +------------+--------------------------------------------------+
+|             | csh/tcsh   | :samp:`$ source {<venv>}/bin/activate.csh`       |
+|             +------------+--------------------------------------------------+
+|             | PowerShell | :samp:`$ {<venv>}/bin/Activate.ps1`              |
++-------------+------------+--------------------------------------------------+
+| Windows     | cmd.exe    | :samp:`C:\\> {<venv>}\\Scripts\\activate.bat`    |
+|             +------------+--------------------------------------------------+
+|             | PowerShell | :samp:`PS C:\\> {<venv>}\\Scripts\\Activate.ps1` |
++-------------+------------+--------------------------------------------------+
+
+.. versionadded:: 3.4
+   :program:`!fish` and :program:`!csh` activation scripts.
+
+.. versionadded:: 3.8
+   PowerShell activation scripts installed under POSIX for PowerShell Core
+   support.
+
+You don't specifically *need* to activate a virtual environment,
+as you can just specify the full path to that environment's
+Python interpreter when invoking Python.
+Furthermore, all scripts installed in the environment
+should be runnable without activating it.
+
+In order to achieve this, scripts installed into virtual environments have
+a "shebang" line which points to the environment's Python interpreter,
+i.e. :samp:`#!/{<path-to-venv>}/bin/python`.
+This means that the script will run with that interpreter regardless of the
+value of :envvar:`!PATH`. On Windows, "shebang" line processing is supported if
+you have the :ref:`launcher` installed. Thus, double-clicking an installed
+script in a Windows Explorer window should run it with the correct interpreter
+without the environment needing to be activated or on the :envvar:`!PATH`.
+
+When a virtual environment has been activated, the :envvar:`!VIRTUAL_ENV`
+environment variable is set to the path of the environment.
+Since explicitly activating a virtual environment is not required to use it,
+:envvar:`!VIRTUAL_ENV` cannot be relied upon to determine
+whether a virtual environment is being used.
+
+.. warning:: Because scripts installed in environments should not expect the
+   environment to be activated, their shebang lines contain the absolute paths
+   to their environment's interpreters. Because of this, environments are
+   inherently non-portable, in the general case. You should always have a
+   simple means of recreating an environment (for example, if you have a
+   requirements file ``requirements.txt``, you can invoke ``pip install -r
+   requirements.txt`` using the environment's ``pip`` to install all of the
+   packages needed by the environment). If for any reason you need to move the
+   environment to a new location, you should recreate it at the desired
+   location and delete the one at the old location. If you move an environment
+   because you moved a parent directory of it, you should recreate the
+   environment in its new location. Otherwise, software installed into the
+   environment may not work as expected.
+
+You can deactivate a virtual environment by typing ``deactivate`` in your shell.
+The exact mechanism is platform-specific and is an internal implementation +detail (typically, a script or shell function will be used). .. _venv-api: diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 8397de4fb488f1..a1e542b1e927e4 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -146,6 +146,9 @@ See :ref:`__slots__ documentation ` for details. prevent their use as dictionary keys. *callback* is the same as the parameter of the same name to the :func:`ref` function. + Accessing an attribute of the proxy object after the referent is + garbage collected raises :exc:`ReferenceError`. + .. versionchanged:: 3.8 Extended the operator support on proxy objects to include the matrix multiplication operators ``@`` and ``@=``. diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index 06223e667a450a..75dea466335163 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -7,6 +7,8 @@ .. moduleauthor:: Phillip J. Eby .. sectionauthor:: Phillip J. Eby +**Source code:** :source:`Lib/wsgiref` + -------------- The Web Server Gateway Interface (WSGI) is a standard interface between web diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst index 82e5d6aea2310e..72a7a98c2ac4f2 100644 --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -148,8 +148,8 @@ module documentation. This section lists the differences between the API and Similarly, explicitly stating the *standalone* argument causes the standalone document declarations to be added to the prologue of the XML document. - If the value is set to `True`, `standalone="yes"` is added, - otherwise it is set to `"no"`. + If the value is set to ``True``, ``standalone="yes"`` is added, + otherwise it is set to ``"no"``. Not stating the argument will omit the declaration from the document. .. versionchanged:: 3.8 diff --git a/Doc/library/xml.sax.utils.rst b/Doc/library/xml.sax.utils.rst index e46fefdf997510..ab4606bcf9fe6c 100644 --- a/Doc/library/xml.sax.utils.rst +++ b/Doc/library/xml.sax.utils.rst @@ -25,6 +25,11 @@ or as base classes. replaced with its corresponding value. The characters ``'&'``, ``'<'`` and ``'>'`` are always escaped, even if *entities* is provided. + .. note:: + + This function should only be used to escape characters that + can't be used directly in XML. Do not use this function as a general + string translation function. .. function:: unescape(data, entities={}) diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst index 8b09acd4bd3049..bd2c49a6edab7f 100644 --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -60,7 +60,7 @@ between conformable Python objects and XML on the wire. may be passed to calls. The *headers* parameter is an optional sequence of HTTP headers to send with each request, expressed as a sequence of 2-tuples representing the header - name and value. (e.g. `[('Header-Name', 'value')]`). + name and value. (e.g. ``[('Header-Name', 'value')]``). The obsolete *use_datetime* flag is similar to *use_builtin_types* but it applies only to date/time values. 
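A minimal sketch of the *headers* parameter described above (the URL and the
header values are placeholders)::

   import xmlrpc.client

   proxy = xmlrpc.client.ServerProxy(
       "http://localhost:8000/",
       headers=[("X-Request-Source", "docs-example"),
                ("Authorization", "Basic dXNlcjpwYXNz")],
   )
   # Every call made through this proxy, e.g. proxy.system.listMethods(),
   # now sends the two extra HTTP headers with the request.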
diff --git a/Doc/library/xmlrpc.server.rst b/Doc/library/xmlrpc.server.rst index 9778a859da1fbf..016369d2b89d2c 100644 --- a/Doc/library/xmlrpc.server.rst +++ b/Doc/library/xmlrpc.server.rst @@ -263,7 +263,7 @@ This ExampleService demo can be invoked from the command line:: The client that interacts with the above server is included in -`Lib/xmlrpc/client.py`:: +``Lib/xmlrpc/client.py``:: server = ServerProxy("http://localhost:8000") diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst index 2f1879dc056a88..d2e5619e7e47c2 100644 --- a/Doc/library/zoneinfo.rst +++ b/Doc/library/zoneinfo.rst @@ -9,6 +9,8 @@ .. moduleauthor:: Paul Ganssle .. sectionauthor:: Paul Ganssle +**Source code:** :source:`Lib/zoneinfo` + -------------- The :mod:`zoneinfo` module provides a concrete time zone implementation to diff --git a/Doc/license.rst b/Doc/license.rst index 00691b30ba6d3e..a934c60698f0b3 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -302,7 +302,8 @@ for third-party software incorporated in the Python distribution. Mersenne Twister ---------------- -The :mod:`_random` module includes code based on a download from +The :mod:`!_random` C extension underlying the :mod:`random` module +includes code based on a download from http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/MT2002/emt19937ar.html. The following are the verbatim comments from the original code:: @@ -386,7 +387,8 @@ Project, https://www.wide.ad.jp/. :: Asynchronous socket services ---------------------------- -The :mod:`asynchat` and :mod:`asyncore` modules contain the following notice:: +The :mod:`test.support.asynchat` and :mod:`test.support.asyncore` +modules contain the following notice:: Copyright 1996 by Sam Rushing @@ -819,7 +821,8 @@ sources unless the build is configured ``--with-system-expat``:: libffi ------ -The :mod:`_ctypes` extension is built using an included copy of the libffi +The :mod:`!_ctypes` C extension underlying the :mod:`ctypes` module +is built using an included copy of the libffi sources unless the build is configured ``--with-system-libffi``:: Copyright (c) 1996-2008 Red Hat, Inc and others. @@ -920,7 +923,8 @@ on the cfuhash project:: libmpdec -------- -The :mod:`_decimal` module is built using an included copy of the libmpdec +The :mod:`!_decimal` C extension underlying the :mod:`decimal` module +is built using an included copy of the libmpdec library unless the build is configured ``--with-system-libmpdec``:: Copyright (c) 2008-2020 Stefan Krah. All rights reserved. @@ -984,3 +988,31 @@ https://www.w3.org/TR/xml-c14n2-testcases/ and is distributed under the THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Audioop +------- + +The audioop module uses the code base in g771.c file of the SoX project:: + + Programming the AdLib/Sound Blaster + FM Music Chips + Version 2.0 (24 Feb 1992) + Copyright (c) 1991, 1992 by Jeffrey S. Lee + jlee@smylex.uucp + Warranty and Copyright Policy + This document is provided on an "as-is" basis, and its author makes + no warranty or representation, express or implied, with respect to + its quality performance or fitness for a particular purpose. In no + event will the author of this document be liable for direct, indirect, + special, incidental, or consequential damages arising out of the use + or inability to use the information contained within. Use of this + document is at your own risk. 
+ This file may be used and copied freely so long as the applicable + copyright notices are retained, and no modifications are made to the + text of the document. No money shall be charged for its distribution + beyond reasonable shipping, handling and duplication costs, nor shall + proprietary changes be made to this document so that it cannot be + distributed freely. This document may not be included in published + material or commercial packages without the written consent of its + author. diff --git a/Doc/make.bat b/Doc/make.bat index 4f0b3c11f4facb..87d8359ef112bb 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -109,7 +109,7 @@ echo.always available include: echo. echo. Provided by Sphinx: echo. html, htmlhelp, latex, text -echo. suspicious, linkcheck, changes, doctest +echo. linkcheck, changes, doctest echo. Provided by this script: echo. clean, check, htmlview echo. diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index d914686c0a1ad9..9e09515f50d12f 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -343,7 +343,7 @@ the case of :keyword:`except`, but in the case of exception groups we can have partial matches when the type matches some of the exceptions in the group. This means that multiple :keyword:`!except*` clauses can execute, each handling part of the exception group. -Each clause executes once and handles an exception group +Each clause executes at most once and handles an exception group of all matching exceptions. Each exception in the group is handled by at most one :keyword:`!except*` clause, the first that matches it. :: @@ -363,18 +363,29 @@ one :keyword:`!except*` clause, the first that matches it. :: +-+---------------- 1 ---------------- | ValueError: 1 +------------------------------------ - >>> - Any remaining exceptions that were not handled by any :keyword:`!except*` - clause are re-raised at the end, combined into an exception group along with - all exceptions that were raised from within :keyword:`!except*` clauses. - An :keyword:`!except*` clause must have a matching type, - and this type cannot be a subclass of :exc:`BaseExceptionGroup`. - It is not possible to mix :keyword:`except` and :keyword:`!except*` - in the same :keyword:`try`. - :keyword:`break`, :keyword:`continue` and :keyword:`return` - cannot appear in an :keyword:`!except*` clause. +Any remaining exceptions that were not handled by any :keyword:`!except*` +clause are re-raised at the end, combined into an exception group along with +all exceptions that were raised from within :keyword:`!except*` clauses. + +If the raised exception is not an exception group and its type matches +one of the :keyword:`!except*` clauses, it is caught and wrapped by an +exception group with an empty message string. :: + + >>> try: + ... raise BlockingIOError + ... except* BlockingIOError as e: + ... print(repr(e)) + ... + ExceptionGroup('', (BlockingIOError())) + +An :keyword:`!except*` clause must have a matching type, +and this type cannot be a subclass of :exc:`BaseExceptionGroup`. +It is not possible to mix :keyword:`except` and :keyword:`!except*` +in the same :keyword:`try`. +:keyword:`break`, :keyword:`continue` and :keyword:`return` +cannot appear in an :keyword:`!except*` clause. .. index:: @@ -582,6 +593,7 @@ The :keyword:`!match` statement keyword: if keyword: as pair: match; case + single: as; match statement single: : (colon); compound statement .. 
versionadded:: 3.10 diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 9dacd66ee564fd..fd682fcff02003 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1904,6 +1904,8 @@ Attribute lookup speed can be significantly improved as well. and *__weakref__* for each instance. +.. _datamodel-note-slots: + Notes on using *__slots__* """""""""""""""""""""""""" @@ -2821,7 +2823,7 @@ Customizing positional arguments in class pattern matching When using a class name in a pattern, positional arguments in the pattern are not allowed by default, i.e. ``case MyClass(x, y)`` is typically invalid without special -support in ``MyClass``. To be able to use that kind of patterns, the class needs to +support in ``MyClass``. To be able to use that kind of pattern, the class needs to define a *__match_args__* attribute. .. data:: object.__match_args__ diff --git a/Doc/reference/executionmodel.rst b/Doc/reference/executionmodel.rst index d9183561820b2b..3f01180e13f776 100644 --- a/Doc/reference/executionmodel.rst +++ b/Doc/reference/executionmodel.rst @@ -67,7 +67,7 @@ The following constructs bind names: + :keyword:`for` loop header, + after :keyword:`!as` in a :keyword:`with` statement, :keyword:`except` - clause or in the as-pattern in structural pattern matching, + clause, :keyword:`except* ` clause, or in the as-pattern in structural pattern matching, + in a capture pattern in structural pattern matching * :keyword:`import` statements. diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index a6ca55dafe5365..920e4d19b82d8d 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -154,7 +154,7 @@ tuple may or may not yield the same object). single: , (comma) Note that tuples are not formed by the parentheses, but rather by use of the -comma operator. The exception is the empty tuple, for which parentheses *are* +comma. The exception is the empty tuple, for which parentheses *are* required --- allowing unparenthesized "nothing" in expressions would cause ambiguities and allow common typos to pass uncaught. @@ -1562,7 +1562,7 @@ built-in types. true). * Mappings (instances of :class:`dict`) compare equal if and only if they have - equal `(key, value)` pairs. Equality comparison of the keys and values + equal ``(key, value)`` pairs. Equality comparison of the keys and values enforces reflexivity. Order comparisons (``<``, ``>``, ``<=``, and ``>=``) raise :exc:`TypeError`. @@ -1741,6 +1741,12 @@ returns a boolean value regardless of the type of its argument (for example, ``not 'foo'`` produces ``False`` rather than ``''``.) +.. index:: + single: := (colon equals) + single: assignment expression + single: walrus operator + single: named expression + Assignment expressions ====================== @@ -1766,6 +1772,13 @@ Or, when processing a file stream in chunks: while chunk := file.read(9000): process(chunk) +Assignment expressions must be surrounded by parentheses when used +as sub-expressions in slicing, conditional, lambda, +keyword-argument, and comprehension-if expressions +and in ``assert`` and ``with`` statements. +In all other places where they can be used, parentheses are not required, +including in ``if`` and ``while`` statements. + .. versionadded:: 3.8 See :pep:`572` for more details about assignment expressions. 
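A small sketch of where those parentheses are and are not required (the
``line`` and ``words`` values are illustrative)::

   import re

   line = "name=value"

   # No parentheses needed when the assignment expression is used directly
   # in an ``if`` or ``while`` statement:
   if match := re.match(r"(\w+)=(\w+)", line):
       print(match.group(1))

   # Parentheses are required when it is a sub-expression, for example in a
   # comprehension-if:
   words = ["alpha", "", "beta"]
   lengths = [n for w in words if (n := len(w)) > 0]
   # ``[n for w in words if n := len(w)]`` would be a syntax error.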
diff --git a/Doc/reference/grammar.rst b/Doc/reference/grammar.rst index 59b45005836a76..bc1db7b039cd5a 100644 --- a/Doc/reference/grammar.rst +++ b/Doc/reference/grammar.rst @@ -12,7 +12,7 @@ and `PEG `_. In particular, ``&`` followed by a symbol, token or parenthesized group indicates a positive lookahead (i.e., is required to match but not consumed), while ``!`` indicates a negative lookahead (i.e., is -required _not_ to match). We use the ``|`` separator to mean PEG's +required *not* to match). We use the ``|`` separator to mean PEG's "ordered choice" (written as ``/`` in traditional PEG grammars). See :pep:`617` for more details on the grammar's syntax. diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index 58f4ef897bdb7d..b22b5251f1de46 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -558,6 +558,11 @@ listed below. It is **strongly** recommended that you rely on :attr:`__spec__` instead instead of this attribute. + .. versionchanged:: 3.12 + The value of ``__loader__`` is expected to be the same as + ``__spec__.loader``. The use of ``__loader__`` is deprecated and slated + for removal in Python 3.14. + .. attribute:: __package__ The module's ``__package__`` attribute may be set. Its value must @@ -568,6 +573,9 @@ listed below. submodules, to the parent package's name. See :pep:`366` for further details. + This attribute is used instead of ``__name__`` to calculate explicit + relative imports for main modules, as defined in :pep:`366`. + It is **strongly** recommended that you rely on :attr:`__spec__` instead instead of this attribute. @@ -822,7 +830,7 @@ The path based finder iterates over every entry in the search path, and for each of these, looks for an appropriate :term:`path entry finder` (:class:`~importlib.abc.PathEntryFinder`) for the path entry. Because this can be an expensive operation (e.g. there may be -`stat()` call overheads for this search), the path based finder maintains +``stat()`` call overheads for this search), the path based finder maintains a cache mapping path entries to path entry finders. This cache is maintained in :data:`sys.path_importer_cache` (despite the name, this cache actually stores finder objects rather than being limited to :term:`importer` objects). @@ -1025,25 +1033,6 @@ and ``__main__.__spec__`` is set accordingly, they're still considered to populate the ``__main__`` namespace, and not during normal import. -Open issues -=========== - -XXX It would be really nice to have a diagram. - -XXX * (import_machinery.rst) how about a section devoted just to the -attributes of modules and packages, perhaps expanding upon or supplanting the -related entries in the data model reference page? - -XXX runpy, pkgutil, et al in the library manual should all get "See Also" -links at the top pointing to the new import system section. - -XXX Add more explanation regarding the different ways in which -``__main__`` is initialized? - -XXX Add more info on ``__main__`` quirks/pitfalls (i.e. copy from -:pep:`395`). - - References ========== diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 4ab6e90a623449..8adb4b740825d0 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -612,9 +612,13 @@ Notes: As in Standard C, up to three octal digits are accepted. .. versionchanged:: 3.11 - Octal escapes with value larger than ``0o377`` produce a :exc:`DeprecationWarning`. 
- In a future Python version they will be a :exc:`SyntaxWarning` and - eventually a :exc:`SyntaxError`. + Octal escapes with value larger than ``0o377`` produce a + :exc:`DeprecationWarning`. + + .. versionchanged:: 3.12 + Octal escapes with value larger than ``0o377`` produce a + :exc:`SyntaxWarning`. In a future Python version they will be eventually + a :exc:`SyntaxError`. (3) Unlike in Standard C, exactly two hex digits are required. @@ -646,9 +650,11 @@ escape sequences only recognized in string literals fall into the category of unrecognized escapes for bytes literals. .. versionchanged:: 3.6 - Unrecognized escape sequences produce a :exc:`DeprecationWarning`. In - a future Python version they will be a :exc:`SyntaxWarning` and - eventually a :exc:`SyntaxError`. + Unrecognized escape sequences produce a :exc:`DeprecationWarning`. + + .. versionchanged:: 3.12 + Unrecognized escape sequences produce a :exc:`SyntaxWarning`. In a future + Python version they will be eventually a :exc:`SyntaxError`. Even in a raw literal, quotes can be escaped with a backslash, but the backslash remains in the result; for example, ``r"\""`` is a valid string diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 8311de0457f6af..c98ac81e415b72 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -330,7 +330,7 @@ statement, of a variable or attribute annotation and an optional assignment stat annotated_assignment_stmt: `augtarget` ":" `expression` : ["=" (`starred_expression` | `yield_expression`)] -The difference from normal :ref:`assignment` is that only single target is allowed. +The difference from normal :ref:`assignment` is that only a single target is allowed. For simple names as assignment targets, if in class or module scope, the annotations are evaluated and stored in a special class or module @@ -365,8 +365,8 @@ target, then the interpreter evaluates the target except for the last IDEs. .. versionchanged:: 3.8 - Now annotated assignments allow same expressions in the right hand side as - the regular assignments. Previously, some expressions (like un-parenthesized + Now annotated assignments allow the same expressions in the right hand side as + regular assignments. Previously, some expressions (like un-parenthesized tuple expressions) caused a syntax error. @@ -756,7 +756,7 @@ commas) the two steps are carried out separately for each clause, just as though the clauses had been separated out into individual import statements. -The details of the first step, finding and loading modules are described in +The details of the first step, finding and loading modules, are described in greater detail in the section on the :ref:`import system `, which also describes the various types of packages and modules that can be imported, as well as all the hooks that can be used to customize @@ -994,20 +994,12 @@ The :keyword:`!nonlocal` statement .. productionlist:: python-grammar nonlocal_stmt: "nonlocal" `identifier` ("," `identifier`)* -.. XXX add when implemented - : ["=" (`target_list` "=")+ starred_expression] - : | "nonlocal" identifier augop expression_list - The :keyword:`nonlocal` statement causes the listed identifiers to refer to previously bound variables in the nearest enclosing scope excluding globals. This is important because the default behavior for binding is to search the local namespace first. The statement allows encapsulated code to rebind variables outside of the local scope besides the global (module) scope. -.. 
XXX not implemented - The :keyword:`nonlocal` statement may prepend an assignment or augmented - assignment, but not an expression. - Names listed in a :keyword:`nonlocal` statement, unlike those listed in a :keyword:`global` statement, must refer to pre-existing bindings in an enclosing scope (the scope in which a new binding should be created cannot diff --git a/Doc/requirements.txt b/Doc/requirements.txt index f8a7f9db144c21..958665db69e227 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -7,7 +7,7 @@ sphinx==4.5.0 blurb -sphinx-lint<1 +sphinx-lint==0.6.7 # The theme used by the documentation is stored separately, so we need # to install that as well. diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 8c3aa47ad1c74b..db7bb3b44219d2 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -26,7 +26,7 @@ from sphinx.errors import NoUri except ImportError: from sphinx.environment import NoUri -from sphinx.locale import translators +from sphinx.locale import _ as sphinx_gettext from sphinx.util import status_iterator, logging from sphinx.util.nodes import split_explicit_title from sphinx.writers.text import TextWriter, TextTranslator @@ -37,10 +37,6 @@ from sphinx.domains.python import PyClassmember as PyMethod from sphinx.domains.python import PyModulelevel as PyFunction -# Support for checking for suspicious markup - -import suspicious - ISSUE_URI = 'https://bugs.python.org/issue?@action=redirect&bpo=%s' GH_ISSUE_URI = 'https://github.com/python/cpython/issues/%s' @@ -108,7 +104,7 @@ class ImplementationDetail(Directive): def run(self): self.assert_has_content() pnode = nodes.compound(classes=['impl-detail']) - label = translators['sphinx'].gettext(self.label_text) + label = sphinx_gettext(self.label_text) content = self.content add_text = nodes.strong(label, label) self.state.nested_parse(content, self.content_offset, pnode) @@ -256,7 +252,7 @@ def run(self): else: args = [] - label = translators['sphinx'].gettext(self._label[min(2, len(args))]) + label = sphinx_gettext(self._label[min(2, len(args))]) text = label.format(name="``{}``".format(name), args=", ".join("``{}``".format(a) for a in args if a)) @@ -435,7 +431,7 @@ def run(self): else: label = self._removed_label - label = translators['sphinx'].gettext(label) + label = sphinx_gettext(label) text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) if len(self.arguments) == 3: inodes, messages = self.state.inline_text(self.arguments[2], @@ -686,7 +682,6 @@ def setup(app): app.add_directive('audit-event-table', AuditEventListDirective) app.add_directive('deprecated-removed', DeprecatedRemoved) app.add_builder(PydocTopicsBuilder) - app.add_builder(suspicious.CheckSuspiciousMarkupBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) app.add_object_type('pdbcommand', 'pdbcmd', '%s (pdb command)', parse_pdb_command) app.add_object_type('2to3fixer', '2to3fixer', '%s (2to3 fixer)') diff --git a/Doc/tools/extensions/suspicious.py b/Doc/tools/extensions/suspicious.py deleted file mode 100644 index 2d581a8a6c3db6..00000000000000 --- a/Doc/tools/extensions/suspicious.py +++ /dev/null @@ -1,251 +0,0 @@ -""" -Try to detect suspicious constructs, resembling markup -that has leaked into the final output. - -Suspicious lines are reported in a comma-separated-file, -``suspicious.csv``, located in the output directory. 
- -The file is utf-8 encoded, and each line contains four fields: - - * document name (normalized) - * line number in the source document - * problematic text - * complete line showing the problematic text in context - -It is common to find many false positives. To avoid reporting them -again and again, they may be added to the ``ignored.csv`` file -(located in the configuration directory). The file has the same -format as ``suspicious.csv`` with a few differences: - - - each line defines a rule; if the rule matches, the issue - is ignored. - - line number may be empty (that is, nothing between the - commas: ",,"). In this case, line numbers are ignored (the - rule matches anywhere in the file). - - the last field does not have to be a complete line; some - surrounding text (never more than a line) is enough for - context. - -Rules are processed sequentially. A rule matches when: - - * document names are the same - * problematic texts are the same - * line numbers are close to each other (5 lines up or down) - * the rule text is completely contained into the source line - -The simplest way to create the ignored.csv file is by copying -undesired entries from suspicious.csv (possibly trimming the last -field.) - -Copyright 2009 Gabriel A. Genellina - -""" - -import os -import re -import csv - -from docutils import nodes -from sphinx.builders import Builder -import sphinx.util - -detect_all = re.compile(r''' - ::(?=[^=])| # two :: (but NOT ::=) - :[a-zA-Z][a-zA-Z0-9]+| # :foo - `| # ` (seldom used by itself) - (? don't care - self.issue = issue # the markup fragment that triggered this rule - self.line = line # text of the container element (single line only) - self.used = False - - def __repr__(self): - return '{0.docname},,{0.issue},{0.line}'.format(self) - - - -class dialect(csv.excel): - """Our dialect: uses only linefeed as newline.""" - lineterminator = '\n' - - -class CheckSuspiciousMarkupBuilder(Builder): - """ - Checks for possibly invalid markup that may leak into the output. - """ - name = 'suspicious' - logger = sphinx.util.logging.getLogger("CheckSuspiciousMarkupBuilder") - - def init(self): - # create output file - self.log_file_name = os.path.join(self.outdir, 'suspicious.csv') - open(self.log_file_name, 'w').close() - # load database of previously ignored issues - self.load_rules(os.path.join(os.path.dirname(__file__), '..', - 'susp-ignored.csv')) - - def get_outdated_docs(self): - return self.env.found_docs - - def get_target_uri(self, docname, typ=None): - return '' - - def prepare_writing(self, docnames): - pass - - def write_doc(self, docname, doctree): - # set when any issue is encountered in this document - self.any_issue = False - self.docname = docname - visitor = SuspiciousVisitor(doctree, self) - doctree.walk(visitor) - - def finish(self): - unused_rules = [rule for rule in self.rules if not rule.used] - if unused_rules: - self.logger.warning( - 'Found %s/%s unused rules: %s' % ( - len(unused_rules), len(self.rules), - '\n'.join(repr(rule) for rule in unused_rules), - ) - ) - return - - def check_issue(self, line, lineno, issue): - if not self.is_ignored(line, lineno, issue): - self.report_issue(line, lineno, issue) - - def is_ignored(self, line, lineno, issue): - """Determine whether this issue should be ignored.""" - docname = self.docname - for rule in self.rules: - if rule.docname != docname: continue - if rule.issue != issue: continue - # Both lines must match *exactly*. This is rather strict, - # and probably should be improved. 
- # Doing fuzzy matches with levenshtein distance could work, - # but that means bringing other libraries... - # Ok, relax that requirement: just check if the rule fragment - # is contained in the document line - if rule.line not in line: continue - # Check both line numbers. If they're "near" - # this rule matches. (lineno=None means "don't care") - if (rule.lineno is not None) and \ - abs(rule.lineno - lineno) > 5: continue - # if it came this far, the rule matched - rule.used = True - return True - return False - - def report_issue(self, text, lineno, issue): - self.any_issue = True - self.write_log_entry(lineno, issue, text) - self.logger.warning('[%s:%d] "%s" found in "%-.120s"' % - (self.docname, lineno, issue, text)) - self.app.statuscode = 1 - - def write_log_entry(self, lineno, issue, text): - f = open(self.log_file_name, 'a') - writer = csv.writer(f, dialect) - writer.writerow([self.docname, lineno, issue, text.strip()]) - f.close() - - def load_rules(self, filename): - """Load database of previously ignored issues. - - A csv file, with exactly the same format as suspicious.csv - Fields: document name (normalized), line number, issue, surrounding text - """ - self.logger.info("loading ignore rules... ", nonl=1) - self.rules = rules = [] - try: - f = open(filename, 'r') - except IOError: - return - for i, row in enumerate(csv.reader(f)): - if len(row) != 4: - raise ValueError( - "wrong format in %s, line %d: %s" % (filename, i+1, row)) - docname, lineno, issue, text = row - if lineno: - lineno = int(lineno) - else: - lineno = None - rule = Rule(docname, lineno, issue, text) - rules.append(rule) - f.close() - self.logger.info('done, %d rules loaded' % len(self.rules)) - - -def get_lineno(node): - """Obtain line number information for a node.""" - lineno = None - while lineno is None and node: - node = node.parent - lineno = node.line - return lineno - - -def extract_line(text, index): - """text may be a multiline string; extract - only the line containing the given character index. - - >>> extract_line("abc\ndefgh\ni", 6) - >>> 'defgh' - >>> for i in (0, 2, 3, 4, 10): - ... print extract_line("abc\ndefgh\ni", i) - abc - abc - abc - defgh - defgh - i - """ - p = text.rfind('\n', 0, index) + 1 - q = text.find('\n', index) - if q < 0: - q = len(text) - return text[p:q] - - -class SuspiciousVisitor(nodes.GenericNodeVisitor): - - lastlineno = 0 - - def __init__(self, document, builder): - nodes.GenericNodeVisitor.__init__(self, document) - self.builder = builder - - def default_visit(self, node): - if isinstance(node, (nodes.Text, nodes.image)): # direct text containers - text = node.astext() - # lineno seems to go backwards sometimes (?) - self.lastlineno = lineno = max(get_lineno(node) or 0, self.lastlineno) - seen = set() # don't report the same issue more than only once per line - for match in detect_all(text): - issue = match.group() - line = extract_line(text, match.start()) - if (issue, line) not in seen: - self.builder.check_issue(line, lineno, issue) - seen.add((issue, line)) - - unknown_visit = default_visit - - def visit_document(self, node): - self.lastlineno = 0 - - def visit_comment(self, node): - # ignore comments -- too much false positives. 
- # (although doing this could miss some errors; - # there were two sections "commented-out" by mistake - # in the Python docs that would not be caught) - raise nodes.SkipNode diff --git a/Doc/tools/rstlint.py b/Doc/tools/rstlint.py deleted file mode 100644 index 4ea68ef3b030c8..00000000000000 --- a/Doc/tools/rstlint.py +++ /dev/null @@ -1,408 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -# Check for stylistic and formal issues in .rst and .py -# files included in the documentation. -# -# 01/2009, Georg Brandl - -# TODO: - wrong versions in versionadded/changed -# - wrong markup after versionchanged directive - -import os -import re -import sys -import getopt -from string import ascii_letters -from os.path import join, splitext, abspath, exists -from collections import defaultdict - -directives = [ - # standard docutils ones - 'admonition', 'attention', 'caution', 'class', 'compound', 'container', - 'contents', 'csv-table', 'danger', 'date', 'default-role', 'epigraph', - 'error', 'figure', 'footer', 'header', 'highlights', 'hint', 'image', - 'important', 'include', 'line-block', 'list-table', 'meta', 'note', - 'parsed-literal', 'pull-quote', 'raw', 'replace', - 'restructuredtext-test-directive', 'role', 'rubric', 'sectnum', 'sidebar', - 'table', 'target-notes', 'tip', 'title', 'topic', 'unicode', 'warning', - # Sphinx and Python docs custom ones - 'acks', 'attribute', 'autoattribute', 'autoclass', 'autodata', - 'autoexception', 'autofunction', 'automethod', 'automodule', - 'availability', 'centered', 'cfunction', 'class', 'classmethod', 'cmacro', - 'cmdoption', 'cmember', 'code-block', 'confval', 'cssclass', 'ctype', - 'currentmodule', 'cvar', 'data', 'decorator', 'decoratormethod', - 'deprecated-removed', 'deprecated(?!-removed)', 'describe', 'directive', - 'doctest', 'envvar', 'event', 'exception', 'function', 'glossary', - 'highlight', 'highlightlang', 'impl-detail', 'index', 'literalinclude', - 'method', 'miscnews', 'module', 'moduleauthor', 'opcode', 'pdbcommand', - 'productionlist', 'program', 'role', 'sectionauthor', 'seealso', - 'sourcecode', 'staticmethod', 'tabularcolumns', 'testcode', 'testoutput', - 'testsetup', 'toctree', 'todo', 'todolist', 'versionadded', - 'versionchanged' -] - -roles = [ - "(? 81: - # don't complain about tables, links and function signatures - if line.lstrip()[0] not in '+|' and \ - 'http://' not in line and \ - not line.lstrip().startswith(('.. function', - '.. method', - '.. cfunction')): - yield lno+1, "line too long" - - -@checker('.html', severity=2, falsepositives=True) -def check_leaked_markup(fn, lines): - """Check HTML files for leaked reST markup; this only works if - the HTML files have been built. - """ - for lno, line in enumerate(lines): - if leaked_markup_re.search(line): - yield lno+1, 'possibly leaked markup: %r' % line - - -def hide_literal_blocks(lines): - """Tool to remove literal blocks from given lines. - - It yields empty lines in place of blocks, so line numbers are - still meaningful. - """ - in_block = False - for line in lines: - if line.endswith("::\n"): - in_block = True - elif in_block: - if line == "\n" or line.startswith(" "): - line = "\n" - else: - in_block = False - yield line - - -def type_of_explicit_markup(line): - if re.match(fr'\.\. {all_directives}::', line): - return 'directive' - if re.match(r'\.\. \[[0-9]+\] ', line): - return 'footnote' - if re.match(r'\.\. \[[^\]]+\] ', line): - return 'citation' - if re.match(r'\.\. _.*[^_]: ', line): - return 'target' - if re.match(r'\.\. 
\|[^\|]*\| ', line): - return 'substitution_definition' - return 'comment' - - -def hide_comments(lines): - """Tool to remove comments from given lines. - - It yields empty lines in place of comments, so line numbers are - still meaningful. - """ - in_multiline_comment = False - for line in lines: - if line == "..\n": - in_multiline_comment = True - elif in_multiline_comment: - if line == "\n" or line.startswith(" "): - line = "\n" - else: - in_multiline_comment = False - if line.startswith(".. ") and type_of_explicit_markup(line) == 'comment': - line = "\n" - yield line - - - -@checker(".rst", severity=2) -def check_missing_surrogate_space_on_plural(fn, lines): - r"""Check for missing 'backslash-space' between a code sample a letter. - - Good: ``Point``\ s - Bad: ``Point``s - """ - in_code_sample = False - check_next_one = False - for lno, line in enumerate(hide_comments(hide_literal_blocks(lines))): - tokens = line.split("``") - for token_no, token in enumerate(tokens): - if check_next_one: - if token[0] in ascii_letters: - yield lno + 1, f"Missing backslash-space between code sample and {token!r}." - check_next_one = False - if token_no == len(tokens) - 1: - continue - if in_code_sample: - check_next_one = True - in_code_sample = not in_code_sample - -def main(argv): - usage = '''\ -Usage: %s [-v] [-f] [-s sev] [-i path]* [path] - -Options: -v verbose (print all checked file names) - -f enable checkers that yield many false positives - -s sev only show problems with severity >= sev - -i path ignore subdir or file path -''' % argv[0] - try: - gopts, args = getopt.getopt(argv[1:], 'vfs:i:') - except getopt.GetoptError: - print(usage) - return 2 - - verbose = False - severity = 1 - ignore = [] - falsepos = False - for opt, val in gopts: - if opt == '-v': - verbose = True - elif opt == '-f': - falsepos = True - elif opt == '-s': - severity = int(val) - elif opt == '-i': - ignore.append(abspath(val)) - - if len(args) == 0: - path = '.' - elif len(args) == 1: - path = args[0] - else: - print(usage) - return 2 - - if not exists(path): - print('Error: path %s does not exist' % path) - return 2 - - count = defaultdict(int) - - print("""⚠ rstlint.py is no longer maintained here and will be removed -⚠ in a future release. -⚠ Please use https://pypi.org/p/sphinx-lint instead. -""") - - for root, dirs, files in os.walk(path): - # ignore subdirs in ignore list - if abspath(root) in ignore: - del dirs[:] - continue - - for fn in files: - fn = join(root, fn) - if fn[:2] == './': - fn = fn[2:] - - # ignore files in ignore list - if abspath(fn) in ignore: - continue - - ext = splitext(fn)[1] - checkerlist = checkers.get(ext, None) - if not checkerlist: - continue - - if verbose: - print('Checking %s...' % fn) - - try: - with open(fn, 'r', encoding='utf-8') as f: - lines = list(f) - except (IOError, OSError) as err: - print('%s: cannot open: %s' % (fn, err)) - count[4] += 1 - continue - - for checker in checkerlist: - if checker.falsepositives and not falsepos: - continue - csev = checker.severity - if csev >= severity: - for lno, msg in checker(fn, lines): - print('[%d] %s:%d: %s' % (csev, fn, lno, msg)) - count[csev] += 1 - if verbose: - print() - if not count: - if severity > 1: - print('No problems with severity >= %d found.' % severity) - else: - print('No problems found.') - else: - for severity in sorted(count): - number = count[severity] - print('%d problem%s with severity %d found.' 
% - (number, number > 1 and 's' or '', severity)) - return int(bool(count)) - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/Doc/tools/susp-ignored.csv b/Doc/tools/susp-ignored.csv deleted file mode 100644 index 0dba0744b5fe4b..00000000000000 --- a/Doc/tools/susp-ignored.csv +++ /dev/null @@ -1,400 +0,0 @@ -c-api/arg,,:ref,"PyArg_ParseTuple(args, ""O|O:ref"", &object, &callback)" -c-api/list,,:high,list[low:high] -c-api/sequence,,:i2,del o[i1:i2] -c-api/sequence,,:i2,o[i1:i2] -c-api/tuple,,:high,p[low:high] -c-api/unicode,,:end,str[start:end] -c-api/unicode,,:start,unicode[start:start+length] -distutils/examples,,`,This is the description of the ``foobar`` package. -distutils/setupscript,,::, -extending/embedding,,:numargs,"if(!PyArg_ParseTuple(args, "":numargs""))" -extending/extending,,:myfunction,"PyArg_ParseTuple(args, ""D:myfunction"", &c);" -extending/extending,,:set,"if (PyArg_ParseTuple(args, ""O:set_callback"", &temp)) {" -extending/newtypes,,:call,"if (!PyArg_ParseTuple(args, ""sss:call"", &arg1, &arg2, &arg3)) {" -faq/programming,,:chr,">=4.0) or 1+f(xc,yc,x*x-y*y+xc,2.0*x*y+yc,k-1,f):f(xc,yc,x,y,k,f):chr(" -faq/programming,,:reduce,"print((lambda Ru,Ro,Iu,Io,IM,Sx,Sy:reduce(lambda x,y:x+y,map(lambda y," -faq/programming,,:reduce,"Sx=Sx,Sy=Sy:reduce(lambda x,y:x+y,map(lambda x,xc=Ru,yc=yc,Ru=Ru,Ro=Ro," -faq/windows,,:d48eceb,"Python 3.6.4 (v3.6.4:d48eceb, Dec 19 2017, 06:04:45) [MSC v.1900 32 bit (Intel)] on win32" -howto/curses,,:black,"colors when it activates color mode. They are: 0:black, 1:red," -howto/curses,,:red,"colors when it activates color mode. They are: 0:black, 1:red," -howto/curses,,:green,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. The" -howto/curses,,:yellow,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. The" -howto/curses,,:blue,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. The" -howto/curses,,:magenta,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. The" -howto/curses,,:cyan,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. The" -howto/curses,,:white,"2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, and 7:white. 
The" -howto/descriptor,,:root,"INFO:root" -howto/descriptor,,:Updating,"root:Updating" -howto/descriptor,,:Accessing,"root:Accessing" -howto/instrumentation,,::,python$target:::function-entry -howto/instrumentation,,:function,python$target:::function-entry -howto/instrumentation,,::,python$target:::function-return -howto/instrumentation,,:function,python$target:::function-return -howto/instrumentation,,:call,156641360502280 function-entry:call_stack.py:start:23 -howto/instrumentation,,:start,156641360502280 function-entry:call_stack.py:start:23 -howto/instrumentation,,:function,156641360518804 function-entry: call_stack.py:function_1:1 -howto/instrumentation,,:function,156641360532797 function-entry: call_stack.py:function_3:9 -howto/instrumentation,,:function,156641360546807 function-return: call_stack.py:function_3:10 -howto/instrumentation,,:function,156641360563367 function-return: call_stack.py:function_1:2 -howto/instrumentation,,:function,156641360578365 function-entry: call_stack.py:function_2:5 -howto/instrumentation,,:function,156641360591757 function-entry: call_stack.py:function_1:1 -howto/instrumentation,,:function,156641360605556 function-entry: call_stack.py:function_3:9 -howto/instrumentation,,:function,156641360617482 function-return: call_stack.py:function_3:10 -howto/instrumentation,,:function,156641360629814 function-return: call_stack.py:function_1:2 -howto/instrumentation,,:function,156641360642285 function-return: call_stack.py:function_2:6 -howto/instrumentation,,:function,156641360656770 function-entry: call_stack.py:function_3:9 -howto/instrumentation,,:function,156641360669707 function-return: call_stack.py:function_3:10 -howto/instrumentation,,:function,156641360687853 function-entry: call_stack.py:function_4:13 -howto/instrumentation,,:function,156641360700719 function-return: call_stack.py:function_4:14 -howto/instrumentation,,:function,156641360719640 function-entry: call_stack.py:function_5:18 -howto/instrumentation,,:function,156641360732567 function-return: call_stack.py:function_5:21 -howto/instrumentation,,:call,156641360747370 function-return:call_stack.py:start:28 -howto/instrumentation,,:start,156641360747370 function-return:call_stack.py:start:28 -howto/ipaddress,,:DB8,>>> ipaddress.ip_address('2001:DB8::1') -howto/ipaddress,,::,>>> ipaddress.ip_address('2001:DB8::1') -howto/ipaddress,,:db8,IPv6Address('2001:db8::1') -howto/ipaddress,,::,IPv6Address('2001:db8::1') -howto/ipaddress,,::,IPv6Address('::1') -howto/ipaddress,,:db8,>>> ipaddress.ip_network('2001:db8::0/96') -howto/ipaddress,,::,>>> ipaddress.ip_network('2001:db8::0/96') -howto/ipaddress,,:db8,IPv6Network('2001:db8::/96') -howto/ipaddress,,::,IPv6Network('2001:db8::/96') -howto/ipaddress,,:db8,IPv6Network('2001:db8::/128') -howto/ipaddress,,::,IPv6Network('2001:db8::/128') -howto/ipaddress,,:db8,IPv6Interface('2001:db8::1/96') -howto/ipaddress,,::,IPv6Interface('2001:db8::1/96') -howto/ipaddress,,:db8,>>> addr6 = ipaddress.ip_address('2001:db8::1') -howto/ipaddress,,::,>>> addr6 = ipaddress.ip_address('2001:db8::1') -howto/ipaddress,,:db8,>>> host6 = ipaddress.ip_interface('2001:db8::1/96') -howto/ipaddress,,::,>>> host6 = ipaddress.ip_interface('2001:db8::1/96') -howto/ipaddress,,:db8,>>> net6 = ipaddress.ip_network('2001:db8::0/96') -howto/ipaddress,,::,>>> net6 = ipaddress.ip_network('2001:db8::0/96') -howto/ipaddress,,:ffff,IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff::') -howto/ipaddress,,::,IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff::') -howto/ipaddress,,::,IPv6Address('::ffff:ffff') 
-howto/ipaddress,,:ffff,IPv6Address('::ffff:ffff') -howto/ipaddress,,:db8,'2001:db8::/96' -howto/ipaddress,,::,'2001:db8::/96' -howto/ipaddress,,:db8,>>> ipaddress.ip_interface('2001:db8::1/96') -howto/ipaddress,,::,>>> ipaddress.ip_interface('2001:db8::1/96') -howto/ipaddress,,:db8,'2001:db8::1' -howto/ipaddress,,::,'2001:db8::1' -howto/ipaddress,,:db8,IPv6Address('2001:db8::ffff:ffff') -howto/ipaddress,,::,IPv6Address('2001:db8::ffff:ffff') -howto/ipaddress,,:ffff,IPv6Address('2001:db8::ffff:ffff') -howto/logging,,:And,"WARNING:And this, too" -howto/logging,,:And,"WARNING:root:And this, too" -howto/logging,,:And,"ERROR:root:And non-ASCII stuff, too, like " -howto/logging,,:Doing,INFO:root:Doing something -howto/logging,,:Finished,INFO:root:Finished -howto/logging,,:logger,severity:logger name:message -howto/logging,,:Look,WARNING:root:Look before you leap! -howto/logging,,:message,severity:logger name:message -howto/logging,,:root,DEBUG:root:This message should go to the log file -howto/logging,,:root,INFO:root:Doing something -howto/logging,,:root,INFO:root:Finished -howto/logging,,:root,INFO:root:So should this -howto/logging,,:root,"ERROR:root:And non-ASCII stuff, too, like " -howto/logging,,:root,INFO:root:Started -howto/logging,,:root,"WARNING:root:And this, too" -howto/logging,,:root,WARNING:root:Look before you leap! -howto/logging,,:root,WARNING:root:Watch out! -howto/logging,,:So,INFO:root:So should this -howto/logging,,:So,INFO:So should this -howto/logging,,:Started,INFO:root:Started -howto/logging,,:This,DEBUG:root:This message should go to the log file -howto/logging,,:This,DEBUG:This message should appear on the console -howto/logging,,:Watch,WARNING:root:Watch out! -howto/pyporting,,::,Programming Language :: Python :: 2 -howto/pyporting,,::,Programming Language :: Python :: 3 -howto/regex,,::, -howto/regex,,:foo,(?:foo) -howto/urllib2,,:password,"""joe:password@example.com""" -library/__main__,,`, -library/ast,,:upper,lower:upper -library/ast,,:step,lower:upper:step -library/audioop,,:ipos,"# factor = audioop.findfactor(in_test[ipos*2:ipos*2+len(out_test)]," -library/configparser,,:home,my_dir: ${Common:home_dir}/twosheds -library/configparser,,:option,${section:option} -library/configparser,,:path,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python} -library/configparser,,:Python,python_dir: ${Frameworks:path}/Python/Versions/${Frameworks:Python} -library/configparser,,:system,path: ${Common:system_dir}/Library/Frameworks/ -library/datetime,,:MM, -library/datetime,,:SS, -library/decimal,,:optional,"trailneg:optional trailing minus indicator" -library/difflib,,:ahi,a[alo:ahi] -library/difflib,,:bhi,b[blo:bhi] -library/difflib,,:i1, -library/difflib,,:i2, -library/difflib,,:j2, -library/doctest,,`,``factorial`` from the ``example`` module: -library/doctest,,`,The ``example`` module -library/doctest,,`,Using ``factorial`` -library/exceptions,,:err,err.object[err.start:err.end] -library/functions,,:step,a[start:stop:step] -library/functions,,:stop,"a[start:stop, i]" -library/functions,,:stop,a[start:stop:step] -library/hashlib,,:LEAF,"h00 = blake2b(buf[0:LEAF_SIZE], fanout=FANOUT, depth=DEPTH," -library/http.client,,:port,host:port -library/http.cookies,,`,!#$%&'*+-.^_`|~: -library/imaplib,,:MM,"""DD-Mmm-YYYY HH:MM:SS" -library/imaplib,,:SS,"""DD-Mmm-YYYY HH:MM:SS" -library/inspect,,:int,">>> def foo(a, *, b:int, **kwargs):" -library/inspect,,:int,"'(a, *, b:int, **kwargs)'" -library/inspect,,:int,'b:int' -library/ipaddress,,:db8,>>> 
ipaddress.ip_address('2001:db8::') -library/ipaddress,,::,>>> ipaddress.ip_address('2001:db8::') -library/ipaddress,,:db8,IPv6Address('2001:db8::') -library/ipaddress,,::,IPv6Address('2001:db8::') -library/ipaddress,,:db8,>>> ipaddress.IPv6Address('2001:db8::1000') -library/ipaddress,,::,>>> ipaddress.IPv6Address('2001:db8::1000') -library/ipaddress,,:db8,'2001:db8::1000' -library/ipaddress,,::,'2001:db8::1000' -library/ipaddress,,:db8,">>> f'{ipaddress.IPv6Address(""2001:db8::1000""):s}'" -library/ipaddress,,::,">>> f'{ipaddress.IPv6Address(""2001:db8::1000""):s}'" -library/ipaddress,,::,IPv6Address('ff02::5678%1') -library/ipaddress,,::,fe80::1234 -library/ipaddress,,:db8,">>> ipaddress.ip_address(""2001:db8::1"").reverse_pointer" -library/ipaddress,,::,">>> ipaddress.ip_address(""2001:db8::1"").reverse_pointer" -library/ipaddress,,::,"""::abc:7:def""" -library/ipaddress,,:def,"""::abc:7:def""" -library/ipaddress,,::,::FFFF/96 -library/ipaddress,,::,2002::/16 -library/ipaddress,,::,2001::/32 -library/ipaddress,,::,>>> str(ipaddress.IPv6Address('::1')) -library/ipaddress,,::,'::1' -library/ipaddress,,:ff00,ffff:ff00:: -library/ipaddress,,:db00,2001:db00::0/24 -library/ipaddress,,::,2001:db00::0/24 -library/ipaddress,,:db00,2001:db00::0/ffff:ff00:: -library/ipaddress,,::,2001:db00::0/ffff:ff00:: -library/itertools,,:step,elements from seq[start:stop:step] -library/itertools,,::,kernel = tuple(kernel)[::-1] -library/itertools,,:stop,elements from seq[start:stop:step] -library/logging.handlers,,:port,host:port -library/logging,,:root,WARNING:root:Watch out! -library/logging,,:Watch,WARNING:root:Watch out! -library/mmap,,:i2,obj[i1:i2] -library/multiprocessing,,`,# Add more tasks using `put()` -library/multiprocessing,,:queue,">>> QueueManager.register('get_queue', callable=lambda:queue)" -library/multiprocessing,,`,# register the Foo class; make `f()` and `g()` accessible via proxy -library/multiprocessing,,`,# register the Foo class; make `g()` and `_h()` accessible via proxy -library/multiprocessing,,`,# register the generator function baz; use `GeneratorProxy` to make proxies -library/nntplib,,:bytes,:bytes -library/nntplib,,:lines,:lines -library/optparse,,:len,"del parser.rargs[:len(value)]" -library/os.path,,:foo,c:foo -library/pathlib,,:bar,">>> PureWindowsPath('c:/Windows', 'd:bar')" -library/pathlib,,:bar,PureWindowsPath('d:bar') -library/pathlib,,:Program,>>> PureWindowsPath('c:Program Files/').root -library/pathlib,,:Program,>>> PureWindowsPath('c:Program Files/').anchor -library/pdb,,:lineno,filename:lineno -library/pickle,,:memory,"conn = sqlite3.connect("":memory:"")" -library/posix,,`,"CFLAGS=""`getconf LFS_CFLAGS`"" OPT=""-g -O2 $CFLAGS""" -library/pprint,,::,"'Programming Language :: Python :: 2.6'," -library/pprint,,::,"'Programming Language :: Python :: 2.7'," -library/pprint,,::,"'classifiers': ['Development Status :: 3 - Alpha'," -library/pprint,,::,"'Intended Audience :: Developers'," -library/pprint,,::,"'License :: OSI Approved :: MIT License'," -library/pprint,,::,"'Programming Language :: Python :: 2'," -library/pprint,,::,"'Programming Language :: Python :: 3'," -library/pprint,,::,"'Programming Language :: Python :: 3.2'," -library/pprint,,::,"'Programming Language :: Python :: 3.3'," -library/pprint,,::,"'Programming Language :: Python :: 3.4'," -library/pprint,,::,"'Topic :: Software Development :: Build Tools']," -library/profile,,:lineno,filename:lineno(function) -library/pyexpat,,:elem1, -library/pyexpat,,:py,"xmlns:py = ""http://www.python.org/ns/"">" 
-library/random,,:len,new_diff = mean(combined[:len(drug)]) - mean(combined[len(drug):]) -library/readline,,:bind,"python:bind -v" -library/readline,,:bind,"python:bind ^I rl_complete" -library/smtplib,,:port,method must support that as well as a regular host:port -library/socket,,::,'5aef:2b::8' -library/socket,,:can,"return (can_id, can_dlc, data[:can_dlc])" -library/socket,,:len,fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) -library/sqlite3,,:year,"cur.execute(""select * from lang where first_appeared=:year"", {""year"": 1972})" -library/sqlite3,,:memory, -library/sqlite3,,:template,"con = sqlite3.connect(""file:template.db?mode=ro"", uri=True)" -library/sqlite3,,:nosuchdb,"con = sqlite3.connect(""file:nosuchdb.db?mode=rw"", uri=True)" -library/sqlite3,,:mem1,"con1 = sqlite3.connect(""file:mem1?mode=memory&cache=shared"", uri=True)" -library/sqlite3,,:mem1,"con2 = sqlite3.connect(""file:mem1?mode=memory&cache=shared"", uri=True)" -library/ssl,,:My,"Organizational Unit Name (eg, section) []:My Group" -library/ssl,,:My,"Organization Name (eg, company) [Internet Widgits Pty Ltd]:My Organization, Inc." -library/ssl,,:myserver,"Common Name (eg, YOUR name) []:myserver.mygroup.myorganization.com" -library/ssl,,:MyState,State or Province Name (full name) [Some-State]:MyState -library/ssl,,:ops,Email Address []:ops@myserver.mygroup.myorganization.com -library/ssl,,:Some,"Locality Name (eg, city) []:Some City" -library/ssl,,:US,Country Name (2 letter code) [AU]:US -library/stdtypes,,:end,s[start:end] -library/stdtypes,,::,>>> hash(v[::-2]) == hash(b'abcefg'[::-2]) -library/stdtypes,,:len,s[len(s):len(s)] -library/stdtypes,,::,>>> y = m[::2] -library/stdtypes,,::,>>> z = y[::-2] -library/string,,`,"!""#$%&'()*+,-./:;<=>?@[\]^_`{|}~" -library/tarfile,,:bz2, -library/tarfile,,:compression,filemode[:compression] -library/tarfile,,:gz, -library/tarfile,,:xz,'a:xz' -library/tarfile,,:xz,'r:xz' -library/tarfile,,:xz,'w:xz' -library/time,,:mm, -library/time,,:ss, -library/tracemalloc,,:limit,"for index, stat in enumerate(top_stats[:limit], 1):" -library/turtle,,::,Example:: -library/unittest,,:foo,"self.assertEqual(cm.output, ['INFO:foo:first message'," -library/unittest,,:first,"self.assertEqual(cm.output, ['INFO:foo:first message'," -library/unittest,,:foo,'ERROR:foo.bar:second message']) -library/unittest,,:second,'ERROR:foo.bar:second message']) -library/urllib.request,,:close,Connection:close -library/urllib.request,,:port,:port -library/urllib.request,,:lang,"xmlns=""http://www.w3.org/1999/xhtml"" xml:lang=""en"" lang=""en"">\n\n\n" -library/urllib.request,,:password,"""joe:password@python.org""" -library/urllib.parse,,:scheme, -library/urllib.parse,,:scheme,URL:scheme://host/path -library/uuid,,:uuid,urn:uuid:12345678-1234-5678-1234-567812345678 -library/venv,,:param,":param nodist: If true, setuptools and pip are not installed into the" -library/venv,,:param,":param progress: If setuptools or pip are installed, the progress of the" -library/venv,,:param,":param nopip: If true, pip is not installed into the created" -library/venv,,:param,:param context: The information for the virtual environment -library/xmlrpc.client,,:nil,ex:nil -library/xmlrpc.client,,:pass,http://user:pass@host:port/path -library/xmlrpc.client,,:pass,user:pass -library/xmlrpc.client,,:port,http://user:pass@host:port/path -license,,`,"``Software''), to deal in the Software without restriction, including" -license,,`,"THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND," -license,,`,* 
THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND -license,,`,THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -license,,`,* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY -license,,`,THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND -license,,:zooko,mailto:zooko@zooko.com -reference/expressions,,:index,x[index:index] -reference/lexical_analysis,,`,$ ? ` -reference/lexical_analysis,,:fileencoding,# vim:fileencoding= -tutorial/datastructures,,:value,It is also possible to delete a key:value -tutorial/datastructures,,:value,key:value pairs within the braces adds initial key:value pairs -tutorial/stdlib2,,:config,"logging.warning('Warning:config file %s not found', 'server.conf')" -tutorial/stdlib2,,:config,WARNING:root:Warning:config file server.conf not found -tutorial/stdlib2,,:Critical,CRITICAL:root:Critical error -- shutting down -tutorial/stdlib2,,:Error,ERROR:root:Error occurred -tutorial/stdlib2,,:root,CRITICAL:root:Critical error -- shutting down -tutorial/stdlib2,,:root,ERROR:root:Error occurred -tutorial/stdlib2,,:root,WARNING:root:Warning:config file server.conf not found -tutorial/stdlib2,,:start,extra = data[start:start+extra_size] -tutorial/stdlib2,,:start,"fields = struct.unpack('>> urlparse.urlparse('http://[1080::8:800:200C:417A]/foo') -whatsnew/2.7,,:Sunday,'2009:4:Sunday' -whatsnew/2.7,,:Cookie,"export PYTHONWARNINGS=all,error:::Cookie:0" -whatsnew/2.7,,::,"export PYTHONWARNINGS=all,error:::Cookie:0" -whatsnew/3.2,,:affe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]'," -whatsnew/3.2,,:affe,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/') -whatsnew/3.2,,:beef,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]'," -whatsnew/3.2,,:beef,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/') -whatsnew/3.2,,:cafe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]'," -whatsnew/3.2,,:cafe,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/') -whatsnew/3.2,,:deaf,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]'," -whatsnew/3.2,,:deaf,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/') -whatsnew/3.2,,:directory,${buildout:directory}/downloads/dist -whatsnew/3.2,,::,"$ export PYTHONWARNINGS='ignore::RuntimeWarning::,once::UnicodeWarning::'" -whatsnew/3.2,,:feed,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]'," -whatsnew/3.2,,:feed,>>> urllib.parse.urlparse('http://[dead:beef:cafe:5417:affe:8FA3:deaf:feed]/foo/') -whatsnew/3.2,,:gz,">>> with tarfile.open(name='myarchive.tar.gz', mode='w:gz') as tf:" -whatsnew/3.2,,:location,zope9-location = ${zope9:location} -whatsnew/3.2,,:prefix,zope-conf = ${custom:prefix}/etc/zope.conf -library/re,,`,!#$%&'*+-.^_`|~: -library/re,,`,!\#\$%\&'\*\+\-\.\^_`\|\~: -library/tarfile,,:xz,'x:xz' -library/warnings,,:message,action:message:category:module:line -library/warnings,,:category,action:message:category:module:line -library/warnings,,:module,action:message:category:module:line -library/warnings,,:line,action:message:category:module:line -library/warnings,,::,error::ResourceWarning -library/warnings,,::,default::DeprecationWarning -library/warnings,,::,default:::mymodule -library/warnings,,:mymodule,default:::mymodule -library/warnings,,::,error:::mymodule -library/warnings,,:mymodule,error:::mymodule -library/warnings,,::,ignore::DeprecationWarning -library/warnings,,::,ignore::PendingDeprecationWarning -library/warnings,,::,ignore::ImportWarning 
-library/warnings,,::,ignore::ResourceWarning -library/xml.etree.elementtree,,:sometag,prefix:sometag -library/xml.etree.elementtree,,:fictional,"Lancelot -library/xml.etree.elementtree,,:character,Archie Leach -library/xml.etree.elementtree,,:character,Sir Robin -library/xml.etree.elementtree,,:character,Gunther -library/xml.etree.elementtree,,:character,Commander Clement -library/xml.etree.elementtree,,:actor,"for actor in root.findall('real_person:actor', ns):" -library/xml.etree.elementtree,,:name,"name = actor.find('real_person:name', ns)" -library/xml.etree.elementtree,,:character,"for char in actor.findall('role:character', ns):" -library/xml.etree.elementtree,,:xi, -library/xml.etree.elementtree,,:include, -library/xml.etree.elementtree,,:include, Copyright (c) . -library/zipapp,,:main,"$ python -m zipapp myapp -m ""myapp:main""" -library/zipapp,,:fn,"pkg.mod:fn" -library/zipapp,,:callable,"pkg.module:callable" -library/stdtypes,,::,>>> m[::2].tolist() -whatsnew/3.5,,:root,'WARNING:root:warning\n' -whatsnew/3.5,,:warning,'WARNING:root:warning\n' -whatsnew/3.5,,::,>>> addr6 = ipaddress.IPv6Address('::1') -whatsnew/3.5,,:root,ERROR:root:exception -whatsnew/3.5,,:exception,ERROR:root:exception -whatsnew/changelog,,`,'`' -whatsnew/changelog,,:end,str[start:end] -library/binascii,,`,'`' -library/uu,,`,'`' -whatsnew/3.7,,`,'`' -whatsnew/3.7,,::,error::BytesWarning -whatsnew/changelog,,::,error::BytesWarning -whatsnew/changelog,,::,default::BytesWarning -whatsnew/changelog,,::,default::DeprecationWarning -library/importlib.metadata,,:main,"EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')" -library/importlib.metadata,,`,loading the metadata for packages for the indicated ``context``. -library/re,,`,"`" -library/typing,,`,# Type of ``val`` is narrowed to ``str`` -library/typing,,`,"# Else, type of ``val`` is narrowed to ``float``." -library/typing,,`,# Type of ``val`` is narrowed to ``list[str]``. -library/typing,,`,# Type of ``val`` remains as ``list[object]``. -library/tkinter,,::,ttk::frame .frm -padding 10 -library/tkinter,,::,"grid [ttk::label .frm.lbl -text ""Hello World!""] -column 0 -row 0" -library/tkinter,,::,"grid [ttk::button .frm.btn -text ""Quit"" -command ""destroy .""] -column 1 -row 0" -library/tkinter,,::,ttk::frame -library/tkinter,,::,ttk::button -library/tkinter,,::,ttk::widget -reference/compound_stmts,,:exc,subclass of :exc:`BaseExceptionGroup`. It is not possible to mix except -reference/compound_stmts,,`,subclass of :exc:`BaseExceptionGroup`. It is not possible to mix except -reference/compound_stmts,,:keyword,"and except* in the same :keyword:`try`. :keyword:`break`," -reference/compound_stmts,,`,"and except* in the same :keyword:`try`. :keyword:`break`," -reference/compound_stmts,,:keyword,:keyword:`continue` and :keyword:`return` cannot appear in an except* -reference/compound_stmts,,`,:keyword:`continue` and :keyword:`return` cannot appear in an except* -whatsnew/changelog,,:CON,": os.path.abspath(“C:CON”) is now fixed to return “\.CON”, not" -whatsnew/changelog,,::,Lib/email/mime/nonmultipart.py::MIMENonMultipart -library/typing,,`,"assert_type(name, str) # OK, inferred type of `name` is `str`" -library/typing,,`,# after which we hope the inferred type will be `int` -whatsnew/changelog,,:company,-V:company/tag -library/typing,,`,# are located in the `typing_extensions` backports package. 
-library/dis,490,:TOS,TOS = TOS2[TOS1:TOS] -library/dis,497,:TOS,TOS2[TOS1:TOS] = TOS3 diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index f27abe48b2d4ed..9ecbf8b87efbf1 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -581,7 +581,8 @@ this:: . -The name :class:`BaseClassName` must be defined in a scope containing the +The name :class:`BaseClassName` must be defined in a +namespace accessible from the scope containing the derived class definition. In place of a base class name, other arbitrary expressions are also allowed. This can be useful, for example, when the base class is defined in another module:: diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 99a77e7addd774..52db51e84cd5fc 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -840,8 +840,9 @@ will always bind to the first parameter. For example:: But using ``/`` (positional only arguments), it is possible since it allows ``name`` as a positional argument and ``'name'`` as a key in the keyword arguments:: - def foo(name, /, **kwds): - return 'name' in kwds + >>> def foo(name, /, **kwds): + ... return 'name' in kwds + ... >>> foo(1, **{'name': 2}) True diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index 12b00be3793e14..c8e89d9b79bddd 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -122,7 +122,7 @@ An example that uses most of the list methods:: You might have noticed that methods like ``insert``, ``remove`` or ``sort`` that only modify the list have no return value printed -- they return the default -``None``. [1]_ This is a design principle for all mutable data structures in +``None``. [#]_ This is a design principle for all mutable data structures in Python. Another thing you might notice is that not all data can be sorted or @@ -731,5 +731,5 @@ interpreter will raise a :exc:`TypeError` exception. .. rubric:: Footnotes -.. [1] Other languages may return the mutated object, which allows method +.. [#] Other languages may return the mutated object, which allows method chaining, such as ``d->insert("a")->remove("b")->sort();``. diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index 67bb19556681c0..e09c829b8e9721 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -496,7 +496,7 @@ Raising and Handling Multiple Unrelated Exceptions ================================================== There are situations where it is necessary to report several exceptions that -have occurred. This it often the case in concurrency frameworks, when several +have occurred. This is often the case in concurrency frameworks, when several tasks may have failed in parallel, but there are also other use cases where it is desirable to continue execution and collect multiple errors rather than raise the first exception. diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst index 9bee046809eb24..b71c61089e6dc1 100644 --- a/Doc/tutorial/interpreter.rst +++ b/Doc/tutorial/interpreter.rst @@ -52,7 +52,7 @@ A second way of starting the interpreter is ``python -c command [arg] ...``, which executes the statement(s) in *command*, analogous to the shell's :option:`-c` option. Since Python statements often contain spaces or other characters that are special to the shell, it is usually advised to quote -*command* in its entirety with single quotes. +*command* in its entirety. Some Python modules are also useful as scripts. 
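As a rough sketch of the exception-group handling that the errors tutorial change above alludes to (the failing tasks here are made up), Python 3.11 lets collected failures be raised together and handled selectively with ``except*``::

    def run_tasks():
        errors = []
        for task in (lambda: int("x"), lambda: open("/no/such/file")):
            try:
                task()
            except Exception as exc:
                errors.append(exc)      # keep going and collect every failure
        if errors:
            raise ExceptionGroup("several tasks failed", errors)

    try:
        run_tasks()
    except* ValueError as eg:
        print("bad input:", eg.exceptions)
    except* OSError as eg:
        print("I/O trouble:", eg.exceptions)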
These can be invoked using ``python -m module [arg] ...``, which executes the source file for *module* as diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst index ba0f4770529783..558b1c3eec60ed 100644 --- a/Doc/tutorial/introduction.rst +++ b/Doc/tutorial/introduction.rst @@ -70,8 +70,8 @@ the ones with a fractional part (e.g. ``5.0``, ``1.6``) have type :class:`float`. We will see more about numeric types later in the tutorial. Division (``/``) always returns a float. To do :term:`floor division` and -get an integer result (discarding any fractional result) you can use the ``//`` -operator; to calculate the remainder you can use ``%``:: +get an integer result you can use the ``//`` operator; to calculate +the remainder you can use ``%``:: >>> 17 / 3 # classic division returns a float 5.666666666666667 diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 02c9f3095b4090..2a4d070ec057df 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -538,12 +538,11 @@ Miscellaneous options development (running from the source tree) then the default is "off". Note that the "importlib_bootstrap" and "importlib_bootstrap_external" frozen modules are always used, even if this flag is set to "off". - * ``-X perf`` to activate compatibility mode with the ``perf`` profiler. - When this option is activated, the Linux ``perf`` profiler will be able to + * ``-X perf`` enables support for the Linux ``perf`` profiler. + When this option is provided, the ``perf`` profiler will be able to report Python calls. This option is only available on some platforms and will do nothing if is not supported on the current system. The default value - is "off". See also :envvar:`PYTHONPERFSUPPORT` and :ref:`perf_profiling` - for more information. + is "off". See also :envvar:`PYTHONPERFSUPPORT` and :ref:`perf_profiling`. It also allows passing arbitrary values and retrieving them through the :data:`sys._xoptions` dictionary. @@ -1048,9 +1047,13 @@ conflict. .. envvar:: PYTHONPERFSUPPORT - If this variable is set to a nonzero value, it activates compatibility mode - with the ``perf`` profiler so Python calls can be detected by it. See the - :ref:`perf_profiling` section for more information. + If this variable is set to a nonzero value, it enables support for + the Linux ``perf`` profiler so Python calls can be detected by it. + + If set to ``0``, disable Linux ``perf`` profiler support. + + See also the :option:`-X perf <-X>` command-line option + and :ref:`perf_profiling`. .. versionadded:: 3.12 diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index ec57c880ee7ad0..0922972f9bf1dc 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -131,7 +131,8 @@ General Options Turn on internal statistics gathering. The statistics will be dumped to a arbitrary (probably unique) file in - ``/tmp/py_stats/``, or ``C:\temp\py_stats\`` on Windows. + ``/tmp/py_stats/``, or ``C:\temp\py_stats\`` on Windows. If that directory + does not exist, results will be printed on stdout. Use ``Tools/scripts/summarize_stats.py`` to read the stats. @@ -755,17 +756,24 @@ Compiler flags In particular, :envvar:`CFLAGS` should not contain: - * the compiler flag `-I` (for setting the search path for include files). - The `-I` flags are processed from left to right, and any flags in - :envvar:`CFLAGS` would take precedence over user- and package-supplied `-I` + * the compiler flag ``-I`` (for setting the search path for include files). 
+ The ``-I`` flags are processed from left to right, and any flags in + :envvar:`CFLAGS` would take precedence over user- and package-supplied ``-I`` flags. - * hardening flags such as `-Werror` because distributions cannot control + * hardening flags such as ``-Werror`` because distributions cannot control whether packages installed by users conform to such heightened standards. .. versionadded:: 3.5 +.. envvar:: COMPILEALL_OPTS + + Options passed to the :mod:`compileall` command line when building PYC files + in ``make install``. Default: ``-j0``. + + .. versionadded:: 3.12 + .. envvar:: EXTRA_CFLAGS Extra C compiler flags. @@ -878,9 +886,9 @@ Linker flags In particular, :envvar:`LDFLAGS` should not contain: - * the compiler flag `-L` (for setting the search path for libraries). - The `-L` flags are processed from left to right, and any flags in - :envvar:`LDFLAGS` would take precedence over user- and package-supplied `-L` + * the compiler flag ``-L`` (for setting the search path for libraries). + The ``-L`` flags are processed from left to right, and any flags in + :envvar:`LDFLAGS` would take precedence over user- and package-supplied ``-L`` flags. .. envvar:: CONFIGURE_LDFLAGS_NODIST diff --git a/Doc/using/unix.rst b/Doc/using/unix.rst index 3f9f1364c8ae87..24c02c99f871d5 100644 --- a/Doc/using/unix.rst +++ b/Doc/using/unix.rst @@ -158,19 +158,19 @@ Custom OpenSSL .. code-block:: shell-session $ curl -O https://www.openssl.org/source/openssl-VERSION.tar.gz - $ tar xzf openssl-VERSION - $ pushd openssl-VERSION - $ ./config \ - --prefix=/usr/local/custom-openssl \ - --libdir=lib \ - --openssldir=/etc/ssl - $ make -j1 depend - $ make -j8 - $ make install_sw - $ popd + $ tar xzf openssl-VERSION + $ pushd openssl-VERSION + $ ./config \ + --prefix=/usr/local/custom-openssl \ + --libdir=lib \ + --openssldir=/etc/ssl + $ make -j1 depend + $ make -j8 + $ make install_sw + $ popd 3. Build Python with custom OpenSSL - (see the configure `--with-openssl` and `--with-openssl-rpath` options) + (see the configure ``--with-openssl`` and ``--with-openssl-rpath`` options) .. code-block:: shell-session diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc index c2a9f521a148b6..d535b254f05698 100644 --- a/Doc/using/venv-create.inc +++ b/Doc/using/venv-create.inc @@ -16,8 +16,8 @@ re-used. .. deprecated:: 3.6 ``pyvenv`` was the recommended tool for creating virtual environments for - Python 3.3 and 3.4, and is `deprecated in Python 3.6 - `_. + Python 3.3 and 3.4, and is + :ref:`deprecated in Python 3.6 `. .. versionchanged:: 3.5 The use of ``venv`` is now recommended for creating virtual environments. @@ -105,45 +105,3 @@ Multiple paths can be given to ``venv``, in which case an identical virtual environment will be created, according to the given options, at each provided path. -Once a virtual environment has been created, it can be "activated" using a -script in the virtual environment's binary directory. 
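As a hedged illustration of creating environments at several paths with the standard library (the target directories are placeholders)::

    import venv

    # Roughly what "python -m venv <dir>" does for each directory.
    for target in ("/tmp/env-a", "/tmp/env-b"):
        venv.create(target, with_pip=True)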
The invocation of the -script is platform-specific (`` must be replaced by the path of the -directory containing the virtual environment): - -+-------------+-----------------+-----------------------------------------+ -| Platform | Shell | Command to activate virtual environment | -+=============+=================+=========================================+ -| POSIX | bash/zsh | $ source /bin/activate | -+-------------+-----------------+-----------------------------------------+ -| | fish | $ source /bin/activate.fish | -+-------------+-----------------+-----------------------------------------+ -| | csh/tcsh | $ source /bin/activate.csh | -+-------------+-----------------+-----------------------------------------+ -| | PowerShell Core | $ /bin/Activate.ps1 | -+-------------+-----------------+-----------------------------------------+ -| Windows | cmd.exe | C:\\> \\Scripts\\activate.bat | -+-------------+-----------------+-----------------------------------------+ -| | PowerShell | PS C:\\> \\Scripts\\Activate.ps1 | -+-------------+-----------------+-----------------------------------------+ - -When a virtual environment is active, the :envvar:`VIRTUAL_ENV` environment -variable is set to the path of the virtual environment. This can be used to -check if one is running inside a virtual environment. - -You don't specifically *need* to activate an environment; activation just -prepends the virtual environment's binary directory to your path, so that -"python" invokes the virtual environment's Python interpreter and you can run -installed scripts without having to use their full path. However, all scripts -installed in a virtual environment should be runnable without activating it, -and run with the virtual environment's Python automatically. - -You can deactivate a virtual environment by typing "deactivate" in your shell. -The exact mechanism is platform-specific and is an internal implementation -detail (typically a script or shell function will be used). - -.. versionadded:: 3.4 - ``fish`` and ``csh`` activation scripts. - -.. versionadded:: 3.8 - PowerShell activation scripts installed under POSIX for PowerShell Core - support. diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 9c339521c31151..fdbe4c15a20036 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -216,7 +216,7 @@ of available options is shown below. +---------------------------+--------------------------------------+--------------------------+ | Include_pip | Install bundled pip and setuptools | 1 | +---------------------------+--------------------------------------+--------------------------+ -| Include_symbols | Install debugging symbols (`*`.pdb) | 0 | +| Include_symbols | Install debugging symbols (``*.pdb``)| 0 | +---------------------------+--------------------------------------+--------------------------+ | Include_tcltk | Install Tcl/Tk support and IDLE | 1 | +---------------------------+--------------------------------------+--------------------------+ @@ -866,7 +866,6 @@ minor version. I.e. ``/usr/bin/python3.7-32`` will request usage of the not provably i386/32-bit". To request a specific environment, use the new ``-V:`` argument with the complete tag. - The ``/usr/bin/env`` form of shebang line has one further special property. Before looking for installed Python interpreters, this form will search the executable :envvar:`PATH` for a Python executable. This corresponds to the @@ -876,6 +875,13 @@ be found, it will be handled as described below. 
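A minimal sketch, assuming an environment made with ``venv``, of checking the activation state described above from inside Python::

    import os
    import sys

    # The activate scripts export VIRTUAL_ENV; running the venv's own
    # interpreter directly also works, and then sys.prefix is redirected.
    print(os.environ.get("VIRTUAL_ENV"))
    print(sys.prefix != sys.base_prefix)   # True inside a virtual environment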
Additionally, the environment variable :envvar:`PYLAUNCHER_NO_SEARCH_PATH` may be set (to any value) to skip this additional search. +Shebang lines that do not match any of these patterns are treated as **Windows** +paths that are absolute or relative to the directory containing the script file. +This is a convenience for Windows-only scripts, such as those generated by an +installer, since the behavior is not compatible with Unix-style shells. +These paths may be quoted, and may include multiple arguments, after which the +path to the script and any additional arguments will be appended. + Arguments in shebang lines -------------------------- diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index bfb2aacbc07747..0c3bfda1933957 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -395,7 +395,7 @@ This section has just been a quick overview of the new features, giving enough of an explanation to start you programming, but many details have been simplified or ignored. Where should you go to get a more complete picture? -https://docs.python.org/dev/howto/descriptor.html is a lengthy tutorial introduction to +The :ref:`descriptorhowto` is a lengthy tutorial introduction to the descriptor features, written by Guido van Rossum. If my description has whetted your appetite, go read this tutorial next, because it goes into much more detail about the new features while still remaining quite easy to read. @@ -1102,7 +1102,7 @@ code, none of the changes described here will affect you very much. * A different argument parsing function, :c:func:`PyArg_UnpackTuple`, has been added that's simpler and presumably faster. Instead of specifying a format string, the caller simply gives the minimum and maximum number of arguments - expected, and a set of pointers to :c:type:`PyObject\*` variables that will be + expected, and a set of pointers to :c:expr:`PyObject*` variables that will be filled in with argument values. * Two new flags :const:`METH_NOARGS` and :const:`METH_O` are available in method diff --git a/Doc/whatsnew/2.5.rst b/Doc/whatsnew/2.5.rst index 0aca2fe697ccdb..dcfaef6ed29494 100644 --- a/Doc/whatsnew/2.5.rst +++ b/Doc/whatsnew/2.5.rst @@ -1725,7 +1725,7 @@ attribute of the function object to change this:: ``ctypes.pythonapi`` object. This object does *not* release the global interpreter lock before calling a function, because the lock must be held when calling into the interpreter's code. There's a :class:`py_object()` type -constructor that will create a :c:type:`PyObject \*` pointer. A simple usage:: +constructor that will create a :c:expr:`PyObject *` pointer. A simple usage:: import ctypes diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index 731ce6aac6919d..34f2656f765c7d 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -717,13 +717,13 @@ This will produce the output:: PEP 3101: Advanced String Formatting ===================================================== -In Python 3.0, the `%` operator is supplemented by a more powerful string +In Python 3.0, the ``%`` operator is supplemented by a more powerful string formatting method, :meth:`format`. Support for the :meth:`str.format` method has been backported to Python 2.6. -In 2.6, both 8-bit and Unicode strings have a `.format()` method that +In 2.6, both 8-bit and Unicode strings have a ``.format()`` method that treats the string as a template and takes the arguments to be formatted. 
-The formatting template uses curly brackets (`{`, `}`) as special characters:: +The formatting template uses curly brackets (``{``, ``}``) as special characters:: >>> # Substitute positional argument 0 into the string. >>> "User ID: {0}".format("root") diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index e8f701d254cd28..276ab63b97f8a9 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -2485,8 +2485,8 @@ In the standard library: * The ElementTree library, :mod:`xml.etree`, no longer escapes ampersands and angle brackets when outputting an XML processing - instruction (which looks like ``) - or comment (which looks like ``). + instruction (which looks like ````) + or comment (which looks like ````). (Patch by Neil Muller; :issue:`2746`.) * The :meth:`~StringIO.StringIO.readline` method of :class:`~StringIO.StringIO` objects now does diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index db8d9281b1f2ed..2389b1a63b1ca2 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -669,6 +669,7 @@ Several other key features: GREEN = 1 BLUE = 2 + color = Color.GREEN match color: case Color.RED: print("I see red!") @@ -1183,7 +1184,7 @@ and will be incorrect in some rare cases, including some ``_``-s in New in 3.10 maintenance releases. -Apply syntax highlighting to `.pyi` files. (Contributed by Alex +Apply syntax highlighting to ``.pyi`` files. (Contributed by Alex Waygood and Terry Jan Reedy in :issue:`45447`.) Include prompts when saving Shell with inputs and outputs. @@ -2151,8 +2152,7 @@ Porting to Python 3.10 * The ``PY_SSIZE_T_CLEAN`` macro must now be defined to use :c:func:`PyArg_ParseTuple` and :c:func:`Py_BuildValue` formats which use ``#``: ``es#``, ``et#``, ``s#``, ``u#``, ``y#``, ``z#``, ``U#`` and ``Z#``. - See :ref:`Parsing arguments and building values - ` and the :pep:`353`. + See :ref:`arg-parsing` and :pep:`353`. (Contributed by Victor Stinner in :issue:`40943`.) * Since :c:func:`Py_REFCNT()` is changed to the inline static function, @@ -2183,8 +2183,7 @@ Porting to Python 3.10 :c:func:`Py_GetProgramFullPath`, :c:func:`Py_GetPythonHome` and :c:func:`Py_GetProgramName` functions now return ``NULL`` if called before :c:func:`Py_Initialize` (before Python is initialized). Use the new - :ref:`Python Initialization Configuration API ` to get the - :ref:`Python Path Configuration. `. + :ref:`init-config` API to get the :ref:`init-path-config`. (Contributed by Victor Stinner in :issue:`42260`.) * :c:func:`PyList_SET_ITEM`, :c:func:`PyTuple_SET_ITEM` and @@ -2198,7 +2197,7 @@ Porting to Python 3.10 ``picklebufobject.h``, ``pyarena.h``, ``pyctype.h``, ``pydebug.h``, ``pyfpe.h``, and ``pytime.h`` have been moved to the ``Include/cpython`` directory. These files must not be included directly, as they are already - included in ``Python.h``: :ref:`Include Files `. If they have + included in ``Python.h``; see :ref:`api-includes`. If they have been included directly, consider including ``Python.h`` instead. (Contributed by Nicholas Sim in :issue:`35134`.) diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index dafbbb673f0ed6..6eb90df89cac0e 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -58,44 +58,49 @@ Summary -- Release highlights .. This section singles out the most important changes in Python 3.11. Brevity is key. -- Python 3.11 is between 10-60% faster than Python 3.10. On average, we measured a - 1.25x speedup on the standard benchmark suite. See `Faster CPython`_ for details. 
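Filled out with the ``color = Color.GREEN`` binding the updated example adds (the fallback branch below is only a placeholder), the pattern-matching snippet runs as::

    from enum import Enum

    class Color(Enum):
        RED = 0
        GREEN = 1
        BLUE = 2

    color = Color.GREEN          # without this line the match target is undefined
    match color:
        case Color.RED:
            print("I see red!")
        case _:
            print("some other color")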
+* Python 3.11 is between 10-60% faster than Python 3.10. + On average, we measured a 1.25x speedup on the standard benchmark suite. + See :ref:`whatsnew311-faster-cpython` for details. .. PEP-sized items next. New syntax features: -* :pep:`654`: Exception Groups and ``except*``. +* :ref:`whatsnew311-pep654` New built-in features: -* :pep:`678`: Enriching Exceptions with Notes. +* :ref:`whatsnew311-pep678` New standard library modules: -* :pep:`680`: ``tomllib`` — Support for Parsing TOML in the Standard Library. +* :pep:`680`: :mod:`tomllib` — + Support for parsing `TOML `_ in the Standard Library Interpreter improvements: -* :pep:`657`: Include Fine Grained Error Locations in Tracebacks. +* :ref:`whatsnew311-pep657` * New :option:`-P` command line option and :envvar:`PYTHONSAFEPATH` environment - variable to disable automatically prepending a potentially unsafe path - (the working dir or script directory, depending on invocation) - to :data:`sys.path`. + variable to :ref:`disable automatically prepending potentially unsafe paths + ` to :data:`sys.path` New typing features: -* :pep:`646`: Variadic generics. -* :pep:`655`: Marking individual TypedDict items as required or potentially missing. -* :pep:`673`: ``Self`` type. -* :pep:`675`: Arbitrary literal string type. -* :pep:`681`: Data Class Transforms. +* :ref:`whatsnew311-pep646` +* :ref:`whatsnew311-pep655` +* :ref:`whatsnew311-pep673` +* :ref:`whatsnew311-pep675` +* :ref:`whatsnew311-pep681` -Important deprecations, removals or restrictions: +Important deprecations, removals and restrictions: -* :pep:`594`: Removing dead batteries from the standard library. -* :pep:`624`: Remove ``Py_UNICODE`` encoder APIs. -* :pep:`670`: Convert macros to functions in the Python C API. +* :pep:`594`: + :ref:`Many legacy standard library modules have been deprecated + ` and will be removed in Python 3.13 +* :pep:`624`: + :ref:`Py_UNICODE encoder APIs have been removed ` +* :pep:`670`: + :ref:`Macros converted to static inline functions ` .. _whatsnew311-features: @@ -105,8 +110,8 @@ New Features .. _whatsnew311-pep657: -PEP 657: Enhanced error locations in tracebacks ------------------------------------------------ +PEP 657: Fine-grained error locations in tracebacks +--------------------------------------------------- When printing tracebacks, the interpreter will now point to the exact expression that caused the error, instead of just the line. For example: @@ -381,7 +386,7 @@ Kumar Srinivasan and Graham Bleaney.) .. _whatsnew311-pep681: -PEP 681: Data Class Transforms +PEP 681: Data class transforms ------------------------------ :data:`~typing.dataclass_transform` may be used to @@ -457,6 +462,8 @@ Other Language Changes pickles instance attributes implemented as :term:`slots <__slots__>`. (Contributed by Serhiy Storchaka in :issue:`26579`.) +.. _whatsnew311-pythonsafepath: + * Added a :option:`-P` command line option and a :envvar:`PYTHONSAFEPATH` environment variable, which disable the automatic prepending to :data:`sys.path` @@ -530,6 +537,17 @@ Other CPython Implementation Changes and with the new :option:`--help-all`. (Contributed by Éric Araujo in :issue:`46142`.) +* Converting between :class:`int` and :class:`str` in bases other than 2 + (binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal) + now raises a :exc:`ValueError` if the number of digits in string form is + above a limit to avoid potential denial of service attacks due to the + algorithmic complexity. This is a mitigation for `CVE-2020-10735 + `_. 
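A quick, illustrative look at the new limit (exact error wording may differ between releases)::

    import sys

    print(sys.get_int_max_str_digits())   # 4300 by default
    try:
        int("9" * 5000)                    # more digits than the limit allows
    except ValueError as exc:
        print(type(exc).__name__)          # ValueError
    sys.set_int_max_str_digits(0)          # 0 removes the limit entirely
    print(len(str(int("9" * 5000))))       # 5000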
+ This limit can be configured or disabled by environment variable, command + line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion + length limitation ` documentation. The default limit + is 4300 digits in string form. + .. _whatsnew311-new-modules: @@ -545,6 +563,8 @@ New Modules (Contributed by Sebastian Rittau in :issue:`42012`.) +.. _whatsnew311-improved-modules: + Improved Modules ================ @@ -594,12 +614,18 @@ asyncio These are primarily intended for internal use, notably by :class:`~asyncio.TaskGroup`. + +.. _whatsnew311-contextlib: + contextlib ---------- -Added non parallel-safe :func:`~contextlib.chdir` context manager to change -the current working directory and then restore it on exit. Simple wrapper -around :func:`~os.chdir`. (Contributed by Filipe Laíns in :issue:`25625`) +* Added non parallel-safe :func:`~contextlib.chdir` context manager to change + the current working directory and then restore it on exit. Simple wrapper + around :func:`~os.chdir`. (Contributed by Filipe Laíns in :issue:`25625`) + + +.. _whatsnew311-dataclasses: dataclasses ----------- @@ -609,58 +635,100 @@ dataclasses :class:`dict`, :class:`list` or :class:`set`. (Contributed by Eric V. Smith in :issue:`44674`.) + +.. _whatsnew311-datetime: + datetime -------- * Add :attr:`datetime.UTC`, a convenience alias for :attr:`datetime.timezone.utc`. (Contributed by Kabir Kwatra in :gh:`91973`.) + * :meth:`datetime.date.fromisoformat`, :meth:`datetime.time.fromisoformat` and :meth:`datetime.datetime.fromisoformat` can now be used to parse most ISO 8601 formats (barring only those that support fractional hours and minutes). (Contributed by Paul Ganssle in :gh:`80010`.) + +.. _whatsnew311-enum: + enum ---- -* ``EnumMeta`` renamed to ``EnumType`` (``EnumMeta`` kept as alias). +* Renamed :class:`!EnumMeta` to :class:`~enum.EnumType` + (:class:`!EnumMeta` kept as an alias). + +* Added :class:`~enum.StrEnum`, + with members that can be used as (and must be) strings. + +* Added :class:`~enum.ReprEnum`, + which only modifies the :meth:`~object.__repr__` of members + while returning their literal values (rather than names) + for :meth:`~object.__str__` and :meth:`~object.__format__` + (used by :func:`str`, :func:`format` and :term:`f-string`\s). + +* Changed :class:`~enum.IntEnum`, :class:`~enum.IntFlag` and :class:`~enum.StrEnum` + to now inherit from :class:`~enum.ReprEnum`, + so their :func:`str` output now matches :func:`format` + (both ``str(AnIntEnum.ONE)`` and ``format(AnIntEnum.ONE)`` return ``'1'``, + whereas before ``str(AnIntEnum.ONE)`` returned ``'AnIntEnum.ONE'``. + +* Changed :meth:`Enum.__format__() ` + (the default for :func:`format`, :meth:`str.format` and :term:`f-string`\s) + of enums with mixed-in types (e.g. :class:`int`, :class:`str`) + to also include the class name in the output, not just the member's key. + This matches the existing behavior of :meth:`enum.Enum.__str__`, + returning e.g. ``'AnEnum.MEMBER'`` for an enum ``AnEnum(str, Enum)`` + instead of just ``'MEMBER'``. -* ``StrEnum`` added -- enum members are and must be strings. +* Added a new *boundary* class parameter to :class:`~enum.Flag` enums + and the :class:`~enum.FlagBoundary` enum with its options, + to control how to handle out-of-range flag values. -* ``ReprEnum`` added -- causes only the ``__repr__`` to be modified, not the - ``__str__`` nor the ``__format__``. 
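Roughly, the new ``str()``/``format()`` behaviour and the added string enum look like this (the class names are illustrative)::

    from enum import IntEnum, StrEnum

    class Priority(IntEnum):
        LOW = 1

    # IntEnum now inherits from ReprEnum, so str() matches format():
    print(str(Priority.LOW), format(Priority.LOW))   # 1 1  (str() was "Priority.LOW" before)

    class Mood(StrEnum):
        HAPPY = "happy"

    print(Mood.HAPPY == "happy")   # True: StrEnum members are strings
    print(f"{Mood.HAPPY}")         # happy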
+* Added the :func:`~enum.verify` enum decorator + and the :class:`~enum.EnumCheck` enum with its options, + to check enum classes against several specific constraints. -* ``FlagBoundary`` added -- controls behavior when invalid values are given to - a flag. +* Added the :func:`~enum.member` and :func:`~enum.nonmember` decorators, + to ensure the decorated object is/is not converted to an enum member. -* ``EnumCheck`` added -- used by ``verify`` to ensure various constraints. +* Added the :func:`~enum.property` decorator, + which works like :func:`property` except for enums. + Use this instead of :func:`types.DynamicClassAttribute`. -* ``verify`` added -- function to ensure given ``EnumCheck`` constraints. +* Added the :func:`~enum.global_enum` enum decorator, + which adjusts :meth:`~object.__repr__` and :meth:`~object.__str__` + to show values as members of their module rather than the enum class. + For example, ``'re.ASCII'`` for the :data:`~re.ASCII` member + of :class:`re.RegexFlag` rather than ``'RegexFlag.ASCII'``. -* ``member`` added -- decorator to ensure given object is converted to an enum - member. +* Enhanced :class:`~enum.Flag` to support + :func:`len`, iteration and :keyword:`in`/:keyword:`not in` on its members. + For example, the following now works: + ``len(AFlag(3)) == 2 and list(AFlag(3)) == (AFlag.ONE, AFlag.TWO)`` -* ``nonmember`` added -- decorator to ensure given object is not converted to - an enum member. +* Changed :class:`~enum.Enum` and :class:`~enum.Flag` + so that members are now defined + before :meth:`~object.__init_subclass__` is called; + :func:`dir` now includes methods, etc., from mixed-in data types. -* ``property`` added -- use instead of ``types.DynamicClassAttribute``. +* Changed :class:`~enum.Flag` + to only consider primary values (power of two) canonical + while composite values (``3``, ``6``, ``10``, etc.) are considered aliases; + inverted flags are coerced to their positive equivalent. -* ``global_enum`` added -- enum decorator to adjust ``__repr__`` and ``__str__`` - to show members in the global context -- see ``re.RegexFlag`` for an example. -* ``Flag`` enhancements: members support length, iteration, and containment - checks. +.. _whatsnew311-fcntl: -* ``Enum``/``Flag`` fixes: members are now defined before ``__init_subclass__`` - is called; ``dir()`` now includes methods, etc., from mixed-in data types. +fcntl +----- + +* On FreeBSD, the :data:`!F_DUP2FD` and :data:`!F_DUP2FD_CLOEXEC` flags respectively + are supported, the former equals to ``dup2`` usage while the latter set + the ``FD_CLOEXEC`` flag in addition. -* ``Flag`` fixes: only primary values (power of two) are considered canonical - while composite values (3, 6, 10, etc.) are considered aliases; inverted - flags are coerced to their positive equivalent. -* ``IntEnum`` / ``IntFlag`` / ``StrEnum`` fixes: these now inherit from - ``ReprEnum`` so the ``str()`` output now matches ``format()`` output, - which is the data types' (so both ``str(AnIntEnum.ONE)`` and - ``format(AnIntEnum.ONE)`` is equal to ``'1'``). +.. _whatsnew311-fractions: fractions --------- @@ -672,6 +740,9 @@ fractions that an ``isinstance(some_fraction, typing.SupportsInt)`` check passes. (Contributed by Mark Dickinson in :issue:`44547`.) + +.. _whatsnew311-functools: + functools --------- @@ -702,6 +773,9 @@ functools (Contributed by Yurii Karabas in :issue:`46014`.) + +.. _whatsnew311-hashlib: + hashlib ------- @@ -720,31 +794,48 @@ hashlib of files or file-like objects. 
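For example, hashing a file without hand-rolling a read loop (the filename is a placeholder)::

    import hashlib

    with open("release.tar.gz", "rb") as f:        # must be opened in binary mode
        digest = hashlib.file_digest(f, "sha256")  # new in 3.11
    print(digest.hexdigest())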
(Contributed by Christian Heimes in :gh:`89313`.) + +.. _whatsnew311-idle: + IDLE and idlelib ---------------- -* Apply syntax highlighting to `.pyi` files. (Contributed by Alex +* Apply syntax highlighting to ``.pyi`` files. (Contributed by Alex Waygood and Terry Jan Reedy in :issue:`45447`.) * Include prompts when saving Shell with inputs and outputs. (Contributed by Terry Jan Reedy in :gh:`95191`.) + +.. _whatsnew311-inspect: + inspect ------- -* Add :func:`inspect.getmembers_static`: return all members without + +* Add :func:`~inspect.getmembers_static` to return all members without triggering dynamic lookup via the descriptor protocol. (Contributed by Weipeng Hong in :issue:`30533`.) -* Add :func:`inspect.ismethodwrapper` for checking if the type of an object is a - :class:`~types.MethodWrapperType`. (Contributed by Hakan Çelik in :issue:`29418`.) +* Add :func:`~inspect.ismethodwrapper` + for checking if the type of an object is a :class:`~types.MethodWrapperType`. + (Contributed by Hakan Çelik in :issue:`29418`.) -* Change the frame-related functions in the :mod:`inspect` module to return a - regular object (that is backwards compatible with the old tuple-like - interface) that include the extended :pep:`657` position information (end +* Change the frame-related functions in the :mod:`inspect` module to return new + :class:`~inspect.FrameInfo` and :class:`~inspect.Traceback` class instances + (backwards compatible with the previous :term:`named tuple`-like interfaces) + that includes the extended :pep:`657` position information (end line number, column and end column). The affected functions are: - :func:`inspect.getframeinfo`, :func:`inspect.getouterframes`, :func:`inspect.getinnerframes`, - :func:`inspect.stack` and :func:`inspect.trace`. (Contributed by Pablo Galindo in - :gh:`88116`.) + + * :func:`inspect.getframeinfo` + * :func:`inspect.getouterframes` + * :func:`inspect.getinnerframes`, + * :func:`inspect.stack` + * :func:`inspect.trace` + + (Contributed by Pablo Galindo in :gh:`88116`.) + + +.. _whatsnew311-locale: locale ------ @@ -753,6 +844,28 @@ locale ``locale.getpreferredencoding(False)`` but ignores the :ref:`Python UTF-8 Mode `. + +.. _whatsnew311-logging: + +logging +------- + +* Added :func:`~logging.getLevelNamesMapping` + to return a mapping from logging level names (e.g. ``'CRITICAL'``) + to the values of their corresponding :ref:`levels` (e.g. ``50``, by default). + (Contributed by Andrei Kulakovin in :gh:`88024`.) + +* Added a :meth:`~logging.handlers.SysLogHandler.createSocket` method + to :class:`~logging.handlers.SysLogHandler`, to match + :meth:`SocketHandler.createSocket() + `. + It is called automatically during handler initialization + and when emitting an event, if there is no active socket. + (Contributed by Kirill Pinchuk in :gh:`88457`.) + + +.. _whatsnew311-math: + math ---- @@ -772,6 +885,8 @@ math (Contributed by Victor Stinner in :issue:`46917`.) +.. _whatsnew311-operator: + operator -------- @@ -780,6 +895,8 @@ operator (Contributed by Antony Lee in :issue:`44019`.) +.. _whatsnew311-os: + os -- @@ -788,6 +905,8 @@ os (Contributed by Dong-hee Na in :issue:`44611`.) +.. _whatsnew311-pathlib: + pathlib ------- @@ -796,6 +915,9 @@ pathlib :data:`~os.sep` or :data:`~os.altsep`. (Contributed by Eisuke Kawasima in :issue:`22276` and :issue:`33392`.) + +.. _whatsnew311-re: + re -- @@ -803,6 +925,9 @@ re ``?+``, ``{m,n}+``) are now supported in regular expressions. (Contributed by Jeffrey C. Jacobs and Serhiy Storchaka in :issue:`433030`.) + +.. 
_whatsnew311-shutil: + shutil ------ @@ -810,6 +935,8 @@ shutil (Contributed by Serhiy Storchaka in :issue:`46245`.) +.. _whatsnew311-socket: + socket ------ @@ -821,6 +948,9 @@ socket instead of only raising the last error. (Contributed by Irit Katriel in :issue:`29980`.) + +.. _whatsnew311-sqlite3: + sqlite3 ------- @@ -872,13 +1002,27 @@ sqlite3 (Contributed by Aviv Palivoda and Erlend E. Aasland in :issue:`24905`.) +.. _whatsnew311-string: + +string +------ + +* Add :meth:`~string.Template.get_identifiers` + and :meth:`~string.Template.is_valid` to :class:`string.Template`, + which respectively return all valid placeholders, + and whether any invalid placeholders are present. + (Contributed by Ben Kehoe in :gh:`90465`.) + + +.. _whatsnew311-sys: + sys --- * :func:`sys.exc_info` now derives the ``type`` and ``traceback`` fields from the ``value`` (the exception instance), so when an exception is modified while it is being handled, the changes are reflected in - the results of subsequent calls to :func:`exc_info`. + the results of subsequent calls to :func:`!exc_info`. (Contributed by Irit Katriel in :issue:`45711`.) * Add :func:`sys.exception` which returns the active exception instance @@ -889,6 +1033,8 @@ sys (Contributed by Victor Stinner in :gh:`57684`.) +.. _whatsnew311-sysconfig: + sysconfig --------- @@ -906,6 +1052,21 @@ sysconfig (Contributed by Miro Hrončok in :issue:`45413`.) +.. _whatsnew311-tempfile: + +tempfile +-------- + +* :class:`~tempfile.SpooledTemporaryFile` objects now fully implement the methods + of :class:`io.BufferedIOBase` or :class:`io.TextIOBase` + (depending on file mode). + This lets them work correctly with APIs that expect file-like objects, + such as compression modules. + (Contributed by Carey Metcalfe in :gh:`70363`.) + + +.. _whatsnew311-threading: + threading --------- @@ -917,6 +1078,8 @@ threading (Contributed by Victor Stinner in :issue:`41710`.) +.. _whatsnew311-time: + time ---- @@ -934,6 +1097,18 @@ time (Contributed by Benjamin Szőke, Dong-hee Na, Eryk Sun and Victor Stinner in :issue:`21302` and :issue:`45429`.) +.. _whatsnew311-tkinter: + +tkinter +------- + +* Added method ``info_patchlevel()`` which returns the exact version of + the Tcl library as a named tuple similar to :data:`sys.version_info`. + (Contributed by Serhiy Storchaka in :gh:`91827`.) + + +.. _whatsnew311-traceback: + traceback --------- @@ -947,6 +1122,8 @@ traceback (Contributed by Irit Katriel in :issue:`33809`.) +.. _whatsnew311-typing: + typing ------ @@ -987,7 +1164,7 @@ For major changes, see :ref:`new-feat-related-type-hints-311`. to clear all registered overloads of a function. (Contributed by Jelle Zijlstra in :gh:`89263`.) -* The :meth:`__init__` method of :class:`~typing.Protocol` subclasses +* The :meth:`~object.__init__` method of :class:`~typing.Protocol` subclasses is now preserved. (Contributed by Adrian Garcia Badarasco in :gh:`88970`.) * The representation of empty tuple types (``Tuple[()]``) is simplified. @@ -1016,20 +1193,17 @@ For major changes, see :ref:`new-feat-related-type-hints-311`. by Nikita Sobolev in :gh:`90729`.) -tkinter -------- - -* Added method ``info_patchlevel()`` which returns the exact version of - the Tcl library as a named tuple similar to :data:`sys.version_info`. - (Contributed by Serhiy Storchaka in :gh:`91827`.) - +.. _whatsnew311-unicodedata: unicodedata ----------- -* The Unicode database has been updated to version 14.0.0. (Contributed by Benjamin Peterson in :issue:`45190`). 
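For instance, the two :class:`string.Template` helpers added above behave roughly as follows::

    from string import Template

    t = Template("Hello $name, your id is ${uid}")
    print(t.get_identifiers())                # ['name', 'uid']
    print(t.is_valid())                       # True
    print(Template("dangling $").is_valid())  # False: a bare "$" is not a valid placeholder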
+* The Unicode database has been updated to version 14.0.0. + (Contributed by Benjamin Peterson in :issue:`45190`). +.. _whatsnew311-unittest: + unittest -------- @@ -1042,6 +1216,8 @@ unittest (Contributed by Serhiy Storchaka in :issue:`45046`.) +.. _whatsnew311-venv: + venv ---- @@ -1055,6 +1231,9 @@ venv Third party code that also creates new virtual environments should do the same. (Contributed by Miro Hrončok in :issue:`45413`.) + +.. _whatsnew311-warnings: + warnings -------- @@ -1062,52 +1241,79 @@ warnings providing a more concise way to locally ignore warnings or convert them to errors. (Contributed by Zac Hatfield-Dodds in :issue:`47074`.) + +.. _whatsnew311-zipfile: + zipfile ------- -* Added support for specifying member name encoding for reading - metadata in the zipfile's directory and file headers. +* Added support for specifying member name encoding for reading metadata + in a :class:`~zipfile.ZipFile`'s directory and file headers. (Contributed by Stephen J. Turnbull and Serhiy Storchaka in :issue:`28080`.) -fcntl ------ +* Added :meth:`ZipFile.mkdir() ` + for creating new directories inside ZIP archives. + (Contributed by Sam Ezeh in :gh:`49083`.) + +* Added :attr:`~zipfile.Path.stem`, :attr:`~zipfile.Path.suffix` + and :attr:`~zipfile.Path.suffixes` to :class:`zipfile.Path`. + (Contributed by Miguel Brito in :gh:`88261`.) -* On FreeBSD, the :attr:`F_DUP2FD` and :attr:`F_DUP2FD_CLOEXEC` flags respectively - are supported, the former equals to ``dup2`` usage while the latter set - the ``FD_CLOEXEC`` flag in addition. +.. _whatsnew311-optimizations: Optimizations ============= -* Compiler now optimizes simple C-style formatting with literal format - containing only format codes ``%s``, ``%r`` and ``%a`` and makes it as - fast as corresponding f-string expression. +This section covers specific optimizations independent of the +:ref:`whatsnew311-faster-cpython` project, which is covered in its own section. + +* The compiler now optimizes simple + :ref:`printf-style % formatting ` on string literals + containing only the format codes ``%s``, ``%r`` and ``%a`` and makes it as + fast as a corresponding :term:`f-string` expression. (Contributed by Serhiy Storchaka in :issue:`28307`.) -* "Zero-cost" exceptions are implemented. The cost of ``try`` statements is - almost eliminated when no exception is raised. - (Contributed by Mark Shannon in :issue:`40222`.) +* Integer division (``//``) is better tuned for optimization by compilers. + It is now around 20% faster on x86-64 when dividing an :class:`int` + by a value smaller than ``2**30``. + (Contributed by Gregory P. Smith and Tim Peters in :gh:`90564`.) -* Pure ASCII strings are now normalized in constant time by :func:`unicodedata.normalize`. - (Contributed by Dong-hee Na in :issue:`44987`.) +* :func:`sum` is now nearly 30% faster for integers smaller than ``2**30``. + (Contributed by Stefan Behnel in :gh:`68264`.) -* :mod:`math` functions :func:`~math.comb` and :func:`~math.perm` are now up - to 10 times or more faster for large arguments (the speed up is larger for - larger *k*). - (Contributed by Serhiy Storchaka in :issue:`37295`.) +* Resizing lists is streamlined for the common case, + speeding up :meth:`list.append` by ≈15% + and simple :term:`list comprehension`\s by up to 20-30% + (Contributed by Dennis Sweeney in :gh:`91165`.) -* Dict don't store hash value when all inserted keys are Unicode objects. - This reduces dict size. 
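The size reduction mentioned above is easy to check (the quoted numbers assume a 64-bit build)::

    import sys

    # Every key is a str, so the per-entry hash slot is dropped on 3.11:
    print(sys.getsizeof(dict.fromkeys("abcdefg")))   # 272 on 3.11, 352 on 3.10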
For example, ``sys.getsizeof(dict.fromkeys("abcdefg"))`` - becomes 272 bytes from 352 bytes on 64bit platform. +* Dictionaries don't store hash values when all keys are Unicode objects, + decreasing :class:`dict` size. + For example, ``sys.getsizeof(dict.fromkeys("abcdefg"))`` + is reduced from 352 bytes to 272 bytes (23% smaller) on 64-bit platforms. (Contributed by Inada Naoki in :issue:`46845`.) -* :mod:`re`'s regular expression matching engine has been partially refactored, - and now uses computed gotos (or "threaded code") on supported platforms. As a - result, Python 3.11 executes the `pyperformance regular expression benchmarks - `_ up to 10% - faster than Python 3.10. +* Using :class:`asyncio.DatagramProtocol` is now orders of magnitude faster + when transferring large files over UDP, + with speeds over 100 times higher for a ≈60 MiB file. + (Contributed by msoxzw in :gh:`91487`.) + +* :mod:`math` functions :func:`~math.comb` and :func:`~math.perm` are now + ≈10 times faster for large arguments (with a larger speedup for larger *k*). + (Contributed by Serhiy Storchaka in :issue:`37295`.) + +* The :mod:`statistics` functions :func:`~statistics.mean`, + :func:`~statistics.variance` and :func:`~statistics.stdev` now consume + iterators in one pass rather than converting them to a :class:`list` first. + This is twice as fast and can save substantial memory. + (Contributed by Raymond Hettinger in :gh:`90415`.) + +* :func:`unicodedata.normalize` + now normalizes pure-ASCII strings in constant time. + (Contributed by Dong-hee Na in :issue:`44987`.) + +.. _whatsnew311-faster-cpython: Faster CPython ============== @@ -1121,11 +1327,16 @@ could be up to 10-60% faster. This project focuses on two major areas in Python: faster startup and faster runtime. Other optimizations not under this project are listed in `Optimizations`_. + +.. _whatsnew311-faster-startup: + Faster Startup -------------- +.. _whatsnew311-faster-imports: + Frozen imports / Static code objects -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Python caches bytecode in the :ref:`__pycache__` directory to speed up module loading. @@ -1150,11 +1361,16 @@ impact for short-running programs using Python. (Contributed by Eric Snow, Guido van Rossum and Kumar Aditya in numerous issues.) +.. _whatsnew311-faster-runtime: + Faster Runtime -------------- +.. _whatsnew311-lazy-python-frames: + Cheaper, lazy Python frames -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Python frames are created whenever Python calls a Python function. This frame holds execution information. The following are new frame optimizations: @@ -1171,10 +1387,13 @@ up significantly. We measured a 3-7% speedup in pyperformance. (Contributed by Mark Shannon in :issue:`44590`.) + .. _inline-calls: +.. _whatsnew311-inline-calls: Inlined Python function calls -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + During a Python function call, Python will call an evaluating C function to interpret that function's code. This effectively limits pure Python recursion to what's safe for the C stack. @@ -1191,8 +1410,12 @@ We measured a 1-3% improvement in pyperformance. (Contributed by Pablo Galindo and Mark Shannon in :issue:`45256`.) + +.. _whatsnew311-pep659: + PEP 659: Specializing Adaptive Interpreter -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + :pep:`659` is one of the key parts of the faster CPython project. 
The general idea is that while Python is a dynamic language, most code has regions where objects and types rarely change. This concept is known as *type stability*. @@ -1274,6 +1497,8 @@ Bucher, with additional help from Irit Katriel and Dennis Sweeney.) be sped up by :issue:`45947`. +.. _whatsnew311-faster-cpython-misc: + Misc ---- @@ -1285,6 +1510,9 @@ Misc time required for catching an exception by about 10%. (Contributed by Irit Katriel in :issue:`45711`.) + +.. _whatsnew311-faster-cpython-faq: + FAQ --- @@ -1319,6 +1547,8 @@ FAQ | A: No. We're still exploring other optimizations. +.. _whatsnew311-faster-cpython-about: + About ----- @@ -1328,137 +1558,218 @@ funded by Bloomberg LP to work on the project part-time. Finally, many contributors are volunteers from the community. +.. _whatsnew311-bytecode-changes: + CPython bytecode changes ======================== -* The bytecode now contains inline cache entries, which take the form of - :opcode:`CACHE` instructions. Many opcodes expect to be followed by an exact - number of caches, and instruct the interpreter to skip over them at runtime. - Populated caches can look like arbitrary instructions, so great care should be - taken when reading or modifying raw, adaptive bytecode containing quickened - data. +The bytecode now contains inline cache entries, +which take the form of the newly-added :opcode:`CACHE` instructions. +Many opcodes expect to be followed by an exact number of caches, +and instruct the interpreter to skip over them at runtime. +Populated caches can look like arbitrary instructions, +so great care should be taken when reading or modifying +raw, adaptive bytecode containing quickened data. -* Replaced all numeric ``BINARY_*`` and ``INPLACE_*`` instructions with a single - :opcode:`BINARY_OP` implementation. -* Replaced the three call instructions: :opcode:`CALL_FUNCTION`, - :opcode:`CALL_FUNCTION_KW` and :opcode:`CALL_METHOD` with - :opcode:`PUSH_NULL`, :opcode:`PRECALL`, :opcode:`CALL`, - and :opcode:`KW_NAMES`. - This decouples the argument shifting for methods from the handling of - keyword arguments and allows better specialization of calls. +.. _whatsnew311-added-opcodes: -* Removed ``COPY_DICT_WITHOUT_KEYS`` and ``GEN_START``. +New opcodes +----------- + +* :opcode:`ASYNC_GEN_WRAP`, :opcode:`RETURN_GENERATOR` and :opcode:`SEND`, + used in generators and co-routines. -* :opcode:`MATCH_CLASS` and :opcode:`MATCH_KEYS` no longer push an additional - boolean value indicating whether the match succeeded or failed. Instead, they - indicate failure with :const:`None` (where a tuple of extracted values would - otherwise be). +* :opcode:`COPY_FREE_VARS`, + which avoids needing special caller-side code for closures. -* Replace several stack manipulation instructions (``DUP_TOP``, ``DUP_TOP_TWO``, - ``ROT_TWO``, ``ROT_THREE``, ``ROT_FOUR``, and ``ROT_N``) with new - :opcode:`COPY` and :opcode:`SWAP` instructions. +* :opcode:`JUMP_BACKWARD_NO_INTERRUPT`, + for use in certain loops where handling interrupts is undesirable. -* Replaced :opcode:`JUMP_IF_NOT_EXC_MATCH` by :opcode:`CHECK_EXC_MATCH` which - performs the check but does not jump. +* :opcode:`MAKE_CELL`, to create :ref:`cell-objects`. -* Replaced :opcode:`JUMP_IF_NOT_EG_MATCH` by :opcode:`CHECK_EG_MATCH` which - performs the check but does not jump. +* :opcode:`CHECK_EG_MATCH` and :opcode:`PREP_RERAISE_STAR`, + to handle the :ref:`new exception groups and except* ` + added in :pep:`654`. -* Replaced :opcode:`JUMP_ABSOLUTE` by the relative :opcode:`JUMP_BACKWARD`. 
+* :opcode:`PUSH_EXC_INFO`, for use in exception handlers. -* Added :opcode:`JUMP_BACKWARD_NO_INTERRUPT`, which is used in certain loops where it - is undesirable to handle interrupts. +* :opcode:`RESUME`, a no-op, + for internal tracing, debugging and optimization checks. -* Replaced :opcode:`POP_JUMP_IF_TRUE` and :opcode:`POP_JUMP_IF_FALSE` by - the relative :opcode:`POP_JUMP_FORWARD_IF_TRUE`, :opcode:`POP_JUMP_BACKWARD_IF_TRUE`, - :opcode:`POP_JUMP_FORWARD_IF_FALSE` and :opcode:`POP_JUMP_BACKWARD_IF_FALSE`. -* Added :opcode:`POP_JUMP_FORWARD_IF_NOT_NONE`, :opcode:`POP_JUMP_BACKWARD_IF_NOT_NONE`, - :opcode:`POP_JUMP_FORWARD_IF_NONE` and :opcode:`POP_JUMP_BACKWARD_IF_NONE` - opcodes to speed up conditional jumps. +.. _whatsnew311-replaced-opcodes: -* :opcode:`JUMP_IF_TRUE_OR_POP` and :opcode:`JUMP_IF_FALSE_OR_POP` are now - relative rather than absolute. +Replaced opcodes +---------------- -* :opcode:`RESUME` has been added. It is a no-op. Performs internal tracing, - debugging and optimization checks. ++------------------------------------+-----------------------------------+-----------------------------------------+ +| Replaced Opcode(s) | New Opcode(s) | Notes | ++====================================+===================================+=========================================+ +| | :opcode:`!BINARY_*` | :opcode:`BINARY_OP` | Replaced all numeric binary/in-place | +| | :opcode:`!INPLACE_*` | | opcodes with a single opcode | ++------------------------------------+-----------------------------------+-----------------------------------------+ +| | :opcode:`!CALL_FUNCTION` | | :opcode:`CALL` | Decouples argument shifting for methods | +| | :opcode:`!CALL_FUNCTION_KW` | | :opcode:`KW_NAMES` | from handling of keyword arguments; | +| | :opcode:`!CALL_METHOD` | | :opcode:`PRECALL` | allows better specialization of calls | +| | | :opcode:`PUSH_NULL` | | ++------------------------------------+-----------------------------------+-----------------------------------------+ +| | :opcode:`!DUP_TOP` | | :opcode:`COPY` | Stack manipulation instructions | +| | :opcode:`!DUP_TOP_TWO` | | :opcode:`SWAP` | | +| | :opcode:`!ROT_TWO` | | | +| | :opcode:`!ROT_THREE` | | | +| | :opcode:`!ROT_FOUR` | | | +| | :opcode:`!ROT_N` | | | ++------------------------------------+-----------------------------------+-----------------------------------------+ +| | :opcode:`!JUMP_IF_NOT_EXC_MATCH` | | :opcode:`CHECK_EXC_MATCH` | Now performs check but doesn't jump | ++------------------------------------+-----------------------------------+-----------------------------------------+ +| | :opcode:`!JUMP_ABSOLUTE` | | :opcode:`JUMP_BACKWARD` | See [#bytecode-jump]_; | +| | :opcode:`!POP_JUMP_IF_FALSE` | | :opcode:`POP_JUMP_BACKWARD_IF_* | ``TRUE``, ``FALSE``, | +| | :opcode:`!POP_JUMP_IF_TRUE` | ` | ``NONE`` and ``NOT_NONE`` variants | +| | | :opcode:`POP_JUMP_FORWARD_IF_* | for each direction | +| | ` | | ++------------------------------------+-----------------------------------+-----------------------------------------+ +| | :opcode:`!SETUP_WITH` | :opcode:`BEFORE_WITH` | :keyword:`with` block setup | +| | :opcode:`!SETUP_ASYNC_WITH` | | | ++------------------------------------+-----------------------------------+-----------------------------------------+ + +.. [#bytecode-jump] All jump opcodes are now relative, including the + existing :opcode:`JUMP_IF_TRUE_OR_POP` and :opcode:`JUMP_IF_FALSE_OR_POP`. + The argument is now an offset from the current instruction + rather than an absolute location. + + +.. 
_whatsnew311-changed-opcodes: +.. _whatsnew311-removed-opcodes: +.. _whatsnew311-changed-removed-opcodes: + +Changed/removed opcodes +----------------------- + +* Changed :opcode:`MATCH_CLASS` and :opcode:`MATCH_KEYS` + to no longer push an additional boolean value to indicate success/failure. + Instead, ``None`` is pushed on failure + in place of the tuple of extracted values. + +* Changed opcodes that work with exceptions to reflect them + now being represented as one item on the stack instead of three + (see :gh:`89874`). + +* Removed :opcode:`!COPY_DICT_WITHOUT_KEYS`, :opcode:`!GEN_START`, + :opcode:`!POP_BLOCK`, :opcode:`!SETUP_FINALLY` and :opcode:`!YIELD_FROM`. + + +.. _whatsnew311-deprecated: +.. _whatsnew311-python-api-deprecated: Deprecated ========== +This section lists Python APIs that have been deprecated in Python 3.11. + +Deprecated C APIs are :ref:`listed separately `. + + +.. _whatsnew311-deprecated-language: +.. _whatsnew311-deprecated-builtins: + +Language/Builtins +----------------- + * Chaining :class:`classmethod` descriptors (introduced in :issue:`19072`) is now deprecated. It can no longer be used to wrap other descriptors such as :class:`property`. The core design of this feature was flawed and caused a number of downstream problems. To "pass-through" a - :class:`classmethod`, consider using the ``__wrapped__`` attribute + :class:`classmethod`, consider using the :attr:`!__wrapped__` attribute that was added in Python 3.10. (Contributed by Raymond Hettinger in :gh:`89519`.) -* Octal escapes in string and bytes literals with value larger than ``0o377`` now - produce :exc:`DeprecationWarning`. - In a future Python version they will be a :exc:`SyntaxWarning` and +* Octal escapes in string and bytes literals with values larger than ``0o377`` + (255 in decimal) now produce a :exc:`DeprecationWarning`. + In a future Python version, they will raise a :exc:`SyntaxWarning` and eventually a :exc:`SyntaxError`. (Contributed by Serhiy Storchaka in :gh:`81548`.) -* The :mod:`lib2to3` package and ``2to3`` tool are now deprecated and may not - be able to parse Python 3.10 or newer. See the :pep:`617` (New PEG parser for - CPython). (Contributed by Victor Stinner in :issue:`40360`.) +* The delegation of :func:`int` to :meth:`~object.__trunc__` is now deprecated. + Calling ``int(a)`` when ``type(a)`` implements :meth:`!__trunc__` but not + :meth:`~object.__int__` or :meth:`~object.__index__` now raises + a :exc:`DeprecationWarning`. + (Contributed by Zackery Spytz in :issue:`44977`.) -* Undocumented modules ``sre_compile``, ``sre_constants`` and ``sre_parse`` - are now deprecated. - (Contributed by Serhiy Storchaka in :issue:`47152`.) -* :class:`webbrowser.MacOSX` is deprecated and will be removed in Python 3.13. - It is untested and undocumented and also not used by webbrowser itself. - (Contributed by Dong-hee Na in :issue:`42255`.) +.. _whatsnew311-deprecated-modules: -* The behavior of returning a value from a :class:`~unittest.TestCase` and - :class:`~unittest.IsolatedAsyncioTestCase` test methods (other than the - default ``None`` value), is now deprecated. +Modules +------- -* Deprecated the following :mod:`unittest` functions, scheduled for removal in - Python 3.13: +.. 
_whatsnew311-pep594: - * :func:`unittest.findTestCases` - * :func:`unittest.makeSuite` - * :func:`unittest.getTestCaseNames` +* :pep:`594` led to the deprecations of the following modules + slated for removal in Python 3.13: - Use :class:`~unittest.TestLoader` method instead: + +---------------------+---------------------+---------------------+---------------------+---------------------+ + | :mod:`aifc` | :mod:`chunk` | :mod:`msilib` | :mod:`pipes` | :mod:`telnetlib` | + +---------------------+---------------------+---------------------+---------------------+---------------------+ + | :mod:`audioop` | :mod:`crypt` | :mod:`nis` | :mod:`sndhdr` | :mod:`uu` | + +---------------------+---------------------+---------------------+---------------------+---------------------+ + | :mod:`cgi` | :mod:`imghdr` | :mod:`nntplib` | :mod:`spwd` | :mod:`xdrlib` | + +---------------------+---------------------+---------------------+---------------------+---------------------+ + | :mod:`cgitb` | :mod:`mailcap` | :mod:`ossaudiodev` | :mod:`sunau` | | + +---------------------+---------------------+---------------------+---------------------+---------------------+ - * :meth:`unittest.TestLoader.loadTestsFromModule` - * :meth:`unittest.TestLoader.loadTestsFromTestCase` - * :meth:`unittest.TestLoader.getTestCaseNames` + (Contributed by Brett Cannon in :issue:`47061` and Victor Stinner in + :gh:`68966`.) - (Contributed by Erlend E. Aasland in :issue:`5846`.) +* The :mod:`asynchat`, :mod:`asyncore` and :mod:`smtpd` modules have been + deprecated since at least Python 3.6. Their documentation and deprecation + warnings have now been updated to note they will be removed in Python 3.12. + (Contributed by Hugo van Kemenade in :issue:`47022`.) -* The :meth:`turtle.RawTurtle.settiltangle` is deprecated since Python 3.1, - it now emits a deprecation warning and will be removed in Python 3.13. Use - :meth:`turtle.RawTurtle.tiltangle` instead (it was earlier incorrectly marked - as deprecated, its docstring is now corrected). - (Contributed by Hugo van Kemenade in :issue:`45837`.) +* The :mod:`lib2to3` package and :ref:`2to3 <2to3-reference>` tool + are now deprecated and may not be able to parse Python 3.10 or newer. + See :pep:`617`, introducing the new PEG parser, for details. + (Contributed by Victor Stinner in :issue:`40360`.) -* The delegation of :func:`int` to :meth:`__trunc__` is now deprecated. Calling - ``int(a)`` when ``type(a)`` implements :meth:`__trunc__` but not - :meth:`__int__` or :meth:`__index__` now raises a :exc:`DeprecationWarning`. - (Contributed by Zackery Spytz in :issue:`44977`.) +* Undocumented modules :mod:`!sre_compile`, :mod:`!sre_constants` + and :mod:`!sre_parse` are now deprecated. + (Contributed by Serhiy Storchaka in :issue:`47152`.) + + +.. _whatsnew311-deprecated-stdlib: + +Standard Library +---------------- * The following have been deprecated in :mod:`configparser` since Python 3.2. - Their deprecation warnings have now been updated to note they will removed in - Python 3.12: + Their deprecation warnings have now been updated to note they will be removed + in Python 3.12: - * the :class:`configparser.SafeConfigParser` class - * the :attr:`configparser.ParsingError.filename` property + * the :class:`!configparser.SafeConfigParser` class + * the :attr:`!configparser.ParsingError.filename` property * the :meth:`configparser.RawConfigParser.readfp` method (Contributed by Hugo van Kemenade in :issue:`45173`.) 
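+
+  For example, code using these deprecated :mod:`configparser` APIs can
+  usually be updated as follows (a minimal sketch; the ``config.ini``
+  filename is purely illustrative)::
+
+      import configparser
+
+      parser = configparser.ConfigParser()      # rather than SafeConfigParser
+      with open("config.ini", encoding="utf-8") as f:
+          parser.read_file(f)                   # rather than parser.readfp(f)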
-* :class:`configparser.LegacyInterpolation` has been deprecated in the docstring - since Python 3.2. It now emits a :exc:`DeprecationWarning` and will be removed +* :class:`!configparser.LegacyInterpolation` has been deprecated in the docstring + since Python 3.2, and is not listed in the :mod:`configparser` documentation. + It now emits a :exc:`DeprecationWarning` and will be removed in Python 3.13. Use :class:`configparser.BasicInterpolation` or :class:`configparser.ExtendedInterpolation` instead. (Contributed by Hugo van Kemenade in :issue:`46607`.) +* The older set of :mod:`importlib.resources` functions were deprecated + in favor of the replacements added in Python 3.9 + and will be removed in a future Python version, + due to not supporting resources located within package subdirectories: + + * :func:`importlib.resources.contents` + * :func:`importlib.resources.is_resource` + * :func:`importlib.resources.open_binary` + * :func:`importlib.resources.open_text` + * :func:`importlib.resources.read_binary` + * :func:`importlib.resources.read_text` + * :func:`importlib.resources.path` + * The :func:`locale.getdefaultlocale` function is deprecated and will be removed in Python 3.13. Use :func:`locale.setlocale`, :func:`locale.getpreferredencoding(False) ` and @@ -1469,45 +1780,25 @@ Deprecated removed in Python 3.13. Use ``locale.setlocale(locale.LC_ALL, "")`` instead. (Contributed by Victor Stinner in :gh:`90817`.) -* The :mod:`asynchat`, :mod:`asyncore` and :mod:`smtpd` modules have been - deprecated since at least Python 3.6. Their documentation and deprecation - warnings have now been updated to note they will removed in Python 3.12 - (:pep:`594`). - (Contributed by Hugo van Kemenade in :issue:`47022`.) - -* :pep:`594` led to the deprecations of the following modules which are - slated for removal in Python 3.13: - - * :mod:`aifc` - * :mod:`audioop` - * :mod:`cgi` - * :mod:`cgitb` - * :mod:`chunk` - * :mod:`crypt` - * :mod:`imghdr` - * :mod:`mailcap` - * :mod:`msilib` - * :mod:`nis` - * :mod:`nntplib` - * :mod:`ossaudiodev` - * :mod:`pipes` - * :mod:`sndhdr` - * :mod:`spwd` - * :mod:`sunau` - * :mod:`telnetlib` - * :mod:`uu` - * :mod:`xdrlib` +* Stricter rules will now be applied for numerical group references + and group names in :ref:`regular expressions `. + Only sequences of ASCII digits will now be accepted as a numerical reference, + and the group name in :class:`bytes` patterns and replacement strings + can only contain ASCII letters, digits and underscores. + For now, a deprecation warning is raised for syntax violating these rules. + (Contributed by Serhiy Storchaka in :gh:`91760`.) - (Contributed by Brett Cannon in :issue:`47061` and Victor Stinner in - :gh:`68966`.) +* In the :mod:`re` module, the :func:`!re.template` function + and the corresponding :data:`!re.TEMPLATE` and :data:`!re.T` flags + are deprecated, as they were undocumented and lacked an obvious purpose. + They will be removed in Python 3.13. + (Contributed by Serhiy Storchaka and Miro Hrončok in :gh:`92728`.) -* More strict rules will be applied now applied for numerical group references - and group names in regular expressions in future Python versions. - Only sequence of ASCII digits will be now accepted as a numerical reference. - The group name in bytes patterns and replacement strings could only - contain ASCII letters and digits and underscore. - For now, a deprecation warning is raised for such syntax. - (Contributed by Serhiy Storchaka in :gh:`91760`.) 
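+
+  As a quick illustration of the stricter group-reference rules described
+  above, ordinary ASCII group names and numeric references are unaffected
+  and keep working as before:
+
+     >>> import re
+     >>> re.sub(r"(?P<word>\w+)", r"\g<word>s", "cat dog")
+     'cats dogs'
+     >>> re.sub(r"(\w+)", r"\1s", "cat dog")
+     'cats dogs'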
+* :func:`turtle.settiltangle` has been deprecated since Python 3.1; + it now emits a deprecation warning and will be removed in Python 3.13. Use + :func:`turtle.tiltangle` instead (it was earlier incorrectly marked + as deprecated, and its docstring is now corrected). + (Contributed by Hugo van Kemenade in :issue:`45837`.) * :class:`typing.Text`, which exists solely to provide compatibility support between Python 2 and Python 3 code, is now deprecated. Its removal is @@ -1515,184 +1806,232 @@ Deprecated wherever possible. (Contributed by Alex Waygood in :gh:`92332`.) -* The keyword argument syntax for constructing :data:`~typing.TypedDict` types +* The keyword argument syntax for constructing :data:`typing.TypedDict` types is now deprecated. Support will be removed in Python 3.13. (Contributed by Jingchen Ye in :gh:`90224`.) -* The :func:`re.template` function and the corresponding :const:`re.TEMPLATE` - and :const:`re.T` flags are deprecated, as they were undocumented and - lacked an obvious purpose. They will be removed in Python 3.13. - (Contributed by Serhiy Storchaka and Miro Hrončok in :gh:`92728`.) +* :class:`!webbrowser.MacOSX` is deprecated and will be removed in Python 3.13. + It is untested, undocumented, and not used by :mod:`webbrowser` itself. + (Contributed by Dong-hee Na in :issue:`42255`.) + +* The behavior of returning a value from a :class:`~unittest.TestCase` and + :class:`~unittest.IsolatedAsyncioTestCase` test methods (other than the + default ``None`` value) is now deprecated. +* Deprecated the following not-formally-documented :mod:`unittest` functions, + scheduled for removal in Python 3.13: + + * :func:`!unittest.findTestCases` + * :func:`!unittest.makeSuite` + * :func:`!unittest.getTestCaseNames` + + Use :class:`~unittest.TestLoader` methods instead: + + * :meth:`unittest.TestLoader.loadTestsFromModule` + * :meth:`unittest.TestLoader.loadTestsFromTestCase` + * :meth:`unittest.TestLoader.getTestCaseNames` + + (Contributed by Erlend E. Aasland in :issue:`5846`.) + + +.. _whatsnew311-pending-removal: +.. _whatsnew311-python-api-pending-removal: Pending Removal in Python 3.12 ============================== -The following APIs have been deprecated in earlier Python releases, +The following Python APIs have been deprecated in earlier Python releases, and will be removed in Python 3.12. -Python API: +C APIs pending removal are +:ref:`listed separately `. 
-* :class:`pkgutil.ImpImporter` -* :class:`pkgutil.ImpLoader` -* :envvar:`PYTHONTHREADDEBUG` +* The :mod:`asynchat` module +* The :mod:`asyncore` module +* The :ref:`entire distutils package ` +* The :mod:`imp` module +* The :class:`typing.io ` namespace +* The :class:`typing.re ` namespace +* :func:`!cgi.log` * :func:`importlib.find_loader` -* :func:`importlib.util.module_for_loader` -* :func:`importlib.util.set_loader_wrapper` -* :func:`importlib.util.set_package_wrapper` * :meth:`importlib.abc.Loader.module_repr` -* :meth:`importlib.abc.Loadermodule_repr` -* :meth:`importlib.abc.MetaPathFinder.find_module` * :meth:`importlib.abc.MetaPathFinder.find_module` * :meth:`importlib.abc.PathEntryFinder.find_loader` * :meth:`importlib.abc.PathEntryFinder.find_module` -* :meth:`importlib.machinery.BuiltinImporter.find_module` -* :meth:`importlib.machinery.BuiltinLoader.module_repr` -* :meth:`importlib.machinery.FileFinder.find_loader` -* :meth:`importlib.machinery.FileFinder.find_module` -* :meth:`importlib.machinery.FrozenImporter.find_module` -* :meth:`importlib.machinery.FrozenLoader.module_repr` +* :meth:`!importlib.machinery.BuiltinImporter.find_module` +* :meth:`!importlib.machinery.BuiltinLoader.module_repr` +* :meth:`!importlib.machinery.FileFinder.find_loader` +* :meth:`!importlib.machinery.FileFinder.find_module` +* :meth:`!importlib.machinery.FrozenImporter.find_module` +* :meth:`!importlib.machinery.FrozenLoader.module_repr` * :meth:`importlib.machinery.PathFinder.find_module` -* :meth:`importlib.machinery.WindowsRegistryFinder.find_module` +* :meth:`!importlib.machinery.WindowsRegistryFinder.find_module` +* :func:`importlib.util.module_for_loader` +* :func:`!importlib.util.set_loader_wrapper` +* :func:`!importlib.util.set_package_wrapper` +* :class:`pkgutil.ImpImporter` +* :class:`pkgutil.ImpLoader` * :meth:`pathlib.Path.link_to` -* The entire :ref:`distutils namespace ` -* :func:`cgi.log` -* :func:`sqlite3.OptimizedUnicode` -* :func:`sqlite3.enable_shared_cache` +* :func:`!sqlite3.enable_shared_cache` +* :func:`!sqlite3.OptimizedUnicode` +* :envvar:`PYTHONTHREADDEBUG` environment variable +* The following deprecated aliases in :mod:`unittest`: + + ============================ =============================== =============== + Deprecated alias Method Name Deprecated in + ============================ =============================== =============== + ``failUnless`` :meth:`.assertTrue` 3.1 + ``failIf`` :meth:`.assertFalse` 3.1 + ``failUnlessEqual`` :meth:`.assertEqual` 3.1 + ``failIfEqual`` :meth:`.assertNotEqual` 3.1 + ``failUnlessAlmostEqual`` :meth:`.assertAlmostEqual` 3.1 + ``failIfAlmostEqual`` :meth:`.assertNotAlmostEqual` 3.1 + ``failUnlessRaises`` :meth:`.assertRaises` 3.1 + ``assert_`` :meth:`.assertTrue` 3.2 + ``assertEquals`` :meth:`.assertEqual` 3.2 + ``assertNotEquals`` :meth:`.assertNotEqual` 3.2 + ``assertAlmostEquals`` :meth:`.assertAlmostEqual` 3.2 + ``assertNotAlmostEquals`` :meth:`.assertNotAlmostEqual` 3.2 + ``assertRegexpMatches`` :meth:`.assertRegex` 3.2 + ``assertRaisesRegexp`` :meth:`.assertRaisesRegex` 3.2 + ``assertNotRegexpMatches`` :meth:`.assertNotRegex` 3.5 + ============================ =============================== =============== + +.. _whatsnew311-removed: +.. 
_whatsnew311-python-api-removed: -C API: +Removed +======= -* :c:func:`PyUnicode_AS_DATA` -* :c:func:`PyUnicode_AS_UNICODE` -* :c:func:`PyUnicode_AsUnicodeAndSize` -* :c:func:`PyUnicode_AsUnicode` -* :c:func:`PyUnicode_FromUnicode` -* :c:func:`PyUnicode_GET_DATA_SIZE` -* :c:func:`PyUnicode_GET_SIZE` -* :c:func:`PyUnicode_GetSize` -* :c:func:`PyUnicode_IS_COMPACT` -* :c:func:`PyUnicode_IS_READY` -* :c:func:`PyUnicode_READY` -* :c:func:`Py_UNICODE_WSTR_LENGTH` -* :c:func:`_PyUnicode_AsUnicode` -* :c:macro:`PyUnicode_WCHAR_KIND` -* :c:type:`PyUnicodeObject` -* :c:func:`PyUnicode_InternImmortal()` +This section lists Python APIs that have been removed in Python 3.11. +Removed C APIs are :ref:`listed separately <whatsnew311-c-api-removed>`. -Removed -======= +* Removed the :func:`!@asyncio.coroutine` :term:`decorator` + enabling legacy generator-based coroutines to be compatible with + :keyword:`async` / :keyword:`await` code. + The function has been deprecated since Python 3.8 and the removal was + initially scheduled for Python 3.10. Use :keyword:`async def` instead. + (Contributed by Illia Volochii in :issue:`43216`.) -* :class:`smtpd.MailmanProxy` is now removed as it is unusable without - an external module, ``mailman``. (Contributed by Dong-hee Na in :issue:`35800`.) +* Removed :class:`!asyncio.coroutines.CoroWrapper` used for wrapping legacy + generator-based coroutine objects in the debug mode. + (Contributed by Illia Volochii in :issue:`43216`.) -* The ``binhex`` module, deprecated in Python 3.9, is now removed. - The following :mod:`binascii` functions, deprecated in Python 3.9, are now - also removed: +* Due to significant security concerns, the *reuse_address* parameter of + :meth:`asyncio.loop.create_datagram_endpoint`, disabled in Python 3.9, is + now entirely removed. This is because of the behavior of the socket option + ``SO_REUSEADDR`` in UDP. + (Contributed by Hugo van Kemenade in :issue:`45129`.) - * ``a2b_hqx()``, ``b2a_hqx()``; - * ``rlecode_hqx()``, ``rledecode_hqx()``. +* Removed the :mod:`!binhex` module, deprecated in Python 3.9. + Also removed the related, similarly-deprecated :mod:`binascii` functions: + + * :func:`!binascii.a2b_hqx` + * :func:`!binascii.b2a_hqx` + * :func:`!binascii.rlecode_hqx` + * :func:`!binascii.rledecode_hqx` The :func:`binascii.crc_hqx` function remains available. (Contributed by Victor Stinner in :issue:`45085`.) -* The distutils ``bdist_msi`` command, deprecated in Python 3.9, is now removed. +* Removed the :mod:`distutils` ``bdist_msi`` command deprecated in Python 3.9. Use ``bdist_wheel`` (wheel packages) instead. (Contributed by Hugo van Kemenade in :issue:`45124`.) -* Due to significant security concerns, the *reuse_address* parameter of - :meth:`asyncio.loop.create_datagram_endpoint`, disabled in Python 3.9, is - now entirely removed. This is because of the behavior of the socket option - ``SO_REUSEADDR`` in UDP. - (Contributed by Hugo van Kemenade in :issue:`45129`.) - -* Removed :meth:`__getitem__` methods of +* Removed the :meth:`~object.__getitem__` methods of :class:`xml.dom.pulldom.DOMEventStream`, :class:`wsgiref.util.FileWrapper` and :class:`fileinput.FileInput`, deprecated since Python 3.9. (Contributed by Hugo van Kemenade in :issue:`45132`.) -* The following deprecated functions and methods are removed in the :mod:`gettext` - module: :func:`~gettext.lgettext`, :func:`~gettext.ldgettext`, - :func:`~gettext.lngettext` and :func:`~gettext.ldngettext`.
- - Function :func:`~gettext.bind_textdomain_codeset`, methods - :meth:`~gettext.NullTranslations.output_charset` and - :meth:`~gettext.NullTranslations.set_output_charset`, and the *codeset* - parameter of functions :func:`~gettext.translation` and - :func:`~gettext.install` are also removed, since they are only used for - the ``l*gettext()`` functions. +* Removed the deprecated :mod:`gettext` functions + :func:`!lgettext`, :func:`!ldgettext`, + :func:`!lngettext` and :func:`!ldngettext`. + Also removed the :func:`!bind_textdomain_codeset` function, + the :meth:`!NullTranslations.output_charset` and + :meth:`!NullTranslations.set_output_charset` methods, + and the *codeset* parameter of :func:`!translation` and :func:`!install`, + since they are only used for the :func:`!l*gettext` functions. (Contributed by Dong-hee Na and Serhiy Storchaka in :issue:`44235`.) -* The :func:`@asyncio.coroutine ` :term:`decorator` enabling - legacy generator-based coroutines to be compatible with async/await code. - The function has been deprecated since Python 3.8 and the removal was - initially scheduled for Python 3.10. Use :keyword:`async def` instead. - (Contributed by Illia Volochii in :issue:`43216`.) - -* :class:`asyncio.coroutines.CoroWrapper` used for wrapping legacy - generator-based coroutine objects in the debug mode. - (Contributed by Illia Volochii in :issue:`43216`.) - -* Removed the deprecated ``split()`` method of :class:`_tkinter.TkappType`. - (Contributed by Erlend E. Aasland in :issue:`38371`.) - * Removed from the :mod:`inspect` module: - * the ``getargspec`` function, deprecated since Python 3.0; + * The :func:`!getargspec` function, deprecated since Python 3.0; use :func:`inspect.signature` or :func:`inspect.getfullargspec` instead. - * the ``formatargspec`` function, deprecated since Python 3.5; - use the :func:`inspect.signature` function and :class:`Signature` object - directly. + * The :func:`!formatargspec` function, deprecated since Python 3.5; + use the :func:`inspect.signature` function + or the :class:`inspect.Signature` object directly. - * the undocumented ``Signature.from_builtin`` and ``Signature.from_function`` - functions, deprecated since Python 3.5; use the - :meth:`Signature.from_callable() ` method - instead. + * The undocumented :meth:`!Signature.from_builtin` + and :meth:`!Signature.from_function` methods, deprecated since Python 3.5; + use the :meth:`Signature.from_callable() ` + method instead. (Contributed by Hugo van Kemenade in :issue:`45320`.) -* Remove namespace package support from unittest discovery. It was introduced in - Python 3.4 but has been broken since Python 3.7. - (Contributed by Inada Naoki in :issue:`23882`.) - -* Remove ``__class_getitem__`` method from :class:`pathlib.PurePath`, +* Removed the :meth:`~object.__class_getitem__` method + from :class:`pathlib.PurePath`, because it was not used and added by mistake in previous versions. (Contributed by Nikita Sobolev in :issue:`46483`.) -* Remove the undocumented private ``float.__set_format__()`` method, previously - known as ``float.__setformat__()`` in Python 3.7. Its docstring said: "You - probably don't want to use this function. It exists mainly to be used in - Python's test suite." +* Removed the :class:`!MailmanProxy` class in the :mod:`smtpd` module, + as it is unusable without the external :mod:`!mailman` package. + (Contributed by Dong-hee Na in :issue:`35800`.) + +* Removed the deprecated :meth:`!split` method of :class:`!_tkinter.TkappType`. + (Contributed by Erlend E. 
Aasland in :issue:`38371`.) + +* Removed namespace package support from :mod:`unittest` discovery. + It was introduced in Python 3.4 but has been broken since Python 3.7. + (Contributed by Inada Naoki in :issue:`23882`.) + +* Removed the undocumented private :meth:`!float.__set_format__()` method, + previously known as :meth:`!float.__setformat__()` in Python 3.7. + Its docstring said: "You probably don't want to use this function. + It exists mainly to be used in Python's test suite." (Contributed by Victor Stinner in :issue:`46852`.) +* The :option:`!--experimental-isolated-subinterpreters` configure flag + (and corresponding :c:macro:`!EXPERIMENTAL_ISOLATED_SUBINTERPRETERS` macro) + have been removed. + +* `Pynche `_ + --- The Pythonically Natural Color and Hue Editor --- has been moved out + of ``Tools/scripts`` and is `being developed independently + `_ from the Python source tree. + + +.. _whatsnew311-porting: +.. _whatsnew311-python-api-porting: + Porting to Python 3.11 ====================== This section lists previously described changes and other bugfixes -that may require changes to your code. - - -Changes in the Python API -------------------------- +in the Python API that may require changes to your Python code. -* Prohibited passing non-:class:`concurrent.futures.ThreadPoolExecutor` - executors to :meth:`loop.set_default_executor` following a deprecation in - Python 3.8. - (Contributed by Illia Volochii in :issue:`43234`.) +Porting notes for the C API are +:ref:`listed separately `. * :func:`open`, :func:`io.open`, :func:`codecs.open` and :class:`fileinput.FileInput` no longer accept ``'U'`` ("universal newline") - in the file mode. This flag was deprecated since Python 3.3. In Python 3, the - "universal newline" is used by default when a file is open in text mode. The - :ref:`newline parameter ` of :func:`open` controls - how universal newlines works. + in the file mode. In Python 3, "universal newline" mode is used by default + whenever a file is opened in text mode, + and the ``'U'`` flag has been deprecated since Python 3.3. + The :ref:`newline parameter ` + to these functions controls how universal newlines work. (Contributed by Victor Stinner in :issue:`37330`.) -* The :mod:`pdb` module now reads the :file:`.pdbrc` configuration file with - the ``'utf-8'`` encoding. - (Contributed by Srinivas Reddy Thatiparthy (శ్రీనివాస్ రెడ్డి తాటిపర్తి) in :issue:`41137`.) +* :class:`ast.AST` node positions are now validated when provided to + :func:`compile` and other related functions. If invalid positions are detected, + a :exc:`ValueError` will be raised. (Contributed by Pablo Galindo in :gh:`93351`) + +* Prohibited passing non-:class:`concurrent.futures.ThreadPoolExecutor` + executors to :meth:`asyncio.loop.set_default_executor` + following a deprecation in Python 3.8. + (Contributed by Illia Volochii in :issue:`43234`.) * :mod:`calendar`: The :class:`calendar.LocaleTextCalendar` and :class:`calendar.LocaleHTMLCalendar` classes now use @@ -1700,58 +2039,96 @@ Changes in the Python API if no locale is specified. (Contributed by Victor Stinner in :issue:`46659`.) -* Global inline flags (e.g. ``(?i)``) can now only be used at the start of - the regular expressions. Using them not at the start of expression was - deprecated since Python 3.6. - (Contributed by Serhiy Storchaka in :issue:`47066`.) - -* :mod:`re` module: Fix a few long-standing bugs where, in rare cases, - capturing group could get wrong result. So the result may be different than - before. 
- (Contributed by Ma Lin in :issue:`35859`.) +* The :mod:`pdb` module now reads the :file:`.pdbrc` configuration file with + the ``'UTF-8'`` encoding. + (Contributed by Srinivas Reddy Thatiparthy (శ్రీనివాస్ రెడ్డి తాటిపర్తి) in :issue:`41137`.) -* The *population* parameter of :func:`random.sample` must be a sequence. - Automatic conversion of sets to lists is no longer supported. If the sample size +* The *population* parameter of :func:`random.sample` must be a sequence, + and automatic conversion of :class:`set`\s to :class:`list`\s + is no longer supported. Also, if the sample size is larger than the population size, a :exc:`ValueError` is raised. (Contributed by Raymond Hettinger in :issue:`40465`.) -* :class:`ast.AST` node positions are now validated when provided to - :func:`compile` and other related functions. If invalid positions are detected, - a :exc:`ValueError` will be raised. (Contributed by Pablo Galindo in :gh:`93351`) +* The *random* optional parameter of :func:`random.shuffle` was removed. + It was previously an arbitrary random function to use for the shuffle; + now, :func:`random.random` (its previous default) will always be used. + +* In :mod:`re` :ref:`re-syntax`, global inline flags (e.g. ``(?i)``) + can now only be used at the start of regular expressions. + Using them elsewhere has been deprecated since Python 3.6. + (Contributed by Serhiy Storchaka in :issue:`47066`.) + +* In the :mod:`re` module, several long-standing bugs were fixed that, + in rare cases, could cause capture groups to get the wrong result. + Therefore, this could change the captured output in these cases. + (Contributed by Ma Lin in :issue:`35859`.) + -* :c:member:`~PyTypeObject.tp_dictoffset` should be treated as write-only. - It can be set to describe C extension clases to the VM, but should be regarded - as meaningless when read. To get the pointer to the object's dictionary call - :c:func:`PyObject_GenericGetDict` instead. +.. _whatsnew311-build-changes: Build Changes ============= -* Building Python now requires a C11 compiler. Optional C11 features are not - required. - (Contributed by Victor Stinner in :issue:`46656`.) - -* Building Python now requires support of IEEE 754 floating point numbers. - (Contributed by Victor Stinner in :issue:`46917`.) +* CPython now has :pep:`11` :pep:`Tier 3 support <11#tier-3>` for + cross compiling to the `WebAssembly <https://webassembly.org/>`_ platforms + `Emscripten <https://emscripten.org/>`_ + (``wasm32-unknown-emscripten``, i.e. Python in the browser) + and `WebAssembly System Interface (WASI) <https://wasi.dev/>`_ + (``wasm32-unknown-wasi``). + The effort is inspired by previous work like `Pyodide <https://pyodide.org/>`_. + These platforms provide a limited subset of POSIX APIs; Python standard + library features and modules related to networking, processes, threading, + signals, mmap, and users/groups are not available or don't work. + (Emscripten contributed by Christian Heimes and Ethan Smith in :gh:`84461` + and WASI contributed by Christian Heimes in :gh:`90473`; + platforms promoted in :gh:`95085`) + +* Building Python now requires: + + * A `C11 `_ compiler. + `Optional C11 features + `_ + are not required. + (Contributed by Victor Stinner in :issue:`46656`.) + + * Support for `IEEE 754 `_ + floating point numbers. + (Contributed by Victor Stinner in :issue:`46917`.) + + * Support for `floating point Not-a-Number (NaN) + `_, + as the :c:macro:`!Py_NO_NAN` macro has been removed. + (Contributed by Victor Stinner in :issue:`46656`.)
+ + * A `C99 `_ + ```` header file providing the + :c:func:`!copysign`, :c:func:`!hypot`, :c:func:`!isfinite`, + :c:func:`!isinf`, :c:func:`!isnan`, and :c:func:`!round` functions + (contributed by Victor Stinner in :issue:`45440`); + and a :c:data:`!NAN` constant or the :c:func:`!__builtin_nan` function + (Contributed by Victor Stinner in :issue:`46640`). + +* The :mod:`tkinter` package now requires `Tcl/Tk `_ + version 8.5.12 or newer. + (Contributed by Serhiy Storchaka in :issue:`46996`.) -* CPython can now be built with the ThinLTO option via ``--with-lto=thin``. - (Contributed by Dong-hee Na and Brett Holman in :issue:`44340`.) +* Build dependencies, compiler flags, and linker flags for most stdlib + extension modules are now detected by :program:`configure`. libffi, libnsl, + libsqlite3, zlib, bzip2, liblzma, libcrypt, Tcl/Tk, and uuid flags + are detected by `pkg-config + `_ (when available). + :mod:`tkinter` now requires a pkg-config command + to detect development settings for `Tcl/Tk`_ headers and libraries. + (Contributed by Christian Heimes and Erlend Egeberg Aasland in + :issue:`45847`, :issue:`45747`, and :issue:`45763`.) * libpython is no longer linked against libcrypt. (Contributed by Mike Gilbert in :issue:`45433`.) -* Building Python now requires a C99 ```` header file providing - the following functions: ``copysign()``, ``hypot()``, ``isfinite()``, - ``isinf()``, ``isnan()``, ``round()``. - (Contributed by Victor Stinner in :issue:`45440`.) - -* Building Python now requires a C99 ```` header file providing - a ``NAN`` constant, or the ``__builtin_nan()`` built-in function. - (Contributed by Victor Stinner in :issue:`46640`.) - -* Building Python now requires support for floating point Not-a-Number (NaN): - remove the ``Py_NO_NAN`` macro. - (Contributed by Victor Stinner in :issue:`46656`.) +* CPython can now be built with the + `ThinLTO `_ option + via passing ``thin`` to :option:`--with-lto`, i.e. ``--with-lto=thin``. + (Contributed by Dong-hee Na and Brett Holman in :issue:`44340`.) * Freelists for object structs can now be disabled. A new :program:`configure` option :option:`!--without-freelists` can be used to disable all freelists @@ -1760,60 +2137,39 @@ Build Changes * ``Modules/Setup`` and ``Modules/makesetup`` have been improved and tied up. Extension modules can now be built through ``makesetup``. All except some - test modules can be linked statically into main binary or library. + test modules can be linked statically into a main binary or library. (Contributed by Brett Cannon and Christian Heimes in :issue:`45548`, :issue:`45570`, :issue:`45571`, and :issue:`43974`.) -* Build dependencies, compiler flags, and linker flags for most stdlib - extension modules are now detected by :program:`configure`. libffi, libnsl, - libsqlite3, zlib, bzip2, liblzma, libcrypt, Tcl/Tk, and uuid flags - are detected by ``pkg-config`` (when available). :mod:`tkinter` now - requires ``pkg-config`` command to detect development settings for Tcl/Tk - headers and libraries. - (Contributed by Christian Heimes and Erlend Egeberg Aasland in - :issue:`45847`, :issue:`45747`, and :issue:`45763`.) - .. note:: - Use the environment variables :envvar:`TCLTK_CFLAGS` and - :envvar:`TCLTK_LIBS` to manually specify the location of Tcl/Tk headers - and libraries. The :program:`configure` options ``--with-tcltk-includes`` - and ``--with-tcltk-libs`` have been removed. 
+ Use the environment variables :envvar:`!TCLTK_CFLAGS` and + :envvar:`!TCLTK_LIBS` to manually specify the location of Tcl/Tk headers + and libraries. The :program:`configure` options + :option:`!--with-tcltk-includes` and :option:`!--with-tcltk-libs` + have been removed. On RHEL 7 and CentOS 7 the development packages do not provide ``tcl.pc`` - and ``tk.pc``, use :envvar:`TCLTK_LIBS="-ltk8.5 -ltkstub8.5 -ltcl8.5"`. + and ``tk.pc``; use ``TCLTK_LIBS="-ltk8.5 -ltkstub8.5 -ltcl8.5"``. The directory ``Misc/rhel7`` contains ``.pc`` files and instructions - how to build Python with RHEL 7's and CentOS 7's Tcl/Tk and OpenSSL. - -* CPython now has :pep:`11` tier 3 support for cross compiling to WebAssembly - platform ``wasm32-unknown-emscripten`` (Python in the browser). The effort - is inspired by previous work like `Pyodide `_. - Emscripten provides a limited subset of POSIX APIs. Python standard - libraries features and modules related to networking, processes, threading, - signals, mmap, and users/groups are not available or don't work. - (Contributed by Christian Heimes and Ethan Smith in :gh:`84461`, - promoted in :gh:`95085`) - -* CPython now has :pep:`11` tier 3 support for cross compiling to WebAssembly - platform ``wasm32-unknown-wasi`` (WebAssembly System Interface). Like on - Emscripten, only a subset of Python's standard library is available on WASI. - (Contributed by Christian Heimes in :gh:`90473`, promoted in :gh:`95085`) + on how to build Python with RHEL 7's and CentOS 7's Tcl/Tk and OpenSSL. * CPython will now use 30-bit digits by default for the Python :class:`int` implementation. Previously, the default was to use 30-bit digits on platforms with ``SIZEOF_VOID_P >= 8``, and 15-bit digits otherwise. It's still possible to explicitly request use of 15-bit digits via either the - ``--enable-big-digits`` option to the configure script or (for Windows) the - ``PYLONG_BITS_IN_DIGIT`` variable in ``PC/pyconfig.h``, but this option may - be removed at some point in the future. (Contributed by Mark Dickinson in - :issue:`45569`.) + :option:`--enable-big-digits` option to the configure script + or (for Windows) the ``PYLONG_BITS_IN_DIGIT`` variable in ``PC/pyconfig.h``, + but this option may be removed at some point in the future. + (Contributed by Mark Dickinson in :issue:`45569`.) -* The :mod:`tkinter` package now requires Tcl/Tk version 8.5.12 or newer. - (Contributed by Serhiy Storchaka in :issue:`46996`.) +.. _whatsnew311-c-api: C API Changes ============= +.. _whatsnew311-c-api-new-features: + New Features ------------ @@ -1877,9 +2233,25 @@ New Features * Added the :c:member:`PyConfig.safe_path` member. (Contributed by Victor Stinner in :gh:`57684`.) + +.. _whatsnew311-c-api-porting: + Porting to Python 3.11 ---------------------- +.. _whatsnew311-pep670: + +* Some macros have been converted to static inline functions to avoid + `macro pitfalls `_. + The change should be mostly transparent to users, + as the replacement functions will cast their arguments to the expected types + to avoid compiler warnings due to static type checks. + However, when the limited C API is set to >=3.11, + these casts are not done, + and callers will need to cast arguments to their expected types. + See :pep:`670` for more details. + (Contributed by Victor Stinner and Erlend E. Aasland in :gh:`89653`.) + * :c:func:`PyErr_SetExcInfo()` no longer uses the ``type`` and ``traceback`` arguments, the interpreter now derives those values from the exception instance (the ``value`` argument). 
The function still steals references @@ -2163,6 +2535,9 @@ Porting to Python 3.11 paths and then modify them, finish initialization and use :c:func:`PySys_GetObject` to retrieve :data:`sys.path` as a Python list object and modify it directly. + +.. _whatsnew311-c-api-deprecated: + Deprecated ---------- @@ -2188,6 +2563,35 @@ Deprecated * Deprecate the ``ob_shash`` member of the :c:type:`PyBytesObject`. Use :c:func:`PyObject_Hash` instead. (Contributed by Inada Naoki in :issue:`46864`.) + +.. _whatsnew311-c-api-pending-removal: + +Pending Removal in Python 3.12 +------------------------------ + +The following C APIs have been deprecated in earlier Python releases, +and will be removed in Python 3.12. + +* :c:func:`PyUnicode_AS_DATA` +* :c:func:`PyUnicode_AS_UNICODE` +* :c:func:`PyUnicode_AsUnicodeAndSize` +* :c:func:`PyUnicode_AsUnicode` +* :c:func:`PyUnicode_FromUnicode` +* :c:func:`PyUnicode_GET_DATA_SIZE` +* :c:func:`PyUnicode_GET_SIZE` +* :c:func:`PyUnicode_GetSize` +* :c:func:`PyUnicode_IS_COMPACT` +* :c:func:`PyUnicode_IS_READY` +* :c:func:`PyUnicode_READY` +* :c:func:`Py_UNICODE_WSTR_LENGTH` +* :c:func:`_PyUnicode_AsUnicode` +* :c:macro:`PyUnicode_WCHAR_KIND` +* :c:type:`PyUnicodeObject` +* :c:func:`PyUnicode_InternImmortal()` + + +.. _whatsnew311-c-api-removed: + Removed ------- @@ -2245,6 +2649,8 @@ Removed API). (Contributed by Victor Stinner in :issue:`45412`.) +.. _whatsnew311-pep624: + * Remove the :c:type:`Py_UNICODE` encoder APIs, as they have been deprecated since Python 3.3, are little used diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 507ba35221467e..dff4de621b4c49 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -37,11 +37,11 @@ * Credit the author of a patch or bugfix. Just the name is sufficient; the e-mail address isn't necessary. - * It's helpful to add the bug/patch number as a comment: + * It's helpful to add the issue number as a comment: XXX Describe the transmogrify() function added to the socket module. - (Contributed by P.Y. Developer in :issue:`12345`.) + (Contributed by P.Y. Developer in :gh:`12345`.) This saves the maintainer the effort of going through the VCS log when researching a change. @@ -70,10 +70,73 @@ Important deprecations, removals or restrictions: * :pep:`623`, Remove wstr from Unicode +* :pep:`632`, Remove the ``distutils`` package. + +Improved Error Messages +======================= + +* Modules from the standard library are now potentially suggested as part of + the error messages displayed by the interpreter when a :exc:`NameError` is + raised to the top level. Contributed by Pablo Galindo in :gh:`98254`. + + >>> sys.version_info + Traceback (most recent call last): + File "", line 1, in + NameError: name 'sys' is not defined. Did you forget to import 'sys'? + +* Improve the error suggestion for :exc:`NameError` exceptions for instances. + Now if a :exc:`NameError` is raised in a method and the instance has an + attribute that's exactly equal to the name in the exception, the suggestion + will include ``self.`` instead of the closest match in the method + scope. Contributed by Pablo Galindo in :gh:`99139`. + + >>> class A: + ... def __init__(self): + ... self.blech = 1 + ... + ... def foo(self): + ... somethin = blech + + >>> A().foo() + Traceback (most recent call last): + File "", line 1 + somethin = blech + ^^^^^ + NameError: name 'blech' is not defined. Did you mean: 'self.blech'? 
+ + +* Improve the :exc:`SyntaxError` error message when the user types ``import x + from y`` instead of ``from y import x``. Contributed by Pablo Galindo in :gh:`98931`. + + >>> import a.y.z from b.y.z + Traceback (most recent call last): + File "<stdin>", line 1 + import a.y.z from b.y.z + ^^^^^^^^^^^^^^^^^^^^^^^ + SyntaxError: Did you mean to use 'from ... import ...' instead? + +* :exc:`ImportError` exceptions raised from failed ``from <module> import + <name>`` statements now include suggestions for the value of ``<name>`` based on the + available names in ``<module>``. Contributed by Pablo Galindo in :gh:`91058`. + + >>> from collections import chainmap + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ImportError: cannot import name 'chainmap' from 'collections'. Did you mean: 'ChainMap'? + New Features ============ +* Add :ref:`perf_profiling` through the new + environment variable :envvar:`PYTHONPERFSUPPORT`, + the new command-line option :option:`-X perf <-X>`, + as well as the new :func:`sys.activate_stack_trampoline`, + :func:`sys.deactivate_stack_trampoline`, + and :func:`sys.is_stack_trampoline_active` APIs. + (Design by Pablo Galindo. Contributed by Pablo Galindo and Christian Heimes + with contributions from Gregory P. Smith [Google] and Mark Shannon + in :gh:`96123`.) Other Language Changes @@ -93,6 +156,29 @@ Other Language Changes when parsing source code containing null bytes. (Contributed by Pablo Galindo in :gh:`96670`.) +* The Garbage Collector now runs only on the eval breaker mechanism of the + Python bytecode evaluation loop instead of on object allocations. The GC can + also run when :c:func:`PyErr_CheckSignals` is called so C extensions that + need to run for a long time without executing any Python code also have a + chance to execute the GC periodically. (Contributed by Pablo Galindo in + :gh:`97922`.) + +* A backslash-character pair that is not a valid escape sequence now generates + a :exc:`SyntaxWarning`, instead of :exc:`DeprecationWarning`. + For example, ``re.compile("\d+\.\d+")`` now emits a :exc:`SyntaxWarning` + (``"\d"`` is an invalid escape sequence); use raw strings for regular + expressions: ``re.compile(r"\d+\.\d+")``. + In a future Python version, :exc:`SyntaxError` will eventually be raised, + instead of :exc:`SyntaxWarning`. + (Contributed by Victor Stinner in :gh:`98401`.) + +* Octal escapes with values larger than ``0o377`` (e.g. ``"\477"``), deprecated + in Python 3.11, now produce a :exc:`SyntaxWarning`, instead of + :exc:`DeprecationWarning`. + In a future Python version, they will eventually raise a :exc:`SyntaxError`. + (Contributed by Victor Stinner in :gh:`98401`.) + + New Modules =========== @@ -102,6 +188,40 @@ Improved Modules ================ +array +----- + +* The :class:`array.array` class now supports subscripting, making it a + :term:`generic type`. (Contributed by Jelle Zijlstra in :gh:`98658`.) + +asyncio +------- + +* On Linux, :mod:`asyncio` uses :class:`~asyncio.PidfdChildWatcher` by default + if :func:`os.pidfd_open` is available and functional instead of + :class:`~asyncio.ThreadedChildWatcher`. + (Contributed by Kumar Aditya in :gh:`98024`.) + +* The child watcher classes :class:`~asyncio.MultiLoopChildWatcher`, + :class:`~asyncio.FastChildWatcher`, :class:`~asyncio.AbstractChildWatcher` + and :class:`~asyncio.SafeChildWatcher` are deprecated and + will be removed in Python 3.14.
It is recommended to not manually + configure a child watcher as the event loop now uses the best available + child watcher for each platform (:class:`~asyncio.PidfdChildWatcher` + if supported and :class:`~asyncio.ThreadedChildWatcher` otherwise). + (Contributed by Kumar Aditya in :gh:`94597`.) + +* :func:`asyncio.set_child_watcher`, :func:`asyncio.get_child_watcher`, + :meth:`asyncio.AbstractEventLoopPolicy.set_child_watcher` and + :meth:`asyncio.AbstractEventLoopPolicy.get_child_watcher` are deprecated + and will be removed in Python 3.14. + (Contributed by Kumar Aditya in :gh:`94597`.) + +* Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying + a custom event loop factory. + (Contributed by Kumar Aditya in :gh:`99388`.) + + pathlib ------- @@ -109,6 +229,15 @@ pathlib all file or directory names within them, similar to :func:`os.walk`. (Contributed by Stanislav Zmiev in :gh:`90385`.) +* Add *walk_up* optional parameter to :meth:`pathlib.PurePath.relative_to` + to allow the insertion of ``..`` entries in the result; this behavior is + more consistent with :func:`os.path.relpath`. + (Contributed by Domenico Ragusa in :issue:`40358`.) + +* Add :meth:`pathlib.Path.is_junction` as a proxy to :func:`os.path.isjunction`. + (Contributed by Charles Machalow in :gh:`99547`.) + + dis --- @@ -127,6 +256,14 @@ os for a process with :func:`os.pidfd_open` in non-blocking mode. (Contributed by Kumar Aditya in :gh:`93312`.) +* Add :func:`os.path.isjunction` to check if a given path is a junction. + (Contributed by Charles Machalow in :gh:`99547`.) + +* :class:`os.DirEntry` now includes an :meth:`os.DirEntry.is_junction` + method to check if the entry is a junction. + (Contributed by Charles Machalow in :gh:`99547`.) + + shutil ------ @@ -143,6 +280,13 @@ sqlite3 * Add a :ref:`command-line interface `. (Contributed by Erlend E. Aasland in :gh:`77617`.) +* Add the :attr:`~sqlite3.Connection.autocommit` attribute + to :class:`~sqlite3.Connection` + and the *autocommit* parameter to :func:`~sqlite3.connect` + to control :pep:`249`-compliant + :ref:`transaction handling `. + (Contributed by Erlend E. Aasland in :gh:`83638`.) + threading --------- @@ -163,6 +307,19 @@ tempfile The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter *delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +sys +--- + +* Add :func:`sys.activate_stack_trampoline` and + :func:`sys.deactivate_stack_trampoline` for activating and deactivating + stack profiler trampolines, + and :func:`sys.is_stack_trampoline_active` for querying if stack profiler + trampolines are active. + (Contributed by Pablo Galindo and Christian Heimes + with contributions from Gregory P. Smith [Google] and Mark Shannon + in :gh:`96123`.) + + Optimizations ============= @@ -174,6 +331,11 @@ Optimizations process, which improves performance by 1-5%. (Contributed by Kevin Modzelewski in :gh:`90536`.) +* Speed up the regular expression substitution (functions :func:`re.sub` and + :func:`re.subn` and corresponding :class:`re.Pattern` methods) for + replacement strings containing group references by 2--3 times. + (Contributed by Serhiy Storchaka in :gh:`91524`.) + CPython bytecode changes ======================== @@ -280,8 +442,8 @@ Pending Removal in Python 3.14 * Creating :c:data:`immutable types ` with mutable bases using the C API. -* ``__package__`` will cease to be set or taken into consideration by - the import system (:gh:`97879`). 
+* ``__package__`` and ``__cached__`` will cease to be set or taken + into consideration by the import system (:gh:`97879`). Pending Removal in Future Versions @@ -305,6 +467,12 @@ although there is currently no date scheduled for their removal. Removed ======= +* Remove the ``distutils`` package. It was deprecated in Python 3.10 by + :pep:`632` "Deprecate distutils module". For projects still using + ``distutils`` and cannot be updated to something else, the ``setuptools`` + project can be installed: it still provides ``distutils``. + (Contributed by Victor Stinner in :gh:`92584`.) + * Removed many old deprecated :mod:`unittest` features: - A number of :class:`~unittest.TestCase` method aliases: @@ -370,10 +538,6 @@ Removed (Contributed by Erlend E. Aasland in :gh:`92548`.) -* The ``--experimental-isolated-subinterpreters`` configure flag - (and corresponding ``EXPERIMENTAL_ISOLATED_SUBINTERPRETERS``) - have been removed. - * ``smtpd`` has been removed according to the schedule in :pep:`594`, having been deprecated in Python 3.4.7 and 3.5.4. Use aiosmtpd_ PyPI module or any other @@ -382,6 +546,12 @@ Removed .. _aiosmtpd: https://pypi.org/project/aiosmtpd/ +* ``asynchat`` and ``asyncore`` have been removed + according to the schedule in :pep:`594`, + having been deprecated in Python 3.6. + Use :mod:`asyncio` instead. + (Contributed by Nikita Sobolev in :gh:`96580`.) + * Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) function is a built-in function. Since Python 3.10, :func:`_pyio.open` is @@ -443,6 +613,21 @@ Removed * ``importlib.util.set_package`` has been removed. (Contributed by Brett Cannon in :gh:`65961`.) +* Removed the ``suspicious`` rule from the documentation Makefile, and + removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint + `_. + (Contributed by Julien Palard in :gh:`98179`.) + +* Remove the *keyfile*, *certfile* and *check_hostname* parameters, deprecated + since Python 3.6, in modules: :mod:`ftplib`, :mod:`http.client`, + :mod:`imaplib`, :mod:`poplib` and :mod:`smtplib`. Use the *context* parameter + (*ssl_context* in :mod:`imaplib`) instead. + (Contributed by Victor Stinner in :gh:`94172`.) + +* :mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the + *context* parameter instead. + (Contributed by Victor Stinner in :gh:`94172`.) + Porting to Python 3.12 ====================== @@ -483,6 +668,11 @@ Changes in the Python API in Python 3.9. (Contributed by Victor Stinner in :gh:`94352`.) +* The :mod:`os` module no longer accepts bytes-like paths, like + :class:`bytearray` and :class:`memoryview` types: only the exact + :class:`bytes` type is accepted for bytes strings. + (Contributed by Victor Stinner in :gh:`98393`.) + Build Changes ============= @@ -502,6 +692,12 @@ Build Changes if the Clang compiler accepts the flag. (Contributed by Dong-hee Na in :gh:`89536`.) +* Add ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall` + options (default: ``-j0``) in ``make install``. Also merged the 3 + ``compileall`` commands into a single command to build .pyc files for all + optimization levels (0, 1, 2) at once. + (Contributed by Victor Stinner in :gh:`99289`.) + C API Changes ============= @@ -536,6 +732,18 @@ New Features ``__dict__`` and weakrefs with less bookkeeping, using less memory and with faster access. 
+* API for performing calls using + :ref:`the vectorcall protocol ` was added to the + :ref:`Limited API `: + + * :c:func:`PyObject_Vectorcall` + * :c:func:`PyObject_VectorcallMethod` + * :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` + + This means that both the incoming and outgoing ends of the vector call + protocol are now available in the :ref:`Limited API `. (Contributed + by Wenzel Jakob in :gh:`98586`.) + * Added two new public functions, :c:func:`PyEval_SetProfileAllThreads` and :c:func:`PyEval_SetTraceAllThreads`, that allow to set tracing and profiling @@ -546,6 +754,21 @@ New Features which sets the vectorcall field of a given :c:type:`PyFunctionObject`. (Contributed by Andrew Frost in :gh:`92257`.) +* The C API now permits registering callbacks via :c:func:`PyDict_AddWatcher`, + :c:func:`PyDict_AddWatch` and related APIs to be called whenever a dictionary + is modified. This is intended for use by optimizing interpreters, JIT + compilers, or debuggers. + (Contributed by Carl Meyer in :gh:`91052`.) + +* Added :c:func:`PyType_AddWatcher` and :c:func:`PyType_Watch` API to register + callbacks to receive notification on changes to a type. + (Contributed by Carl Meyer in :gh:`91051`.) + + +* Add :c:func:`PyFrame_GetVar` and :c:func:`PyFrame_GetVarString` functions to + get a frame variable by its name. + (Contributed by Victor Stinner in :gh:`91248`.) + Porting to Python 3.12 ---------------------- @@ -588,12 +811,17 @@ Porting to Python 3.12 ``tp_weaklistoffset``, respectively. The use of ``tp_dictoffset`` and ``tp_weaklistoffset`` is still supported, but does not fully support multiple inheritance - (:gh: `95589`), and performance may be worse. + (:gh:`95589`), and performance may be worse. Classes declaring :const:`Py_TPFLAGS_MANAGED_DICT` should call :c:func:`_PyObject_VisitManagedDict` and :c:func:`_PyObject_ClearManagedDict` to traverse and clear their instance's dictionaries. To clear weakrefs, call :c:func:`PyObject_ClearWeakRefs`, as before. +* The :c:func:`PyUnicode_FSDecoder` function no longer accepts bytes-like + paths, like :class:`bytearray` and :class:`memoryview` types: only the exact + :class:`bytes` type is accepted for bytes strings. + (Contributed by Victor Stinner in :gh:`98393`.) + Deprecated ---------- @@ -628,6 +856,37 @@ Deprecated * Creating :c:data:`immutable types ` with mutable bases is deprecated and will be disabled in Python 3.14. +* The ``structmember.h`` header is deprecated, though it continues to be + available and there are no plans to remove it. + + Its contents are now available just by including ``Python.h``, + with a ``Py`` prefix added if it was missing: + + - :c:struct:`PyMemberDef`, :c:func:`PyMember_GetOne` and + :c:func:`PyMember_SetOne` + - Type macros like :c:macro:`Py_T_INT`, :c:macro:`Py_T_DOUBLE`, etc. + (previously ``T_INT``, ``T_DOUBLE``, etc.) + - The flags :c:macro:`Py_READONLY` (previously ``READONLY``) and + :c:macro:`Py_AUDIT_READ` (previously all uppercase) + + Several items are not exposed from ``Python.h``: + + - :c:macro:`T_OBJECT` (use :c:macro:`Py_T_OBJECT_EX`) + - :c:macro:`T_NONE` (previously undocumented, and pretty quirky) + - The macro ``WRITE_RESTRICTED`` which does nothing. + - The macros ``RESTRICTED`` and ``READ_RESTRICTED``, equivalents of + :c:macro:`Py_AUDIT_READ`. + - In some configurations, ```` is not included from ``Python.h``. + It should be included manually when using ``offsetof()``. + + The deprecated header continues to provide its original + contents under the original names. 
+ Your old code can stay unchanged, unless the extra include and non-namespaced + macros bother you greatly. + + (Contributed in :gh:`47146` by Petr Viktorin, based on + earlier work by Alexander Belopolsky and Matthias Braun.) + Removed ------- @@ -652,3 +911,7 @@ Removed * Remove the ``PyUnicode_InternImmortal()`` function and the ``SSTATE_INTERNED_IMMORTAL`` macro. (Contributed by Victor Stinner in :gh:`85858`.) + +* Remove ``_use_broken_old_ctypes_structure_semantics_`` flag + from :mod:`ctypes` module. + (Contributed by Nikita Sobolev in :gh:`99285`.) diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst index 65100b0be36a5a..6037db9f954d26 100644 --- a/Doc/whatsnew/3.2.rst +++ b/Doc/whatsnew/3.2.rst @@ -1746,7 +1746,7 @@ names. instead of module names for running specific tests (:issue:`10620`). The new test discovery can find tests within packages, locating any test importable from the top-level directory. The top-level directory can be specified with - the `-t` option, a pattern for matching files with ``-p``, and a directory to + the ``-t`` option, a pattern for matching files with ``-p``, and a directory to start discovery with ``-s``: .. code-block:: shell-session @@ -1857,7 +1857,7 @@ asyncore :class:`asyncore.dispatcher` now provides a :meth:`~asyncore.dispatcher.handle_accepted()` method -returning a `(sock, addr)` pair which is called when a connection has actually +returning a ``(sock, addr)`` pair which is called when a connection has actually been established with a new remote endpoint. This is supposed to be used as a replacement for old :meth:`~asyncore.dispatcher.handle_accept()` and avoids the user to call :meth:`~asyncore.dispatcher.accept()` directly. diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index fef1a8ac4c0101..96a632577b2c56 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -2389,10 +2389,10 @@ Porting Python code :attr:`sys.path_importer_cache` where it represents the use of implicit finders, but semantically it should not change anything. -* :class:`importlib.abc.Finder` no longer specifies a `find_module()` abstract +* :class:`importlib.abc.Finder` no longer specifies a ``find_module()`` abstract method that must be implemented. If you were relying on subclasses to implement that method, make sure to check for the method's existence first. - You will probably want to check for `find_loader()` first, though, in the + You will probably want to check for ``find_loader()`` first, though, in the case of working with :term:`path entry finders `. * :mod:`pkgutil` has been converted to use :mod:`importlib` internally. This diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index 2c83c7ba3ad3b0..f872579ef546f5 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -2469,11 +2469,11 @@ Changes in the Python API ``opt-`` tag in ``.pyc`` file names. The :func:`importlib.util.cache_from_source` has gained an *optimization* parameter to help control the ``opt-`` tag. Because of this, the - *debug_override* parameter of the function is now deprecated. `.pyo` files + *debug_override* parameter of the function is now deprecated. ``.pyo`` files are also no longer supported as a file argument to the Python interpreter and thus serve no purpose when distributed on their own (i.e. sourceless code distribution). 
Due to the fact that the magic number for bytecode has changed - in Python 3.5, all old `.pyo` files from previous versions of Python are + in Python 3.5, all old ``.pyo`` files from previous versions of Python are invalid regardless of this PEP. * The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_FD_FRAMES` diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index dfb56770ae7026..e4294c88b58572 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -960,8 +960,8 @@ contextlib The :class:`contextlib.AbstractContextManager` class has been added to provide an abstract base class for context managers. It provides a -sensible default implementation for `__enter__()` which returns -``self`` and leaves `__exit__()` an abstract method. A matching +sensible default implementation for ``__enter__()`` which returns +``self`` and leaves ``__exit__()`` an abstract method. A matching class has been added to the :mod:`typing` module as :class:`typing.ContextManager`. (Contributed by Brett Cannon in :issue:`25609`.) @@ -1388,7 +1388,7 @@ are treated as punctuation. site ---- -When specifying paths to add to :attr:`sys.path` in a `.pth` file, +When specifying paths to add to :attr:`sys.path` in a ``.pth`` file, you may now specify file paths on top of directories (e.g. zip files). (Contributed by Wolfgang Langner in :issue:`26587`). @@ -2052,6 +2052,8 @@ tkinter The :mod:`tkinter.tix` module is now deprecated. :mod:`tkinter` users should use :mod:`tkinter.ttk` instead. +.. _whatsnew36-venv: + venv ~~~~ diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index f06cf29c713f99..df3b636cb9ec46 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -2497,7 +2497,7 @@ number of other issues). Some known details affected: * :c:func:`PySys_AddWarnOptionUnicode` is not currently usable by embedding applications due to the requirement to create a Unicode object prior to - calling `Py_Initialize`. Use :c:func:`PySys_AddWarnOption` instead. + calling ``Py_Initialize``. Use :c:func:`PySys_AddWarnOption` instead. * warnings filters added by an embedding application with :c:func:`PySys_AddWarnOption` should now more consistently take precedence diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 95aa6f7a8582ae..37a6cf24e54562 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -122,8 +122,8 @@ Positional-only parameters There is a new function parameter syntax ``/`` to indicate that some function parameters must be specified positionally and cannot be used as keyword arguments. This is the same notation shown by ``help()`` for C -functions annotated with Larry Hastings' `Argument Clinic -`_ tool. +functions annotated with Larry Hastings' +:ref:`Argument Clinic ` tool. In the following example, parameters *a* and *b* are positional-only, while *c* or *d* can be positional or keyword, and *e* or *f* are diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index ff01a65772991f..624e71f9254c45 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -500,7 +500,7 @@ Reedy in :issue:`40468`.) Move the indent space setting from the Font tab to the new Windows tab. (Contributed by Mark Roseman and Terry Jan Reedy in :issue:`33962`.) -Apply syntax highlighting to `.pyi` files. (Contributed by Alex +Apply syntax highlighting to ``.pyi`` files. (Contributed by Alex Waygood and Terry Jan Reedy in :issue:`45447`.) 
imaplib diff --git a/Grammar/python.gram b/Grammar/python.gram index 439f08a2cd131b..2498251293e80e 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -194,7 +194,10 @@ yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) } assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) } -import_stmt[stmt_ty]: import_name | import_from +import_stmt[stmt_ty]: + | invalid_import + | import_name + | import_from # Import statements # ----------------- @@ -954,6 +957,7 @@ kwargs[asdl_seq*]: | ','.kwarg_or_double_starred+ starred_expression[expr_ty]: + | invalid_starred_expression | '*' a=expression { _PyAST_Starred(a, Load, EXTRA) } kwarg_or_starred[KeywordOrStarred*]: @@ -1080,6 +1084,8 @@ invalid_arguments: RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, _PyPegen_get_last_comprehension_item(PyPegen_last_item(b, comprehension_ty)), "Generator expression must be parenthesized") } | a=NAME b='=' expression for_if_clauses { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "invalid syntax. Maybe you meant '==' or ':=' instead of '='?")} + | (args ',')? a=NAME b='=' &(',' | ')') { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "expected argument value expression")} | a=args b=for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a, b) } | args ',' a=expression b=for_if_clauses { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, _PyPegen_get_last_comprehension_item(PyPegen_last_item(b, comprehension_ty)), "Generator expression must be parenthesized") } @@ -1092,6 +1098,8 @@ invalid_kwarg: | !(NAME '=') a=expression b='=' { RAISE_SYNTAX_ERROR_KNOWN_RANGE( a, b, "expression cannot contain assignment, perhaps you meant \"==\"?") } + | a='**' expression '=' b=expression { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to keyword argument unpacking") } # IMPORTANT: Note that the "_without_invalid" suffix causes the rule to not call invalid rules under it expression_without_invalid[expr_ty]: @@ -1230,6 +1238,10 @@ invalid_group: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use starred expression here") } | '(' a='**' expression ')' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use double starred expression here") } +invalid_import: + | a='import' dotted_name 'from' dotted_name { + RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") } + invalid_import_from_targets: | import_from_as_names ',' NEWLINE { RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } @@ -1247,8 +1259,10 @@ invalid_try_stmt: | a='try' ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'try' statement on line %d", a->lineno) } | 'try' ':' block !('except' | 'finally') { RAISE_SYNTAX_ERROR("expected 'except' or 'finally' block") } - | 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* { - RAISE_SYNTAX_ERROR("cannot have both 'except' and 'except*' on the same 'try'") } + | 'try' ':' block* except_block+ a='except' b='*' expression ['as' NAME] ':' { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot have both 'except' and 'except*' on the same 'try'") } + | 'try' ':' block* except_star_block+ a='except' [expression ['as' NAME]] ':' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot have both 'except' and 'except*' on the same 'try'") } invalid_except_stmt: | 'except' '*'? 
a=expression ',' expressions ['as' NAME ] ':' { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized") } @@ -1319,3 +1333,5 @@ invalid_kvpair: RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->end_col_offset - 1, a->end_lineno, -1, "':' expected after dictionary key") } | expression ':' a='*' bitwise_or { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "cannot use a starred expression in a dictionary value") } | expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expression expected after dictionary key and ':'") } +invalid_starred_expression: + | a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to iterable argument unpacking") } diff --git a/Include/abstract.h b/Include/abstract.h index 784ff7e928676f..064b0300b51ea2 100644 --- a/Include/abstract.h +++ b/Include/abstract.h @@ -238,6 +238,22 @@ PyAPI_FUNC(Py_ssize_t) PyVectorcall_NARGS(size_t nargsf); "tuple" and keyword arguments "dict". "dict" may also be NULL */ PyAPI_FUNC(PyObject *) PyVectorcall_Call(PyObject *callable, PyObject *tuple, PyObject *dict); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030C0000 +#define PY_VECTORCALL_ARGUMENTS_OFFSET \ + (_Py_STATIC_CAST(size_t, 1) << (8 * sizeof(size_t) - 1)) + +/* Perform a PEP 590-style vector call on 'callable' */ +PyAPI_FUNC(PyObject *) PyObject_Vectorcall( + PyObject *callable, + PyObject *const *args, + size_t nargsf, + PyObject *kwnames); + +/* Call the method 'name' on args[0] with arguments in args[1..nargsf-1]. */ +PyAPI_FUNC(PyObject *) PyObject_VectorcallMethod( + PyObject *name, PyObject *const *args, + size_t nargsf, PyObject *kwnames); +#endif /* Implemented elsewhere: diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h index 6da29cde9f6092..3b27aab2fc4798 100644 --- a/Include/cpython/abstract.h +++ b/Include/cpython/abstract.h @@ -50,9 +50,6 @@ PyAPI_FUNC(PyObject *) _PyObject_MakeTpCall( PyObject *const *args, Py_ssize_t nargs, PyObject *keywords); -#define PY_VECTORCALL_ARGUMENTS_OFFSET \ - (_Py_STATIC_CAST(size_t, 1) << (8 * sizeof(size_t) - 1)) - // PyVectorcall_NARGS() is exported as a function for the stable ABI. // Here (when we are not using the stable ABI), the name is overridden to // call a static inline function for best performance. 
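The ``Include/abstract.h`` hunk above exposes ``PyObject_Vectorcall``, ``PyObject_VectorcallMethod`` and ``PY_VECTORCALL_ARGUMENTS_OFFSET`` once ``Py_LIMITED_API`` is set to ``0x030C0000`` or newer. As a minimal sketch of how an extension might use the newly exported functions (the helper names below are hypothetical and not part of this patch)::

    /* Hypothetical helpers for illustration only -- not part of this patch. */
    #include <Python.h>

    /* Call "callable(a, b)".  The spare leading slot lets us pass
     * PY_VECTORCALL_ARGUMENTS_OFFSET, which permits the callee to reuse
     * args[-1] temporarily (e.g. to prepend "self") instead of copying
     * the argument array. */
    static PyObject *
    call_with_two_args(PyObject *callable, PyObject *a, PyObject *b)
    {
        PyObject *args[3] = {NULL, a, b};
        return PyObject_Vectorcall(callable, args + 1,
                                   2 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
    }

    /* Call "obj.method(arg)" without creating a bound method object.
     * args[0] is the receiver; "name" must be a Python str. */
    static PyObject *
    call_method_one_arg(PyObject *obj, PyObject *name, PyObject *arg)
    {
        PyObject *args[2] = {obj, arg};
        return PyObject_VectorcallMethod(name, args,
                                         2 | PY_VECTORCALL_ARGUMENTS_OFFSET,
                                         NULL);
    }

Both helpers return a new reference on success and NULL with an exception set on failure, matching the behaviour of ``PyObject_Call()``.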
@@ -65,12 +62,6 @@ _PyVectorcall_NARGS(size_t n) PyAPI_FUNC(vectorcallfunc) PyVectorcall_Function(PyObject *callable); -PyAPI_FUNC(PyObject *) PyObject_Vectorcall( - PyObject *callable, - PyObject *const *args, - size_t nargsf, - PyObject *kwnames); - // Backwards compatibility aliases for API that was provisional in Python 3.8 #define _PyObject_Vectorcall PyObject_Vectorcall #define _PyObject_VectorcallMethod PyObject_VectorcallMethod @@ -96,10 +87,6 @@ PyAPI_FUNC(PyObject *) _PyObject_FastCall( PyAPI_FUNC(PyObject *) PyObject_CallOneArg(PyObject *func, PyObject *arg); -PyAPI_FUNC(PyObject *) PyObject_VectorcallMethod( - PyObject *name, PyObject *const *args, - size_t nargsf, PyObject *kwnames); - static inline PyObject * PyObject_CallMethodNoArgs(PyObject *self, PyObject *name) { diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 7ce69022557af0..ebac0b12a461bc 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -32,6 +32,13 @@ typedef uint16_t _Py_CODEUNIT; #define _Py_SET_OPCODE(word, opcode) \ do { ((unsigned char *)&(word))[0] = (opcode); } while (0) +typedef struct { + PyObject *_co_code; + PyObject *_co_varnames; + PyObject *_co_cellvars; + PyObject *_co_freevars; +} _PyCoCached; + // To avoid repeating ourselves in deepfreeze.py, all PyCodeObject members are // defined in this macro: #define _PyCode_DEF(SIZE) { \ @@ -63,7 +70,6 @@ typedef uint16_t _Py_CODEUNIT; PyObject *co_exceptiontable; /* Byte string encoding exception handling \ table */ \ int co_flags; /* CO_..., see below */ \ - short co_warmup; /* Warmup counter for quickening */ \ short _co_linearray_entry_size; /* Size of each entry in _co_linearray */ \ \ /* The rest are not so impactful on performance. */ \ @@ -90,7 +96,7 @@ typedef uint16_t _Py_CODEUNIT; PyObject *co_qualname; /* unicode (qualname, for reference) */ \ PyObject *co_linetable; /* bytes object that holds location info */ \ PyObject *co_weakreflist; /* to support weakrefs to code objects */ \ - PyObject *_co_code; /* cached co_code object/attribute */ \ + _PyCoCached *_co_cached; /* cached co_* attributes */ \ int _co_firsttraceable; /* index of first traceable instruction */ \ char *_co_linearray; /* array of line offsets */ \ /* Scratch space for extra data relating to the code object. 
\ diff --git a/Include/cpython/compile.h b/Include/cpython/compile.h index 518a3764992954..f5a62a8ec6dd0c 100644 --- a/Include/cpython/compile.h +++ b/Include/cpython/compile.h @@ -31,11 +31,26 @@ typedef struct { #define _PyCompilerFlags_INIT \ (PyCompilerFlags){.cf_flags = 0, .cf_feature_version = PY_MINOR_VERSION} +/* source location information */ +typedef struct { + int lineno; + int end_lineno; + int col_offset; + int end_col_offset; +} _PyCompilerSrcLocation; + +#define SRC_LOCATION_FROM_AST(n) \ + (_PyCompilerSrcLocation){ \ + .lineno = (n)->lineno, \ + .end_lineno = (n)->end_lineno, \ + .col_offset = (n)->col_offset, \ + .end_col_offset = (n)->end_col_offset } + /* Future feature support */ typedef struct { - int ff_features; /* flags set by future statements */ - int ff_lineno; /* line number of last future statement */ + int ff_features; /* flags set by future statements */ + _PyCompilerSrcLocation ff_location; /* location of last future statement */ } PyFutureFeatures; #define FUTURE_NESTED_SCOPES "nested_scopes" diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h index 565ad791a6cb28..2dff59ef0b8a6b 100644 --- a/Include/cpython/dictobject.h +++ b/Include/cpython/dictobject.h @@ -83,3 +83,27 @@ typedef struct { PyAPI_FUNC(PyObject *) _PyDictView_New(PyObject *, PyTypeObject *); PyAPI_FUNC(PyObject *) _PyDictView_Intersect(PyObject* self, PyObject *other); + +/* Dictionary watchers */ + +typedef enum { + PyDict_EVENT_ADDED, + PyDict_EVENT_MODIFIED, + PyDict_EVENT_DELETED, + PyDict_EVENT_CLONED, + PyDict_EVENT_CLEARED, + PyDict_EVENT_DEALLOCATED, +} PyDict_WatchEvent; + +// Callback to be invoked when a watched dict is cleared, dealloced, or modified. +// In clear/dealloc case, key and new_value will be NULL. Otherwise, new_value will be the +// new value for key, NULL if key is being deleted. +typedef int(*PyDict_WatchCallback)(PyDict_WatchEvent event, PyObject* dict, PyObject* key, PyObject* new_value); + +// Register/unregister a dict-watcher callback +PyAPI_FUNC(int) PyDict_AddWatcher(PyDict_WatchCallback callback); +PyAPI_FUNC(int) PyDict_ClearWatcher(int watcher_id); + +// Mark given dictionary as "watched" (callback will be called if it is modified) +PyAPI_FUNC(int) PyDict_Watch(int watcher_id, PyObject* dict); +PyAPI_FUNC(int) PyDict_Unwatch(int watcher_id, PyObject* dict); diff --git a/Include/cpython/funcobject.h b/Include/cpython/funcobject.h index dd8f20b2c20b39..5979febc2e3421 100644 --- a/Include/cpython/funcobject.h +++ b/Include/cpython/funcobject.h @@ -131,6 +131,55 @@ PyAPI_DATA(PyTypeObject) PyStaticMethod_Type; PyAPI_FUNC(PyObject *) PyClassMethod_New(PyObject *); PyAPI_FUNC(PyObject *) PyStaticMethod_New(PyObject *); +#define FOREACH_FUNC_EVENT(V) \ + V(CREATE) \ + V(DESTROY) \ + V(MODIFY_CODE) \ + V(MODIFY_DEFAULTS) \ + V(MODIFY_KWDEFAULTS) + +typedef enum { + #define DEF_EVENT(EVENT) PyFunction_EVENT_##EVENT, + FOREACH_FUNC_EVENT(DEF_EVENT) + #undef DEF_EVENT +} PyFunction_WatchEvent; + +/* + * A callback that is invoked for different events in a function's lifecycle. + * + * The callback is invoked with a borrowed reference to func, after it is + * created and before it is modified or destroyed. The callback should not + * modify func. + * + * When a function's code object, defaults, or kwdefaults are modified the + * callback will be invoked with the respective event and new_value will + * contain a borrowed reference to the new value that is about to be stored in + * the function. Otherwise the third argument is NULL. 
+ * + * If the callback returns with an exception set, it must return -1. Otherwise + * it should return 0. + */ +typedef int (*PyFunction_WatchCallback)( + PyFunction_WatchEvent event, + PyFunctionObject *func, + PyObject *new_value); + +/* + * Register a per-interpreter callback that will be invoked for function lifecycle + * events. + * + * Returns a handle that may be passed to PyFunction_ClearWatcher on success, + * or -1 and sets an error if no more handles are available. + */ +PyAPI_FUNC(int) PyFunction_AddWatcher(PyFunction_WatchCallback callback); + +/* + * Clear the watcher associated with the watcher_id handle. + * + * Returns 0 on success or -1 if no watcher exists for the supplied id. + */ +PyAPI_FUNC(int) PyFunction_ClearWatcher(int watcher_id); + #ifdef __cplusplus } #endif diff --git a/Include/cpython/import.h b/Include/cpython/import.h index a69b4f34def342..a58801b47f1bec 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -25,6 +25,7 @@ struct _inittab { const char *name; /* ASCII encoded string */ PyObject* (*initfunc)(void); }; +// This is not used after Py_Initialize() is called. PyAPI_DATA(struct _inittab *) PyImport_Inittab; PyAPI_FUNC(int) PyImport_ExtendInittab(struct _inittab *newtab); diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index c22c8d52b4f2ec..6ce42b4c09502f 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -213,10 +213,6 @@ typedef struct PyConfig { // If equal to 0, stop Python initialization before the "main" phase. int _init_main; - // If non-zero, disallow threads, subprocesses, and fork. - // Default: 0. - int _isolated_interpreter; - // If non-zero, we believe we're running from a source tree. int _is_python_build; } PyConfig; @@ -245,6 +241,31 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, Py_ssize_t length, wchar_t **items); +/* --- PyInterpreterConfig ------------------------------------ */ + +typedef struct { + int allow_fork; + int allow_exec; + int allow_threads; + int allow_daemon_threads; +} _PyInterpreterConfig; + +#define _PyInterpreterConfig_INIT \ + { \ + .allow_fork = 0, \ + .allow_exec = 0, \ + .allow_threads = 1, \ + .allow_daemon_threads = 0, \ + } + +#define _PyInterpreterConfig_LEGACY_INIT \ + { \ + .allow_fork = 1, \ + .allow_exec = 1, \ + .allow_threads = 1, \ + .allow_daemon_threads = 1, \ + } + /* --- Helper functions --------------------------------------- */ /* Get the original command line arguments, before Python modified them. diff --git a/Include/cpython/memoryobject.h b/Include/cpython/memoryobject.h new file mode 100644 index 00000000000000..e2a1e168e463b8 --- /dev/null +++ b/Include/cpython/memoryobject.h @@ -0,0 +1,42 @@ +#ifndef Py_CPYTHON_MEMORYOBJECT_H +# error "this header file must not be included directly" +#endif + +PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type; + +/* The structs are declared here so that macros can work, but they shouldn't + be considered public. Don't access their fields directly, use the macros + and functions instead! 
*/ +#define _Py_MANAGED_BUFFER_RELEASED 0x001 /* access to exporter blocked */ +#define _Py_MANAGED_BUFFER_FREE_FORMAT 0x002 /* free format */ + +typedef struct { + PyObject_HEAD + int flags; /* state flags */ + Py_ssize_t exports; /* number of direct memoryview exports */ + Py_buffer master; /* snapshot buffer obtained from the original exporter */ +} _PyManagedBufferObject; + + +/* memoryview state flags */ +#define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */ +#define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */ +#define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */ +#define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */ +#define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */ + +typedef struct { + PyObject_VAR_HEAD + _PyManagedBufferObject *mbuf; /* managed buffer */ + Py_hash_t hash; /* hash value for read-only views */ + int flags; /* state flags */ + Py_ssize_t exports; /* number of buffer re-exports */ + Py_buffer view; /* private copy of the exporter's view */ + PyObject *weakreflist; + Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */ +} PyMemoryViewObject; + +/* Get a pointer to the memoryview's private copy of the exporter's buffer. */ +#define PyMemoryView_GET_BUFFER(op) (&((PyMemoryViewObject *)(op))->view) +/* Get a pointer to the exporting object (this may be NULL!). */ +#define PyMemoryView_GET_BASE(op) (((PyMemoryViewObject *)(op))->view.obj) diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h index d8458923b3fab8..88f34fe7513bf2 100644 --- a/Include/cpython/modsupport.h +++ b/Include/cpython/modsupport.h @@ -106,5 +106,3 @@ PyAPI_FUNC(PyObject * const *) _PyArg_UnpackKeywordsWithVararg( (minpos), (maxpos), (minkw), (buf))) PyAPI_FUNC(PyObject *) _PyModule_CreateInitialized(PyModuleDef*, int apiver); - -PyAPI_DATA(const char *) _Py_PackageContext; diff --git a/Include/cpython/object.h b/Include/cpython/object.h index c80fc1df0e0ba4..3abfcb7d44f0fb 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -41,7 +41,7 @@ typedef struct _Py_Identifier { Py_ssize_t index; } _Py_Identifier; -#if defined(NEEDS_PY_IDENTIFIER) || !defined(Py_BUILD_CORE) +#ifndef Py_BUILD_CORE // For now we are keeping _Py_IDENTIFIER for continued use // in non-builtin extensions (and naughty PyPI modules). 
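For reference, a minimal sketch of the accessor macros defined in the new ``Include/cpython/memoryobject.h`` above (the helper below is hypothetical and not part of this patch) could look like::

    /* Hypothetical helper for illustration only -- not part of this patch. */
    #include <Python.h>

    /* Report the total byte length of a memoryview through the private
     * Py_buffer copy returned by PyMemoryView_GET_BUFFER().  The buffer is
     * owned by the memoryview, so the caller must not release it. */
    static Py_ssize_t
    memoryview_nbytes(PyObject *mv)
    {
        if (!PyMemoryView_Check(mv)) {
            PyErr_SetString(PyExc_TypeError, "expected a memoryview");
            return -1;
        }
        Py_buffer *view = PyMemoryView_GET_BUFFER(mv);
        return view->len;
    }

As the header comment notes, the structs themselves are not public; code that does not need memoryview internals can keep using the generic ``PyObject_GetBuffer()`` / ``PyBuffer_Release()`` pair.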
@@ -49,11 +49,7 @@ typedef struct _Py_Identifier { #define _Py_static_string(varname, value) static _Py_Identifier varname = _Py_static_string_init(value) #define _Py_IDENTIFIER(varname) _Py_static_string(PyId_##varname, #varname) -#endif /* NEEDS_PY_IDENTIFIER */ - -typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); -typedef void (*releasebufferproc)(PyObject *, Py_buffer *); - +#endif /* !Py_BUILD_CORE */ typedef struct { /* Number implementations must check *both* @@ -224,6 +220,9 @@ struct _typeobject { destructor tp_finalize; vectorcallfunc tp_vectorcall; + + /* bitset of which type-watchers care about this type */ + char tp_watched; }; /* This struct is used by the specializer @@ -510,3 +509,11 @@ Py_DEPRECATED(3.11) typedef int UsingDeprecatedTrashcanMacro; PyAPI_FUNC(int) _PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg); PyAPI_FUNC(void) _PyObject_ClearManagedDict(PyObject *obj); + +#define TYPE_MAX_WATCHERS 8 + +typedef int(*PyType_WatchCallback)(PyTypeObject *); +PyAPI_FUNC(int) PyType_AddWatcher(PyType_WatchCallback callback); +PyAPI_FUNC(int) PyType_ClearWatcher(int watcher_id); +PyAPI_FUNC(int) PyType_Watch(int watcher_id, PyObject *type); +PyAPI_FUNC(int) PyType_Unwatch(int watcher_id, PyObject *type); diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index f33d3caaa2082e..141341667795e8 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -37,6 +37,7 @@ typedef struct { PyObject *msg; PyObject *name; PyObject *path; + PyObject *name_from; } PyImportErrorObject; typedef struct { @@ -176,4 +177,11 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalErrorFormat( const char *format, ...); +extern PyObject *_PyErr_SetImportErrorWithNameFrom( + PyObject *, + PyObject *, + PyObject *, + PyObject *); + + #define Py_FatalError(message) _Py_FatalErrorFunc(__func__, (message)) diff --git a/Include/cpython/pyframe.h b/Include/cpython/pyframe.h index 1dc634ccee9a27..6ec292718aff1a 100644 --- a/Include/cpython/pyframe.h +++ b/Include/cpython/pyframe.h @@ -14,4 +14,5 @@ PyAPI_FUNC(PyObject *) PyFrame_GetBuiltins(PyFrameObject *frame); PyAPI_FUNC(PyObject *) PyFrame_GetGenerator(PyFrameObject *frame); PyAPI_FUNC(int) PyFrame_GetLasti(PyFrameObject *frame); - +PyAPI_FUNC(PyObject*) PyFrame_GetVar(PyFrameObject *frame, PyObject *name); +PyAPI_FUNC(PyObject*) PyFrame_GetVarString(PyFrameObject *frame, const char *name); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index bb5b07ef5901c8..e1f83acbffc360 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -62,4 +62,5 @@ PyAPI_FUNC(int) _Py_CoerceLegacyLocale(int warn); PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn); PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category); -PyAPI_FUNC(PyThreadState *) _Py_NewInterpreter(int isolated_subinterpreter); +PyAPI_FUNC(PyThreadState *) _Py_NewInterpreterFromConfig( + const _PyInterpreterConfig *); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 7722a384cbfa01..c51542bcc895cb 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -3,11 +3,39 @@ #endif +/* +Runtime Feature Flags + +Each flag indicate whether or not a specific runtime feature +is available in a given context. For example, forking the process +might not be allowed in the current interpreter (i.e. os.fork() would fail). +*/ + +/* Set if threads are allowed. */ +#define Py_RTFLAGS_THREADS (1UL << 10) + +/* Set if daemon threads are allowed. 
*/ +#define Py_RTFLAGS_DAEMON_THREADS (1UL << 11) + +/* Set if os.fork() is allowed. */ +#define Py_RTFLAGS_FORK (1UL << 15) + +/* Set if os.exec*() is allowed. */ +#define Py_RTFLAGS_EXEC (1UL << 16) + + +PyAPI_FUNC(int) _PyInterpreterState_HasFeature(PyInterpreterState *interp, + unsigned long feature); + + +/* private interpreter helpers */ + PyAPI_FUNC(int) _PyInterpreterState_RequiresIDRef(PyInterpreterState *); PyAPI_FUNC(void) _PyInterpreterState_RequireIDRef(PyInterpreterState *, int); PyAPI_FUNC(PyObject *) _PyInterpreterState_GetMainModule(PyInterpreterState *); + /* State unique per thread */ /* Py_tracefunc return -1 when raising an exception, or 0 for success. */ @@ -92,9 +120,6 @@ struct _ts { after allocation. */ int _initialized; - /* Was this thread state statically allocated? */ - int _static; - int py_recursion_remaining; int py_recursion_limit; diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 3ca6ace24c5f74..86eeab67275ec8 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -135,7 +135,7 @@ typedef struct { unsigned int ascii:1; /* Padding to ensure that PyUnicode_DATA() is always aligned to 4 bytes (see issue #19537 on m68k). */ - unsigned int :25; + unsigned int :26; } state; } PyASCIIObject; @@ -945,7 +945,7 @@ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); and where the hash values are equal (i.e. a very probable match) */ PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); -/* Equality check. Returns -1 on failure. */ +/* Equality check. */ PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *, PyObject *); PyAPI_FUNC(int) _PyUnicode_WideCharString_Converter(PyObject *, void *); diff --git a/Include/descrobject.h b/Include/descrobject.h index 77f221df07714f..0a420b865dfd1b 100644 --- a/Include/descrobject.h +++ b/Include/descrobject.h @@ -32,6 +32,61 @@ PyAPI_FUNC(PyObject *) PyDescr_NewGetSet(PyTypeObject *, PyGetSetDef *); PyAPI_FUNC(PyObject *) PyDictProxy_New(PyObject *); PyAPI_FUNC(PyObject *) PyWrapper_New(PyObject *, PyObject *); + +/* An array of PyMemberDef structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_GetOne() and set by PyMember_SetOne() (except if their READONLY + flag is set). The array must be terminated with an entry whose name + pointer is NULL. */ +struct PyMemberDef { + const char *name; + int type; + Py_ssize_t offset; + int flags; + const char *doc; +}; + +// These constants used to be in structmember.h, not prefixed by Py_. +// (structmember.h now has aliases to the new names.) + +/* Types */ +#define Py_T_SHORT 0 +#define Py_T_INT 1 +#define Py_T_LONG 2 +#define Py_T_FLOAT 3 +#define Py_T_DOUBLE 4 +#define Py_T_STRING 5 +#define _Py_T_OBJECT 6 // Deprecated, use Py_T_OBJECT_EX instead +/* the ordering here is weird for binary compatibility */ +#define Py_T_CHAR 7 /* 1-character string */ +#define Py_T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define Py_T_UBYTE 9 +#define Py_T_USHORT 10 +#define Py_T_UINT 11 +#define Py_T_ULONG 12 + +/* Added by Jack: strings contained in the structure */ +#define Py_T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define Py_T_BOOL 14 + +#define Py_T_OBJECT_EX 16 +#define Py_T_LONGLONG 17 +#define Py_T_ULONGLONG 18 + +#define Py_T_PYSSIZET 19 /* Py_ssize_t */ +#define _Py_T_NONE 20 // Deprecated. Value is always None. 
+ +/* Flags */ +#define Py_READONLY 1 +#define Py_AUDIT_READ 2 // Added in 3.10, harmless no-op before that +#define _Py_WRITE_RESTRICTED 4 // Deprecated, no-op. Do not reuse the value. + +PyAPI_FUNC(PyObject *) PyMember_GetOne(const char *, PyMemberDef *); +PyAPI_FUNC(int) PyMember_SetOne(char *, PyMemberDef *, PyObject *); + #ifndef Py_LIMITED_API # define Py_CPYTHON_DESCROBJECT_H # include "cpython/descrobject.h" diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index bf5945435c1774..80c1bfb6c9afa2 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -91,28 +91,6 @@ typedef struct { #define INLINE_CACHE_ENTRIES_FOR_ITER CACHE_ENTRIES(_PyForIterCache) -#define QUICKENING_WARMUP_DELAY 8 - -/* We want to compare to zero for efficiency, so we offset values accordingly */ -#define QUICKENING_INITIAL_WARMUP_VALUE (-QUICKENING_WARMUP_DELAY) - -void _PyCode_Quicken(PyCodeObject *code); - -static inline void -_PyCode_Warmup(PyCodeObject *code) -{ - if (code->co_warmup != 0) { - code->co_warmup++; - if (code->co_warmup == 0) { - _PyCode_Quicken(code); - } - } -} - -extern uint8_t _PyOpcode_Adaptive[256]; - -extern Py_ssize_t _Py_QuickenedCount; - // Borrowed references to common callables: struct callable_cache { PyObject *isinstance; @@ -235,27 +213,30 @@ extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range); /* Specialization functions */ -extern int _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, - PyObject *name); -extern int _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, +extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name); -extern int _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name); -extern int _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, _Py_CODEUNIT *instr); -extern int _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr); -extern int _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, - int nargs, PyObject *kwnames); +extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, + PyObject *name); +extern void _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins, + _Py_CODEUNIT *instr, PyObject *name); +extern void _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, + _Py_CODEUNIT *instr); +extern void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, + _Py_CODEUNIT *instr); +extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, + int nargs, PyObject *kwnames); extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg, PyObject **locals); extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg); extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg); -extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr); +extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg); -/* Deallocator function for static codeobjects used in deepfreeze.py */ -extern void _PyStaticCode_Dealloc(PyCodeObject *co); -/* Function to intern strings of codeobjects */ -extern int _PyStaticCode_InternStrings(PyCodeObject *co); +/* Finalizer function for static codeobjects used in deepfreeze.py */ +extern void _PyStaticCode_Fini(PyCodeObject *co); +/* Function to intern strings of codeobjects and quicken the bytecode */ +extern int 
_PyStaticCode_Init(PyCodeObject *co); #ifdef Py_STATS @@ -312,6 +293,12 @@ write_obj(uint16_t *p, PyObject *val) memcpy(p, &val, sizeof(val)); } +static inline uint16_t +read_u16(uint16_t *p) +{ + return *p; +} + static inline uint32_t read_u32(uint16_t *p) { @@ -397,8 +384,22 @@ write_location_entry_start(uint8_t *ptr, int code, int length) /* With a 16-bit counter, we have 12 bits for the counter value, and 4 bits for the backoff */ #define ADAPTIVE_BACKOFF_BITS 4 -/* The initial counter value is 31 == 2**ADAPTIVE_BACKOFF_START - 1 */ -#define ADAPTIVE_BACKOFF_START 5 + +// A value of 1 means that we attempt to specialize the *second* time each +// instruction is executed. Executing twice is a much better indicator of +// "hotness" than executing once, but additional warmup delays only prevent +// specialization. Most types stabilize by the second execution, too: +#define ADAPTIVE_WARMUP_VALUE 1 +#define ADAPTIVE_WARMUP_BACKOFF 1 + +// A value of 52 means that we attempt to re-specialize after 53 misses (a prime +// number, useful for avoiding artifacts if every nth value is a different type +// or something). Setting the backoff to 0 means that the counter is reset to +// the same state as a warming-up instruction (value == 1, backoff == 1) after +// deoptimization. This isn't strictly necessary, but it is bit easier to reason +// about when thinking about the opcode transitions as a state machine: +#define ADAPTIVE_COOLDOWN_VALUE 52 +#define ADAPTIVE_COOLDOWN_BACKOFF 0 #define MAX_BACKOFF_VALUE (16 - ADAPTIVE_BACKOFF_BITS) @@ -410,9 +411,15 @@ adaptive_counter_bits(int value, int backoff) { } static inline uint16_t -adaptive_counter_start(void) { - unsigned int value = (1 << ADAPTIVE_BACKOFF_START) - 1; - return adaptive_counter_bits(value, ADAPTIVE_BACKOFF_START); +adaptive_counter_warmup(void) { + return adaptive_counter_bits(ADAPTIVE_WARMUP_VALUE, + ADAPTIVE_WARMUP_BACKOFF); +} + +static inline uint16_t +adaptive_counter_cooldown(void) { + return adaptive_counter_bits(ADAPTIVE_COOLDOWN_VALUE, + ADAPTIVE_COOLDOWN_BACKOFF); } static inline uint16_t @@ -455,6 +462,16 @@ _PyCode_LineNumberFromArray(PyCodeObject *co, int index) } } +typedef struct _PyShimCodeDef { + const uint8_t *code; + int codelen; + int stacksize; + const char *cname; +} _PyShimCodeDef; + +extern PyCodeObject * +_Py_MakeShimCode(const _PyShimCodeDef *code); + #ifdef __cplusplus } diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 1a628a08ca4ebf..967fe92a5bc2b2 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -18,10 +18,11 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( PyCompilerFlags *flags, int optimize, struct _arena *arena); -extern PyFutureFeatures* _PyFuture_FromAST( + +int _PyFuture_FromAST( struct _mod * mod, - PyObject *filename - ); + PyObject *filename, + PyFutureFeatures* futures); extern PyObject* _Py_Mangle(PyObject *p, PyObject *name); @@ -39,6 +40,13 @@ extern int _PyAST_Optimize( _PyASTOptimizeState *state); /* Access compiler internals for unit testing */ + +PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( + PyObject *ast, + PyObject *filename, + PyCompilerFlags *flags, + int optimize); + PyAPI_FUNC(PyObject*) _PyCompile_OptimizeCfg( PyObject *instructions, PyObject *consts); diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index 1bf4e8f3ee532e..52dfe3ef233874 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -18,6 +18,10 @@ void 
_PyContext_Fini(PyInterpreterState *); /* other API */ +typedef struct { + PyObject_HEAD +} _PyContextTokenMissing; + #ifndef WITH_FREELISTS // without freelists # define PyContext_MAXFREELIST 0 diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 464092996cae00..25bd3bffb2e7aa 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -9,6 +9,9 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_dict_state.h" +#include "pycore_runtime.h" // _PyRuntime + /* runtime lifecycle */ @@ -17,25 +20,6 @@ extern void _PyDict_Fini(PyInterpreterState *interp); /* other API */ -#ifndef WITH_FREELISTS -// without freelists -# define PyDict_MAXFREELIST 0 -#endif - -#ifndef PyDict_MAXFREELIST -# define PyDict_MAXFREELIST 80 -#endif - -struct _Py_dict_state { -#if PyDict_MAXFREELIST > 0 - /* Dictionary reuse scheme to save calls to malloc and free */ - PyDictObject *free_list[PyDict_MAXFREELIST]; - int numfree; - PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; - int keys_numfree; -#endif -}; - typedef struct { /* Cached hash code of me_key. */ Py_hash_t me_hash; @@ -152,9 +136,32 @@ struct _dictvalues { (PyDictUnicodeEntry*)(&((int8_t*)((dk)->dk_indices))[(size_t)1 << (dk)->dk_log2_index_bytes])) #define DK_IS_UNICODE(dk) ((dk)->dk_kind != DICT_KEYS_GENERAL) -extern uint64_t _pydict_global_version; +#define DICT_VERSION_INCREMENT (1 << DICT_MAX_WATCHERS) +#define DICT_VERSION_MASK (DICT_VERSION_INCREMENT - 1) + +#define DICT_NEXT_VERSION() \ + (_PyRuntime.dict_state.global_version += DICT_VERSION_INCREMENT) -#define DICT_NEXT_VERSION() (++_pydict_global_version) +void +_PyDict_SendEvent(int watcher_bits, + PyDict_WatchEvent event, + PyDictObject *mp, + PyObject *key, + PyObject *value); + +static inline uint64_t +_PyDict_NotifyEvent(PyDict_WatchEvent event, + PyDictObject *mp, + PyObject *key, + PyObject *value) +{ + int watcher_bits = mp->ma_version_tag & DICT_VERSION_MASK; + if (watcher_bits) { + _PyDict_SendEvent(watcher_bits, event, mp, key, value); + return DICT_NEXT_VERSION() | watcher_bits; + } + return DICT_NEXT_VERSION(); +} extern PyObject *_PyObject_MakeDictFromInstanceAttributes(PyObject *obj, PyDictValues *values); extern PyObject *_PyDict_FromItems( diff --git a/Include/internal/pycore_dict_state.h b/Include/internal/pycore_dict_state.h new file mode 100644 index 00000000000000..77375ea8beb877 --- /dev/null +++ b/Include/internal/pycore_dict_state.h @@ -0,0 +1,47 @@ +#ifndef Py_INTERNAL_DICT_STATE_H +#define Py_INTERNAL_DICT_STATE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +struct _Py_dict_runtime_state { + /*Global counter used to set ma_version_tag field of dictionary. + * It is incremented each time that a dictionary is created and each + * time that a dictionary is modified. 
*/ + uint64_t global_version; + uint32_t next_keys_version; +}; + + +#ifndef WITH_FREELISTS +// without freelists +# define PyDict_MAXFREELIST 0 +#endif + +#ifndef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 80 +#endif + +#define DICT_MAX_WATCHERS 8 + +struct _Py_dict_state { +#if PyDict_MAXFREELIST > 0 + /* Dictionary reuse scheme to save calls to malloc and free */ + PyDictObject *free_list[PyDict_MAXFREELIST]; + PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; + int numfree; + int keys_numfree; +#endif + PyDict_WatchCallback watchers[DICT_MAX_WATCHERS]; +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_DICT_STATE_H */ diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h index c77cf6e46cc3c3..fdc6e74ecd25e3 100644 --- a/Include/internal/pycore_dtoa.h +++ b/Include/internal/pycore_dtoa.h @@ -1,3 +1,5 @@ +#ifndef Py_INTERNAL_DTOA_H +#define Py_INTERNAL_DTOA_H #ifdef __cplusplus extern "C" { #endif @@ -11,6 +13,46 @@ extern "C" { #if _PY_SHORT_FLOAT_REPR == 1 +typedef uint32_t ULong; + +struct +Bigint { + struct Bigint *next; + int k, maxwds, sign, wds; + ULong x[1]; +}; + +#ifdef Py_USING_MEMORY_DEBUGGER + +struct _dtoa_runtime_state { + int _not_used; +}; +#define _dtoa_runtime_state_INIT {0} + +#else // !Py_USING_MEMORY_DEBUGGER + +/* The size of the Bigint freelist */ +#define Bigint_Kmax 7 + +#ifndef PRIVATE_MEM +#define PRIVATE_MEM 2304 +#endif +#define Bigint_PREALLOC_SIZE \ + ((PRIVATE_MEM+sizeof(double)-1)/sizeof(double)) + +struct _dtoa_runtime_state { + struct Bigint *freelist[Bigint_Kmax+1]; + double preallocated[Bigint_PREALLOC_SIZE]; + double *preallocated_next; +}; +#define _dtoa_runtime_state_INIT(runtime) \ + { \ + .preallocated_next = runtime.dtoa.preallocated, \ + } + +#endif // !Py_USING_MEMORY_DEBUGGER + + /* These functions are used by modules compiled as C extension like math: they must be exported. 
*/ @@ -26,3 +68,4 @@ PyAPI_FUNC(double) _Py_dg_infinity(int sign); #ifdef __cplusplus } #endif +#endif /* !Py_INTERNAL_DTOA_H */ diff --git a/Include/internal/pycore_fileutils.h b/Include/internal/pycore_fileutils.h index 3ce8108e4e04f1..ac89c43d569c07 100644 --- a/Include/internal/pycore_fileutils.h +++ b/Include/internal/pycore_fileutils.h @@ -10,6 +10,11 @@ extern "C" { #include /* struct lconv */ + +struct _fileutils_state { + int force_ascii; +}; + typedef enum { _Py_ERROR_UNKNOWN=0, _Py_ERROR_STRICT, diff --git a/Include/internal/pycore_floatobject.h b/Include/internal/pycore_floatobject.h index 8a655543329f33..27c63bc87f3ee3 100644 --- a/Include/internal/pycore_floatobject.h +++ b/Include/internal/pycore_floatobject.h @@ -19,6 +19,18 @@ extern void _PyFloat_FiniType(PyInterpreterState *); /* other API */ +enum _py_float_format_type { + _py_float_format_unknown, + _py_float_format_ieee_big_endian, + _py_float_format_ieee_little_endian, +}; + +struct _Py_float_runtime_state { + enum _py_float_format_type float_format; + enum _py_float_format_type double_format; +}; + + #ifndef WITH_FREELISTS // without freelists # define PyFloat_MAXFREELIST 0 diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 5bd0a7f2f517ef..7fa410d288c33a 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -42,17 +42,18 @@ typedef enum _framestate { enum _frameowner { FRAME_OWNED_BY_THREAD = 0, FRAME_OWNED_BY_GENERATOR = 1, - FRAME_OWNED_BY_FRAME_OBJECT = 2 + FRAME_OWNED_BY_FRAME_OBJECT = 2, + FRAME_OWNED_BY_CSTACK = 3, }; typedef struct _PyInterpreterFrame { /* "Specials" section */ - PyObject *f_funcobj; /* Strong reference */ - PyObject *f_globals; /* Borrowed reference */ - PyObject *f_builtins; /* Borrowed reference */ - PyObject *f_locals; /* Strong reference, may be NULL */ + PyObject *f_funcobj; /* Strong reference. Only valid if not on C stack */ + PyObject *f_globals; /* Borrowed reference. Only valid if not on C stack */ + PyObject *f_builtins; /* Borrowed reference. Only valid if not on C stack */ + PyObject *f_locals; /* Strong reference, may be NULL. Only valid if not on C stack */ PyCodeObject *f_code; /* Strong reference */ - PyFrameObject *frame_obj; /* Strong reference, may be NULL */ + PyFrameObject *frame_obj; /* Strong reference, may be NULL. Only valid if not on C stack */ /* Linkage section */ struct _PyInterpreterFrame *previous; // NOTE: This is not necessarily the last instruction started in the given @@ -60,8 +61,8 @@ typedef struct _PyInterpreterFrame { // example, it may be an inline CACHE entry, an instruction we just jumped // over, or (in the case of a newly-created frame) a totally invalid value: _Py_CODEUNIT *prev_instr; - int stacktop; /* Offset of TOS from localsplus */ - bool is_entry; // Whether this is the "root" frame for the current _PyCFrame. 
+ int stacktop; /* Offset of TOS from localsplus */ + uint16_t yield_offset; char owner; /* Locals and stack */ PyObject *localsplus[1]; @@ -109,7 +110,7 @@ _PyFrame_InitializeSpecials( frame->stacktop = code->co_nlocalsplus; frame->frame_obj = NULL; frame->prev_instr = _PyCode_CODE(code) - 1; - frame->is_entry = false; + frame->yield_offset = 0; frame->owner = FRAME_OWNED_BY_THREAD; } diff --git a/Include/internal/pycore_function.h b/Include/internal/pycore_function.h index 1c87aa31ddb611..5cedb33d7e3afd 100644 --- a/Include/internal/pycore_function.h +++ b/Include/internal/pycore_function.h @@ -8,6 +8,12 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#define FUNC_MAX_WATCHERS 8 + +struct _py_func_runtime_state { + uint32_t next_version; +}; + extern PyFunctionObject* _PyFunction_FromConstructor(PyFrameConstructor *constr); extern uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func); diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index bfab0adfffc9ff..b3abe2030a03da 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -202,6 +202,8 @@ extern void _PyList_ClearFreeList(PyInterpreterState *interp); extern void _PyDict_ClearFreeList(PyInterpreterState *interp); extern void _PyAsyncGen_ClearFreeLists(PyInterpreterState *interp); extern void _PyContext_ClearFreeList(PyInterpreterState *interp); +extern void _Py_ScheduleGC(PyInterpreterState *interp); +extern void _Py_RunGC(PyThreadState *tstate); #ifdef __cplusplus } diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h index 82e89db7b1b750..80f6bb2c8a7c16 100644 --- a/Include/internal/pycore_global_objects.h +++ b/Include/internal/pycore_global_objects.h @@ -10,6 +10,9 @@ extern "C" { #include "pycore_gc.h" // PyGC_Head #include "pycore_global_strings.h" // struct _Py_global_strings +#include "pycore_hamt.h" // PyHamtNode_Bitmap +#include "pycore_context.h" // _PyContextTokenMissing +#include "pycore_typeobject.h" // pytype_slotdef // These would be in pycore_long.h if it weren't for an include cycle. @@ -20,6 +23,13 @@ extern "C" { // Only immutable objects should be considered runtime-global. // All others must be per-interpreter. +#define _Py_CACHED_OBJECT(NAME) \ + _PyRuntime.cached_objects.NAME + +struct _Py_cached_objects { + PyObject *str_replace_inf; +}; + #define _Py_GLOBAL_OBJECT(NAME) \ _PyRuntime.global_objects.NAME #define _Py_SINGLETON(NAME) \ @@ -44,11 +54,40 @@ struct _Py_global_objects { _PyGC_Head_UNUSED _tuple_empty_gc_not_used; PyTupleObject tuple_empty; + + _PyGC_Head_UNUSED _hamt_bitmap_node_empty_gc_not_used; + PyHamtNode_Bitmap hamt_bitmap_node_empty; + _PyContextTokenMissing context_token_missing; } singletons; PyObject *interned; }; +#define _Py_INTERP_CACHED_OBJECT(interp, NAME) \ + (interp)->cached_objects.NAME + +struct _Py_interp_cached_objects { + int _not_set; + /* object.__reduce__ */ + PyObject *objreduce; + PyObject *type_slots_pname; + pytype_slotdef *type_slots_ptrs[MAX_EQUIV]; +}; + +#define _Py_INTERP_STATIC_OBJECT(interp, NAME) \ + (interp)->static_objects.NAME +#define _Py_INTERP_SINGLETON(interp, NAME) \ + _Py_INTERP_STATIC_OBJECT(interp, singletons.NAME) + +struct _Py_interp_static_objects { + struct { + int _not_used; + // hamt_empty is here instead of global because of its weakreflist. 
+ _PyGC_Head_UNUSED _hamt_empty_gc_not_used; + PyHamtObject hamt_empty; + } singletons; +}; + #ifdef __cplusplus } diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h new file mode 100644 index 00000000000000..9951fa9951e67a --- /dev/null +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -0,0 +1,1495 @@ +#ifndef Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H +#define Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_object.h" // _PyObject_IMMORTAL_REFCNT + +#ifdef Py_DEBUG +static inline void +_PyStaticObject_CheckRefcnt(PyObject *obj) { + if (Py_REFCNT(obj) < _PyObject_IMMORTAL_REFCNT) { + _PyObject_ASSERT_FAILED_MSG(obj, + "immortal object has less refcnt than expected " + "_PyObject_IMMORTAL_REFCNT"); + } +} +#endif + +/* The following is auto-generated by Tools/build/generate_global_objects.py. */ +#ifdef Py_DEBUG +static inline void +_PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { + /* generated runtime-global */ + // (see pycore_runtime_init_generated.h) + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 55]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 68]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 74]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 75]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 92]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 111]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 119]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 120]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 129]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 130]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 131]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 132]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 133]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 134]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 135]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 136]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 137]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 138]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 139]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 140]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 141]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 142]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 143]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 144]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 145]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 146]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 147]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 148]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 149]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 150]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 151]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 152]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 153]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 154]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 155]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 156]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 157]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 158]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 159]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 160]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 161]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 162]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 163]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 164]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 165]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 166]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 167]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 168]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 169]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 170]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 171]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 172]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 173]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 174]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 175]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 176]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 177]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 178]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 179]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 180]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 181]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 182]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 183]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 184]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 185]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 186]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 187]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 188]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 189]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 190]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 191]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 192]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 193]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 194]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 195]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 196]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 197]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 198]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 199]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 200]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 201]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 202]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 203]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 204]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 205]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 206]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 207]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 208]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 209]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 210]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 211]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 212]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 213]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 214]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 215]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 216]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 217]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 218]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 219]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 220]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 221]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 222]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 223]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 224]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 225]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 226]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 227]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 228]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 229]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 230]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 231]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 232]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 233]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 234]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 235]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 236]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 237]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 238]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 239]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 240]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 241]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 242]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 243]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 244]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 245]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 246]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 247]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 248]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 249]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 250]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 251]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 252]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 253]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 254]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 255]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 256]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[18]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[24]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[28]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[30]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[55]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[68]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[74]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[75]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[92]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[111]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[119]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[120]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[129]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[130]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[131]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[132]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[133]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[134]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[135]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[136]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[137]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[138]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[139]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[140]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[141]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[142]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[143]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[144]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[145]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[146]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[147]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[148]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[149]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[150]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[151]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[152]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[153]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[154]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[155]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[156]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[157]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[158]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[159]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[160]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[161]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[162]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[163]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[164]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[165]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[166]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[167]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[168]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[169]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[170]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[171]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[172]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[173]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[174]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[175]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[176]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[177]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[178]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[179]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[180]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[181]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[182]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[183]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[184]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[185]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[186]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[187]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[188]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[189]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[190]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[191]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[192]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[193]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[194]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[195]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[196]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[197]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[198]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[199]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[200]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[201]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[202]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[203]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[204]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[205]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[206]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[207]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[208]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[209]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[210]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[211]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[212]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[213]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[214]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[215]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[216]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[217]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[218]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[219]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[220]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[221]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[222]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[223]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[224]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[225]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[226]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[227]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[228]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[229]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[230]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[231]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[232]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[233]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[234]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[235]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[236]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[237]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[238]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[239]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[240]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(bytes_characters)[241]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[242]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[243]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[244]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[245]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[246]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[247]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[248]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[249]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[250]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[251]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[252]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[253]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[254]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_characters)[255]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_dictcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_genexpr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_lambda)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_listcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_setcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_string)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_unknown)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(close_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_close_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_open_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dbl_percent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dot)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(dot_locals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(json_decoder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(list_err)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(newline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(open_br)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(percent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(shim_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(utf_8)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(CANCELLED)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(FINISHED)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(False)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(JSONDecodeError)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(PENDING)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(Py_Repr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(TextIOWrapper)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(True)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(WarningMessage)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__IOBase_closed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abc_tpflags__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__abstractmethods__)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(__add__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aenter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aexit__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__aiter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__all__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__and__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__anext__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__annotations__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__args__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__asyncio_running_event_loop__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__await__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bases__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bool__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__build_class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__builtins__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bytes__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__call__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__cantrace__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__class_getitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__classcell__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__complex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__contains__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__copy__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ctypes_from_outparam__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__del__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delete__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dict__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dictoffset__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__dir__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__divmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__doc__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__enter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__eq__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__exit__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__file__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__float__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__floordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__format__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__fspath__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ge__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__get__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getattribute__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getinitargs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getnewargs__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getnewargs_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__getstate__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__gt__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__hash__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iadd__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iand__)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(__ifloordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ilshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imatmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__import__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__imul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__index__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__init__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__init_subclass__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__instancecheck__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__int__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__invert__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ior__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ipow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__irshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__isabstractmethod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__isub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__iter__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__itruediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ixor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__le__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__len__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__length_hint__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lltrace__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__loader__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__lt__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__main__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__matmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__missing__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__module__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mro_entries__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__mul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__name__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ne__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__neg__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__new__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__newobj__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__newobj_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__next__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__notes__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__or__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__orig_class__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__origin__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__package__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__parameters__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__path__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__pos__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__pow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__prepare__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__qualname__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__radd__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rand__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rdivmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce__)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__repr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reversed__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rfloordiv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rlshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmatmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmod__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rmul__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ror__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__round__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rpow__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rrshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rshift__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rsub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rtruediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rxor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__set__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__set_name__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setattr__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setitem__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__setstate__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__sizeof__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slotnames__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slots__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__spec__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__str__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__sub__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__subclasscheck__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__subclasshook__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__truediv__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__trunc__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_prepare_subst__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_subst__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__warningregistry__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__weaklistoffset__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__weakref__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__xor__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_abc_impl)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_abstract_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_annotation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_anonymous_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_argtypes_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_as_parameter_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_asyncio_future_blocking)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_blksize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_bootstrap)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_check_retval_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_dealloc_warn)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_feature_version)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_fields_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_finalizing)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(_find_and_load)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_fix_up_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_flags_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_get_sourcefile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_handle_fromlist)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_initializing)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_is_text_encoding)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_length_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_lock_unlock_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_needs_com_addref_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_pack_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_restype_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_showwarnmsg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_shutdown)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_slotnames)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_swappedbytes_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_type_)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_uninitialized_submodules)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_warn_unawaited_coroutine)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_xoptions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(a)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(abs_tol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(access)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(add)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(add_done_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_child)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_parent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(aggregate_class)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(append)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argdefs)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(arguments)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argv)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(as_integer_ratio)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ast)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(attribute)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(authorizer_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(autocommit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(b)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(backtick)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(base)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(before)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(big)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(binary_form)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(block)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffer_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffering)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(buffers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(bufsize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(builtins)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(byteorder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(bytes)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(bytes_per_sep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_call)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_exception)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_return)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cached_statements)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cadata)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cafile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call_exception_handler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(call_soon)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cancel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(capath)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(category)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cb_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(certfile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(check_same_thread)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(clear)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(close)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closefd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(closure)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_argcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_cellvars)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_code)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_consts)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_exceptiontable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_filename)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_firstlineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_flags)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_freevars)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_kwonlyargcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_linetable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_names)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_nlocals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_posonlyargcount)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_qualname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_stacksize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(co_varnames)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(code)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(command)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(comment_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(consts)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(context)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cookie)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(copy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(copyreg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(coro)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(count)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cwd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(d)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(data)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(database)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decoder)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(default)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(defaultaction)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(delete)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(depth)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(detect_types)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(deterministic)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(device)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dictcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(difference_update)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digest)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digest_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(digestmod)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(discard)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dispatch_table)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(displayhook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dklen)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(doc)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dont_inherit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst_dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(duration)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(e)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(effective_ids)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(element_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encoding)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(endpos)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(env)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(errors)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(event)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(eventmask)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exc_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exc_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(excepthook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exception)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(exp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(extend)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(facility)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(false)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(family)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fanout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fd2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fdel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fget)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(file)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(file_actions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filename)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fileno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filepath)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fillvalue)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(filters)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(final)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(find_class)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fix_imports)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(flags)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(flush)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(follow_symlinks)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(format)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(frequency)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(from_param)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromlist)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromtimestamp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fromutc)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(fset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(func)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(future)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(generation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(genexpr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_debug)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_event_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(get_source)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(getattr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(getstate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(gid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(globals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(groupindex)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(groups)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(handle)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hash_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(header)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(headers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hi)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hook)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(id)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ident)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ignore)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(imag)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(importlib)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(in_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(incoming)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(indexgroup)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inf)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inheritable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial_bytes)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initial_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(initval)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(inner_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(input)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(insert_comments)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(insert_pis)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(instructions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intern)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intersection)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isatty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isinstance)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isoformat)); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isolation_level)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(istext)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(item)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(items)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iter)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iterable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(iterations)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(join)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(jump)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keepends)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(key)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keyfile)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(keys)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kind)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lambda)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_node)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(latin1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(leaf_size)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(len)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(length)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(level)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(limit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(line)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(line_buffering)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(listcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(little)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lo)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(locale)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(locals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(logoption)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(loop)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mapping)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(match)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(max_length)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxdigits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxevents)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxmem)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxsplit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(maxvalue)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memLevel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memlimit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(message)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metaclass)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(method)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mod)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(module_globals)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(modules)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mro)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(msg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mycmp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_arg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_sequence_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(n_unnamed_fields)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(name_from)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(namespace_separator)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(namespaces)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(narg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ndigits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(new_limit)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(newline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(newlines)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(next)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_depth)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ns)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nstype)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(null)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(number)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(obj)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(object)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset_dst)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset_src)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(on_type_read)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(onceregistry)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(only_keys)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(oparg)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(opcode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(open)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(opener)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(operation)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(optimize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(options)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(order)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(out_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(outgoing)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(overlapped)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(owner)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(p)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pages)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(parent)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(password)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(path)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pattern)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(peek)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(persistent_id)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(persistent_load)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(person)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pi_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(policy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos1)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(pos2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(print_file_and_line)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(priority)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress_handler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(proto)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(protocol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ps1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ps2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(query)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(quotetabs)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(r)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(raw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(read)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(read1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readall)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readinto)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readinto1)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readline)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(readonly)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(real)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reducer_override)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(registry)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(rel_tol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reload)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(repl)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(replace)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reserved)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reset)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(resetids)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(return)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reverse)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reversed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(s)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(salt)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sched_priority)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(scheduler)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seek)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seekable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(selectors)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(self)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(send)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sequence)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(server_hostname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(server_side)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(session)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setcomp)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setpgroup)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsigdef)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setsigmask)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(setstate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(shape)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(show_cmd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(signed)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(size)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(sizehint)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sleep)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sock)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sort)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sound)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source_traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src_dir_fd)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stacklevel)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(start)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(statement)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(status)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stderr)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stdin)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stdout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(step)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(store_name)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strategy)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strftime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(strict_mode)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(string)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sub_key)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(symmetric_difference_update)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tabsize)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tag)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(target)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(target_is_directory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(task)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_frame)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_lasti)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_lineno)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tb_next)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tell)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(template)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(term)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(text)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(threading)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(throw)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(timeout)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(times)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(timetuple)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(top)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(trace_callback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(trailers)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(translate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(true)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(truncate)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(twice)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(txt)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(type)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tz)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(tzname)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(uid)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(unlink)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(unraisablehook)); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(uri)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(usedforsecurity)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(value)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(values)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(version)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnings)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnoptions)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(wbits)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(week)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(weekday)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(which)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(who)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(withdata)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(writable)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(write)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(write_through)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(x)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(year)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(zdict)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[0]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[1]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[2]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[3]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[4]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[5]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[6]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[7]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[8]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[9]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[10]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[11]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[12]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[13]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[14]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[15]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[16]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[17]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[18]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[19]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[20]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[21]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[22]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[23]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[24]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[25]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[26]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[27]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[28]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[29]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[30]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[31]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[32]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[33]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[34]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[35]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[36]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[37]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[38]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[39]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[40]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[41]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[42]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[43]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[44]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[45]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[46]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[47]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[48]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[49]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[50]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[51]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[52]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[53]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[54]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[55]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[56]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[57]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[58]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[59]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[60]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[61]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[62]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[63]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[64]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[65]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[66]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[67]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[68]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[69]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[70]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[71]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[72]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[73]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[74]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[75]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(strings).ascii[76]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[77]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[78]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[79]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[80]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[81]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[82]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[83]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[84]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[85]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[86]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[87]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[88]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[89]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[90]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[91]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[92]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[93]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[94]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[95]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[96]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[97]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[98]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[99]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[100]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[101]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[102]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[103]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[104]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[105]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[106]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[107]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[108]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[109]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[110]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[111]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[112]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[113]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[114]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[115]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[116]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[117]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[118]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[119]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[120]); + _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_SINGLETON(strings).ascii[121]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[122]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[123]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[124]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[125]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[126]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).ascii[127]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[131 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]); + 
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]); + /* non-generated */ + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(bytes_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(tuple_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(hamt_bitmap_node_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_INTERP_SINGLETON(interp, hamt_empty)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_SINGLETON(context_token_missing)); +} +#endif // Py_DEBUG +/* End auto-generated code */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_GLOBAL_OBJECTS_FINI_GENERATED_INIT_H */ diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 1523eef73931c9..12144b02f45574 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -8,10 +8,10 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -// The data structure & init here are inspired by Tools/scripts/deepfreeze.py. +// The data structure & init here are inspired by Tools/build/deepfreeze.py. // All field names generated by ASCII_STR() have a common prefix, -// to help avoid collisions with keywords, etc. +// to help avoid collisions with keywords, macros, etc. #define STRUCT_FOR_ASCII_STR(LITERAL) \ struct { \ @@ -19,13 +19,13 @@ extern "C" { uint8_t _data[sizeof(LITERAL)]; \ } #define STRUCT_FOR_STR(NAME, LITERAL) \ - STRUCT_FOR_ASCII_STR(LITERAL) _ ## NAME; + STRUCT_FOR_ASCII_STR(LITERAL) _py_ ## NAME; #define STRUCT_FOR_ID(NAME) \ - STRUCT_FOR_ASCII_STR(#NAME) _ ## NAME; + STRUCT_FOR_ASCII_STR(#NAME) _py_ ## NAME; // XXX Order by frequency of use? -/* The following is auto-generated by Tools/scripts/generate_global_objects.py. */ +/* The following is auto-generated by Tools/build/generate_global_objects.py. 
*/ struct _Py_global_strings { struct { STRUCT_FOR_STR(anon_dictcomp, "") @@ -37,22 +37,27 @@ struct _Py_global_strings { STRUCT_FOR_STR(anon_string, "") STRUCT_FOR_STR(anon_unknown, "") STRUCT_FOR_STR(close_br, "}") - STRUCT_FOR_STR(comma_sep, ", ") STRUCT_FOR_STR(dbl_close_br, "}}") STRUCT_FOR_STR(dbl_open_br, "{{") STRUCT_FOR_STR(dbl_percent, "%%") STRUCT_FOR_STR(dot, ".") STRUCT_FOR_STR(dot_locals, ".") STRUCT_FOR_STR(empty, "") + STRUCT_FOR_STR(json_decoder, "json.decoder") STRUCT_FOR_STR(list_err, "list index out of range") STRUCT_FOR_STR(newline, "\n") STRUCT_FOR_STR(open_br, "{") STRUCT_FOR_STR(percent, "%") + STRUCT_FOR_STR(shim_name, "") STRUCT_FOR_STR(utf_8, "utf-8") } literals; struct { + STRUCT_FOR_ID(CANCELLED) + STRUCT_FOR_ID(FINISHED) STRUCT_FOR_ID(False) + STRUCT_FOR_ID(JSONDecodeError) + STRUCT_FOR_ID(PENDING) STRUCT_FOR_ID(Py_Repr) STRUCT_FOR_ID(TextIOWrapper) STRUCT_FOR_ID(True) @@ -71,6 +76,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__anext__) STRUCT_FOR_ID(__annotations__) STRUCT_FOR_ID(__args__) + STRUCT_FOR_ID(__asyncio_running_event_loop__) STRUCT_FOR_ID(__await__) STRUCT_FOR_ID(__bases__) STRUCT_FOR_ID(__bool__) @@ -85,6 +91,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__complex__) STRUCT_FOR_ID(__contains__) STRUCT_FOR_ID(__copy__) + STRUCT_FOR_ID(__ctypes_from_outparam__) STRUCT_FOR_ID(__del__) STRUCT_FOR_ID(__delattr__) STRUCT_FOR_ID(__delete__) @@ -211,22 +218,38 @@ struct _Py_global_strings { STRUCT_FOR_ID(__weakref__) STRUCT_FOR_ID(__xor__) STRUCT_FOR_ID(_abc_impl) + STRUCT_FOR_ID(_abstract_) STRUCT_FOR_ID(_annotation) + STRUCT_FOR_ID(_anonymous_) + STRUCT_FOR_ID(_argtypes_) + STRUCT_FOR_ID(_as_parameter_) + STRUCT_FOR_ID(_asyncio_future_blocking) STRUCT_FOR_ID(_blksize) STRUCT_FOR_ID(_bootstrap) + STRUCT_FOR_ID(_check_retval_) STRUCT_FOR_ID(_dealloc_warn) STRUCT_FOR_ID(_feature_version) + STRUCT_FOR_ID(_fields_) STRUCT_FOR_ID(_finalizing) STRUCT_FOR_ID(_find_and_load) STRUCT_FOR_ID(_fix_up_module) + STRUCT_FOR_ID(_flags_) STRUCT_FOR_ID(_get_sourcefile) STRUCT_FOR_ID(_handle_fromlist) STRUCT_FOR_ID(_initializing) STRUCT_FOR_ID(_is_text_encoding) + STRUCT_FOR_ID(_length_) STRUCT_FOR_ID(_lock_unlock_module) + STRUCT_FOR_ID(_loop) + STRUCT_FOR_ID(_needs_com_addref_) + STRUCT_FOR_ID(_pack_) + STRUCT_FOR_ID(_restype_) STRUCT_FOR_ID(_showwarnmsg) STRUCT_FOR_ID(_shutdown) STRUCT_FOR_ID(_slotnames) + STRUCT_FOR_ID(_strptime_datetime) + STRUCT_FOR_ID(_swappedbytes_) + STRUCT_FOR_ID(_type_) STRUCT_FOR_ID(_uninitialized_submodules) STRUCT_FOR_ID(_warn_unawaited_coroutine) STRUCT_FOR_ID(_xoptions) @@ -234,6 +257,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(abs_tol) STRUCT_FOR_ID(access) STRUCT_FOR_ID(add) + STRUCT_FOR_ID(add_done_callback) STRUCT_FOR_ID(after_in_child) STRUCT_FOR_ID(after_in_parent) STRUCT_FOR_ID(aggregate_class) @@ -241,8 +265,11 @@ struct _Py_global_strings { STRUCT_FOR_ID(argdefs) STRUCT_FOR_ID(arguments) STRUCT_FOR_ID(argv) + STRUCT_FOR_ID(as_integer_ratio) + STRUCT_FOR_ID(ast) STRUCT_FOR_ID(attribute) STRUCT_FOR_ID(authorizer_callback) + STRUCT_FOR_ID(autocommit) STRUCT_FOR_ID(b) STRUCT_FOR_ID(backtick) STRUCT_FOR_ID(base) @@ -260,6 +287,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(byteorder) STRUCT_FOR_ID(bytes) STRUCT_FOR_ID(bytes_per_sep) + STRUCT_FOR_ID(c) STRUCT_FOR_ID(c_call) STRUCT_FOR_ID(c_exception) STRUCT_FOR_ID(c_return) @@ -267,6 +295,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(cadata) STRUCT_FOR_ID(cafile) STRUCT_FOR_ID(call) + STRUCT_FOR_ID(call_exception_handler) + STRUCT_FOR_ID(call_soon) + STRUCT_FOR_ID(cancel) 
STRUCT_FOR_ID(capath) STRUCT_FOR_ID(category) STRUCT_FOR_ID(cb_type) @@ -306,6 +337,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(coro) STRUCT_FOR_ID(count) STRUCT_FOR_ID(cwd) + STRUCT_FOR_ID(d) STRUCT_FOR_ID(data) STRUCT_FOR_ID(database) STRUCT_FOR_ID(decode) @@ -324,6 +356,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(digest_size) STRUCT_FOR_ID(digestmod) STRUCT_FOR_ID(dir_fd) + STRUCT_FOR_ID(discard) STRUCT_FOR_ID(dispatch_table) STRUCT_FOR_ID(displayhook) STRUCT_FOR_ID(dklen) @@ -332,6 +365,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(dst) STRUCT_FOR_ID(dst_dir_fd) STRUCT_FOR_ID(duration) + STRUCT_FOR_ID(e) STRUCT_FOR_ID(effective_ids) STRUCT_FOR_ID(element_factory) STRUCT_FOR_ID(encode) @@ -350,7 +384,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(exception) STRUCT_FOR_ID(exp) STRUCT_FOR_ID(extend) + STRUCT_FOR_ID(facility) STRUCT_FOR_ID(factory) + STRUCT_FOR_ID(false) STRUCT_FOR_ID(family) STRUCT_FOR_ID(fanout) STRUCT_FOR_ID(fd) @@ -372,12 +408,19 @@ struct _Py_global_strings { STRUCT_FOR_ID(follow_symlinks) STRUCT_FOR_ID(format) STRUCT_FOR_ID(frequency) + STRUCT_FOR_ID(from_param) STRUCT_FOR_ID(fromlist) + STRUCT_FOR_ID(fromtimestamp) + STRUCT_FOR_ID(fromutc) STRUCT_FOR_ID(fset) STRUCT_FOR_ID(func) + STRUCT_FOR_ID(future) STRUCT_FOR_ID(generation) STRUCT_FOR_ID(genexpr) STRUCT_FOR_ID(get) + STRUCT_FOR_ID(get_debug) + STRUCT_FOR_ID(get_event_loop) + STRUCT_FOR_ID(get_loop) STRUCT_FOR_ID(get_source) STRUCT_FOR_ID(getattr) STRUCT_FOR_ID(getstate) @@ -392,6 +435,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(hi) STRUCT_FOR_ID(hook) STRUCT_FOR_ID(id) + STRUCT_FOR_ID(ident) STRUCT_FOR_ID(ignore) STRUCT_FOR_ID(imag) STRUCT_FOR_ID(importlib) @@ -413,6 +457,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(intersection) STRUCT_FOR_ID(isatty) STRUCT_FOR_ID(isinstance) + STRUCT_FOR_ID(isoformat) STRUCT_FOR_ID(isolation_level) STRUCT_FOR_ID(istext) STRUCT_FOR_ID(item) @@ -427,6 +472,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(keyfile) STRUCT_FOR_ID(keys) STRUCT_FOR_ID(kind) + STRUCT_FOR_ID(kw) + STRUCT_FOR_ID(kw1) + STRUCT_FOR_ID(kw2) STRUCT_FOR_ID(lambda) STRUCT_FOR_ID(last) STRUCT_FOR_ID(last_node) @@ -447,6 +495,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(lo) STRUCT_FOR_ID(locale) STRUCT_FOR_ID(locals) + STRUCT_FOR_ID(logoption) STRUCT_FOR_ID(loop) STRUCT_FOR_ID(mapping) STRUCT_FOR_ID(match) @@ -468,12 +517,14 @@ struct _Py_global_strings { STRUCT_FOR_ID(modules) STRUCT_FOR_ID(mro) STRUCT_FOR_ID(msg) + STRUCT_FOR_ID(mycmp) STRUCT_FOR_ID(n) STRUCT_FOR_ID(n_arg) STRUCT_FOR_ID(n_fields) STRUCT_FOR_ID(n_sequence_fields) STRUCT_FOR_ID(n_unnamed_fields) STRUCT_FOR_ID(name) + STRUCT_FOR_ID(name_from) STRUCT_FOR_ID(namespace_separator) STRUCT_FOR_ID(namespaces) STRUCT_FOR_ID(narg) @@ -485,6 +536,8 @@ struct _Py_global_strings { STRUCT_FOR_ID(node_depth) STRUCT_FOR_ID(node_offset) STRUCT_FOR_ID(ns) + STRUCT_FOR_ID(nstype) + STRUCT_FOR_ID(null) STRUCT_FOR_ID(number) STRUCT_FOR_ID(obj) STRUCT_FOR_ID(object) @@ -493,6 +546,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(offset_src) STRUCT_FOR_ID(on_type_read) STRUCT_FOR_ID(onceregistry) + STRUCT_FOR_ID(only_keys) STRUCT_FOR_ID(oparg) STRUCT_FOR_ID(opcode) STRUCT_FOR_ID(open) @@ -519,6 +573,8 @@ struct _Py_global_strings { STRUCT_FOR_ID(pid) STRUCT_FOR_ID(policy) STRUCT_FOR_ID(pos) + STRUCT_FOR_ID(pos1) + STRUCT_FOR_ID(pos2) STRUCT_FOR_ID(print_file_and_line) STRUCT_FOR_ID(priority) STRUCT_FOR_ID(progress) @@ -559,6 +615,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(seek) STRUCT_FOR_ID(seekable) STRUCT_FOR_ID(selectors) + STRUCT_FOR_ID(self) 
STRUCT_FOR_ID(send) STRUCT_FOR_ID(sep) STRUCT_FOR_ID(sequence) @@ -581,6 +638,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(sort) STRUCT_FOR_ID(sound) STRUCT_FOR_ID(source) + STRUCT_FOR_ID(source_traceback) STRUCT_FOR_ID(src) STRUCT_FOR_ID(src_dir_fd) STRUCT_FOR_ID(stacklevel) @@ -593,6 +651,7 @@ STRUCT_FOR_ID(step) STRUCT_FOR_ID(store_name) STRUCT_FOR_ID(strategy) + STRUCT_FOR_ID(strftime) STRUCT_FOR_ID(strict) STRUCT_FOR_ID(strict_mode) STRUCT_FOR_ID(string) @@ -615,16 +674,19 @@ STRUCT_FOR_ID(throw) STRUCT_FOR_ID(timeout) STRUCT_FOR_ID(times) + STRUCT_FOR_ID(timetuple) STRUCT_FOR_ID(top) STRUCT_FOR_ID(trace_callback) STRUCT_FOR_ID(traceback) STRUCT_FOR_ID(trailers) STRUCT_FOR_ID(translate) + STRUCT_FOR_ID(true) STRUCT_FOR_ID(truncate) STRUCT_FOR_ID(twice) STRUCT_FOR_ID(txt) STRUCT_FOR_ID(type) STRUCT_FOR_ID(tz) + STRUCT_FOR_ID(tzname) STRUCT_FOR_ID(uid) STRUCT_FOR_ID(unlink) STRUCT_FOR_ID(unraisablehook) @@ -664,9 +726,9 @@ struct _Py_global_strings { #define _Py_ID(NAME) \ - (_Py_SINGLETON(strings.identifiers._ ## NAME._ascii.ob_base)) + (_Py_SINGLETON(strings.identifiers._py_ ## NAME._ascii.ob_base)) #define _Py_STR(NAME) \ - (_Py_SINGLETON(strings.literals._ ## NAME._ascii.ob_base)) + (_Py_SINGLETON(strings.literals._py_ ## NAME._ascii.ob_base)) /* _Py_DECLARE_STR() should precede all uses of _Py_STR() in a function. diff --git a/Include/internal/pycore_hamt.h b/Include/internal/pycore_hamt.h index 7a674b4ee4ac0e..d8742c7cb63578 100644 --- a/Include/internal/pycore_hamt.h +++ b/Include/internal/pycore_hamt.h @@ -28,10 +28,6 @@ extern PyTypeObject _PyHamtKeys_Type; extern PyTypeObject _PyHamtValues_Type; extern PyTypeObject _PyHamtItems_Type; -/* runtime lifecycle */ - -void _PyHamt_Fini(PyInterpreterState *); - /* other API */ @@ -53,6 +49,13 @@ typedef struct { } PyHamtObject; +typedef struct { + PyObject_VAR_HEAD + uint32_t b_bitmap; + PyObject *b_array[1]; +} PyHamtNode_Bitmap; + + /* A struct to hold the state of depth-first traverse of the tree. HAMT is an immutable collection. Iterators will hold a strong reference diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index aee1f66a3ea171..9036dff6725330 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -5,6 +5,38 @@ extern "C" { #endif + +struct _import_runtime_state { + /* The builtin modules (defined in config.c). */ + struct _inittab *inittab; + /* The most recent value assigned to a PyModuleDef.m_base.m_index. + This is incremented each time PyModuleDef_Init() is called, + which is just about every time an extension module is imported. + See PyInterpreterState.modules_by_index for more info. */ + Py_ssize_t last_module_index; + /* A dict mapping (filename, name) to PyModuleDef for modules. + Only legacy (single-phase init) extension modules are added + and only if they support multiple initialization (m_size >= 0) + or are imported in the main interpreter. + This is initialized lazily in _PyImport_FixupExtensionObject(). + Modules are added there and looked up in _imp.find_extension(). */ + PyObject *extensions; + /* The global import lock.
*/ + struct { + PyThread_type_lock mutex; + unsigned long thread; + int level; + } lock; + struct { + int import_level; + _PyTime_t accumulated; + int header; + } find_and_load; + /* Package context -- the full module name for package imports */ + const char * pkgcontext; +}; + + #ifdef HAVE_FORK extern PyStatus _PyImport_ReInitLock(void); #endif diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index b21708a388b339..532b28499080f2 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -14,18 +14,22 @@ extern "C" { #include "pycore_ast_state.h" // struct ast_state #include "pycore_code.h" // struct callable_cache #include "pycore_context.h" // struct _Py_context_state -#include "pycore_dict.h" // struct _Py_dict_state +#include "pycore_dict_state.h" // struct _Py_dict_state #include "pycore_exceptions.h" // struct _Py_exc_state #include "pycore_floatobject.h" // struct _Py_float_state +#include "pycore_function.h" // FUNC_MAX_WATCHERS #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_gc.h" // struct _gc_runtime_state #include "pycore_list.h" // struct _Py_list_state +#include "pycore_global_objects.h" // struct _Py_interp_static_objects #include "pycore_tuple.h" // struct _Py_tuple_state #include "pycore_typeobject.h" // struct type_cache #include "pycore_unicodeobject.h" // struct _Py_unicode_state #include "pycore_warnings.h" // struct _warnings_runtime_state + struct _pending_calls { + int busy; PyThread_type_lock lock; /* Request for running pending calls. */ _Py_atomic_int calls_to_do; @@ -49,6 +53,8 @@ struct _ceval_state { _Py_atomic_int eval_breaker; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; + /* The GC is ready to be executed */ + _Py_atomic_int gc_scheduled; struct _pending_calls pending; }; @@ -67,6 +73,11 @@ struct atexit_state { }; +struct _Py_long_state { + int max_str_digits; +}; + + /* interpreter state */ /* PyInterpreterState holds the global state for one of the runtime's @@ -108,14 +119,30 @@ struct _is { int _initialized; int finalizing; - /* Was this interpreter statically allocated? */ - bool _static; - struct _ceval_state ceval; struct _gc_runtime_state gc; // sys.modules dictionary PyObject *modules; + /* This is the list of module objects for all legacy (single-phase init) + extension modules ever loaded in this process (i.e. imported + in this interpreter or in any other). Py_None stands in for + modules that haven't actually been imported in this interpreter. + + A module's index (PyModuleDef.m_base.m_index) is used to look up + the corresponding module object for this interpreter, if any. + (See PyState_FindModule().) When any extension module + is initialized during import, its moduledef gets initialized by + PyModuleDef_Init(), and the first time that happens for each + PyModuleDef, its index gets set to the current value of + a global counter (see _PyRuntimeState.imports.last_module_index). + The entry for that index in this interpreter remains unset until + the module is actually imported here. (Py_None is used as + a placeholder.) Note that multi-phase init modules always get + an index for which there will never be a module set. + + This is initialized lazily in _PyState_AddModule(), which is also + where modules get added. 
*/ PyObject *modules_by_index; // Dictionary of the sys module PyObject *sysdict; @@ -136,6 +163,7 @@ struct _is { #ifdef HAVE_DLOPEN int dlopenflags; #endif + unsigned long feature_flags; PyObject *dict; /* Stores per-interpreter state */ @@ -144,6 +172,11 @@ struct _is { // Initialized to _PyEval_EvalFrameDefault(). _PyFrameEvalFunction eval_frame; + PyDict_WatchCallback dict_watchers[DICT_MAX_WATCHERS]; + PyFunction_WatchCallback func_watchers[FUNC_MAX_WATCHERS]; + // One bit is set for each non-NULL entry in func_watchers + uint8_t active_func_watchers; + Py_ssize_t co_extra_user_count; freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS]; @@ -157,9 +190,11 @@ struct _is { struct atexit_state atexit; PyObject *audit_hooks; + PyType_WatchCallback type_watchers[TYPE_MAX_WATCHERS]; struct _Py_unicode_state unicode; struct _Py_float_state float_state; + struct _Py_long_state long_state; /* Using a cache is very effective since typically only a single slice is created and then deleted again. */ PySliceObject *slice_cache; @@ -174,6 +209,10 @@ struct _is { struct ast_state ast; struct types_state types; struct callable_cache callable_cache; + PyCodeObject *interpreter_trampoline; + + struct _Py_interp_cached_objects cached_objects; + struct _Py_interp_static_objects static_objects; /* The following fields are here to avoid allocation during init. The data is exposed through PyInterpreterState pointer fields. diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h new file mode 100644 index 00000000000000..93349d89c6ab52 --- /dev/null +++ b/Include/internal/pycore_obmalloc.h @@ -0,0 +1,689 @@ +#ifndef Py_INTERNAL_OBMALLOC_H +#define Py_INTERNAL_OBMALLOC_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +typedef unsigned int pymem_uint; /* assuming >= 16 bits */ + +#undef uint +#define uint pymem_uint + + +/* An object allocator for Python. + + Here is an introduction to the layers of the Python memory architecture, + showing where the object allocator is actually used (layer +2), It is + called for every object allocation and deallocation (PyObject_New/Del), + unless the object-specific allocators implement a proprietary allocation + scheme (ex.: ints use a simple free list). This is also the place where + the cyclic garbage collector operates selectively on container objects. + + + Object-specific allocators + _____ ______ ______ ________ + [ int ] [ dict ] [ list ] ... 
[ string ] Python core | ++3 | <----- Object-specific memory -----> | <-- Non-object memory --> | + _______________________________ | | + [ Python's object allocator ] | | ++2 | ####### Object memory ####### | <------ Internal buffers ------> | + ______________________________________________________________ | + [ Python's raw memory allocator (PyMem_ API) ] | ++1 | <----- Python memory (under PyMem manager's control) ------> | | + __________________________________________________________________ + [ Underlying general-purpose allocator (ex: C library malloc) ] + 0 | <------ Virtual memory allocated for the python process -------> | + + ========================================================================= + _______________________________________________________________________ + [ OS-specific Virtual Memory Manager (VMM) ] +-1 | <--- Kernel dynamic storage allocation & management (page-based) ---> | + __________________________________ __________________________________ + [ ] [ ] +-2 | <-- Physical memory: ROM/RAM --> | | <-- Secondary storage (swap) --> | + +*/ +/*==========================================================================*/ + +/* A fast, special-purpose memory allocator for small blocks, to be used + on top of a general-purpose malloc -- heavily based on previous art. */ + +/* Vladimir Marangozov -- August 2000 */ + +/* + * "Memory management is where the rubber meets the road -- if we do the wrong + * thing at any level, the results will not be good. And if we don't make the + * levels work well together, we are in serious trouble." (1) + * + * (1) Paul R. Wilson, Mark S. Johnstone, Michael Neely, and David Boles, + * "Dynamic Storage Allocation: A Survey and Critical Review", + * in Proc. 1995 Int'l. Workshop on Memory Management, September 1995. + */ + +/* #undef WITH_MEMORY_LIMITS */ /* disable mem limit checks */ + +/*==========================================================================*/ + +/* + * Allocation strategy abstract: + * + * For small requests, the allocator sub-allocates blocks of memory. + * Requests greater than SMALL_REQUEST_THRESHOLD bytes are routed to the + * system's allocator. + * + * Small requests are grouped in size classes spaced 8 bytes apart, due + * to the required valid alignment of the returned address. Requests of + * a particular size are serviced from memory pools of 4K (one VMM page). + * Pools are fragmented on demand and contain free lists of blocks of one + * particular size class. In other words, there is a fixed-size allocator + * for each size class. Free pools are shared by the different allocators + * thus minimizing the space reserved for a particular size class. + * + * This allocation strategy is a variant of what is known as "simple + * segregated storage based on array of free lists". The main drawback of + * simple segregated storage is that we might end up with lot of reserved + * memory for the different free lists, which degenerate in time. To avoid + * this, we partition each free list in pools and we share dynamically the + * reserved space between all free lists. This technique is quite efficient + * for memory intensive programs which allocate mainly small-sized blocks. + * + * For small requests we have the following table: + * + * Request in bytes Size of allocated block Size class idx + * ---------------------------------------------------------------- + * 1-8 8 0 + * 9-16 16 1 + * 17-24 24 2 + * 25-32 32 3 + * 33-40 40 4 + * 41-48 48 5 + * 49-56 56 6 + * 57-64 64 7 + * 65-72 72 8 + * ... ... ... 
+ * 497-504 504 62 + * 505-512 512 63 + * + * 0, SMALL_REQUEST_THRESHOLD + 1 and up: routed to the underlying + * allocator. + */ + +/*==========================================================================*/ + +/* + * -- Main tunable settings section -- + */ + +/* + * Alignment of addresses returned to the user. 8-bytes alignment works + * on most current architectures (with 32-bit or 64-bit address buses). + * The alignment value is also used for grouping small requests in size + * classes spaced ALIGNMENT bytes apart. + * + * You shouldn't change this unless you know what you are doing. + */ + +#if SIZEOF_VOID_P > 4 +#define ALIGNMENT 16 /* must be 2^N */ +#define ALIGNMENT_SHIFT 4 +#else +#define ALIGNMENT 8 /* must be 2^N */ +#define ALIGNMENT_SHIFT 3 +#endif + +/* Return the number of bytes in size class I, as a uint. */ +#define INDEX2SIZE(I) (((pymem_uint)(I) + 1) << ALIGNMENT_SHIFT) + +/* + * Max size threshold below which malloc requests are considered to be + * small enough in order to use preallocated memory pools. You can tune + * this value according to your application behaviour and memory needs. + * + * Note: a size threshold of 512 guarantees that newly created dictionaries + * will be allocated from preallocated memory pools on 64-bit. + * + * The following invariants must hold: + * 1) ALIGNMENT <= SMALL_REQUEST_THRESHOLD <= 512 + * 2) SMALL_REQUEST_THRESHOLD is evenly divisible by ALIGNMENT + * + * Although not required, for better performance and space efficiency, + * it is recommended that SMALL_REQUEST_THRESHOLD is set to a power of 2. + */ +#define SMALL_REQUEST_THRESHOLD 512 +#define NB_SMALL_SIZE_CLASSES (SMALL_REQUEST_THRESHOLD / ALIGNMENT) + +/* + * The system's VMM page size can be obtained on most unices with a + * getpagesize() call or deduced from various header files. To make + * things simpler, we assume that it is 4K, which is OK for most systems. + * It is probably better if this is the native page size, but it doesn't + * have to be. In theory, if SYSTEM_PAGE_SIZE is larger than the native page + * size, then `POOL_ADDR(p)->arenaindex' could rarely cause a segmentation + * violation fault. 4K is apparently OK for all the platforms that python + * currently targets. + */ +#define SYSTEM_PAGE_SIZE (4 * 1024) + +/* + * Maximum amount of memory managed by the allocator for small requests. + */ +#ifdef WITH_MEMORY_LIMITS +#ifndef SMALL_MEMORY_LIMIT +#define SMALL_MEMORY_LIMIT (64 * 1024 * 1024) /* 64 MB -- more? */ +#endif +#endif + +#if !defined(WITH_PYMALLOC_RADIX_TREE) +/* Use radix-tree to track arena memory regions, for address_in_range(). + * Enable by default since it allows larger pool sizes. Can be disabled + * using -DWITH_PYMALLOC_RADIX_TREE=0 */ +#define WITH_PYMALLOC_RADIX_TREE 1 +#endif + +#if SIZEOF_VOID_P > 4 +/* on 64-bit platforms use larger pools and arenas if we can */ +#define USE_LARGE_ARENAS +#if WITH_PYMALLOC_RADIX_TREE +/* large pools only supported if radix-tree is enabled */ +#define USE_LARGE_POOLS +#endif +#endif + +/* + * The allocator sub-allocates blocks of memory (called arenas) aligned + * on a page boundary. This is a reserved virtual address space for the + * current process (obtained through a malloc()/mmap() call). In no way this + * means that the memory arenas will be used entirely. A malloc() is + * usually an address range reservation for bytes, unless all pages within + * this space are referenced subsequently. So malloc'ing big blocks and not + * using them does not mean "wasting memory". 
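To make the size-class arithmetic above concrete, here is a minimal sketch (the helper name size_class_of is hypothetical and not part of this header) of the request-to-index mapping that pairs with INDEX2SIZE():

    static inline pymem_uint
    size_class_of(size_t nbytes)
    {
        /* Valid for 1 <= nbytes <= SMALL_REQUEST_THRESHOLD: requests are
           rounded up to the next multiple of ALIGNMENT and grouped into
           classes spaced ALIGNMENT bytes apart. */
        return (pymem_uint)((nbytes - 1) >> ALIGNMENT_SHIFT);
    }

    /* Round trip: INDEX2SIZE(size_class_of(n)) is n rounded up to a multiple
       of ALIGNMENT.  With ALIGNMENT_SHIFT == 3 this reproduces the table
       above, e.g. size_class_of(20) == 2 and INDEX2SIZE(2) == 24. */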
It's an addressable range + * wastage... + * + * Arenas are allocated with mmap() on systems supporting anonymous memory + * mappings to reduce heap fragmentation. + */ +#ifdef USE_LARGE_ARENAS +#define ARENA_BITS 20 /* 1 MiB */ +#else +#define ARENA_BITS 18 /* 256 KiB */ +#endif +#define ARENA_SIZE (1 << ARENA_BITS) +#define ARENA_SIZE_MASK (ARENA_SIZE - 1) + +#ifdef WITH_MEMORY_LIMITS +#define MAX_ARENAS (SMALL_MEMORY_LIMIT / ARENA_SIZE) +#endif + +/* + * Size of the pools used for small blocks. Must be a power of 2. + */ +#ifdef USE_LARGE_POOLS +#define POOL_BITS 14 /* 16 KiB */ +#else +#define POOL_BITS 12 /* 4 KiB */ +#endif +#define POOL_SIZE (1 << POOL_BITS) +#define POOL_SIZE_MASK (POOL_SIZE - 1) + +#if !WITH_PYMALLOC_RADIX_TREE +#if POOL_SIZE != SYSTEM_PAGE_SIZE +# error "pool size must be equal to system page size" +#endif +#endif + +#define MAX_POOLS_IN_ARENA (ARENA_SIZE / POOL_SIZE) +#if MAX_POOLS_IN_ARENA * POOL_SIZE != ARENA_SIZE +# error "arena size not an exact multiple of pool size" +#endif + +/* + * -- End of tunable settings section -- + */ + +/*==========================================================================*/ + +/* When you say memory, my mind reasons in terms of (pointers to) blocks */ +typedef uint8_t pymem_block; + +/* Pool for small blocks. */ +struct pool_header { + union { pymem_block *_padding; + uint count; } ref; /* number of allocated blocks */ + pymem_block *freeblock; /* pool's free list head */ + struct pool_header *nextpool; /* next pool of this size class */ + struct pool_header *prevpool; /* previous pool "" */ + uint arenaindex; /* index into arenas of base adr */ + uint szidx; /* block size class index */ + uint nextoffset; /* bytes to virgin block */ + uint maxnextoffset; /* largest valid nextoffset */ +}; + +typedef struct pool_header *poolp; + +/* Record keeping for arenas. */ +struct arena_object { + /* The address of the arena, as returned by malloc. Note that 0 + * will never be returned by a successful malloc, and is used + * here to mark an arena_object that doesn't correspond to an + * allocated arena. + */ + uintptr_t address; + + /* Pool-aligned pointer to the next pool to be carved off. */ + pymem_block* pool_address; + + /* The number of available pools in the arena: free pools + never- + * allocated pools. + */ + uint nfreepools; + + /* The total number of pools in the arena, whether or not available. */ + uint ntotalpools; + + /* Singly-linked list of available pools. */ + struct pool_header* freepools; + + /* Whenever this arena_object is not associated with an allocated + * arena, the nextarena member is used to link all unassociated + * arena_objects in the singly-linked `unused_arena_objects` list. + * The prevarena member is unused in this case. + * + * When this arena_object is associated with an allocated arena + * with at least one available pool, both members are used in the + * doubly-linked `usable_arenas` list, which is maintained in + * increasing order of `nfreepools` values. + * + * Else this arena_object is associated with an allocated arena + * all of whose pools are in use. `nextarena` and `prevarena` + * are both meaningless in this case. 
+ */ + struct arena_object* nextarena; + struct arena_object* prevarena; +}; + +#define POOL_OVERHEAD _Py_SIZE_ROUND_UP(sizeof(struct pool_header), ALIGNMENT) + +#define DUMMY_SIZE_IDX 0xffff /* size class of newly cached pools */ + +/* Round pointer P down to the closest pool-aligned address <= P, as a poolp */ +#define POOL_ADDR(P) ((poolp)_Py_ALIGN_DOWN((P), POOL_SIZE)) + +/* Return total number of blocks in pool of size index I, as a uint. */ +#define NUMBLOCKS(I) ((pymem_uint)(POOL_SIZE - POOL_OVERHEAD) / INDEX2SIZE(I)) + +/*==========================================================================*/ + +/* + * Pool table -- headed, circular, doubly-linked lists of partially used pools. + +This is involved. For an index i, usedpools[i+i] is the header for a list of +all partially used pools holding small blocks with "size class idx" i. So +usedpools[0] corresponds to blocks of size 8, usedpools[2] to blocks of size +16, and so on: index 2*i <-> blocks of size (i+1)<freeblock points to +the start of a singly-linked list of free blocks within the pool. When a +block is freed, it's inserted at the front of its pool's freeblock list. Note +that the available blocks in a pool are *not* linked all together when a pool +is initialized. Instead only "the first two" (lowest addresses) blocks are +set up, returning the first such block, and setting pool->freeblock to a +one-block list holding the second such block. This is consistent with that +pymalloc strives at all levels (arena, pool, and block) never to touch a piece +of memory until it's actually needed. + +So long as a pool is in the used state, we're certain there *is* a block +available for allocating, and pool->freeblock is not NULL. If pool->freeblock +points to the end of the free list before we've carved the entire pool into +blocks, that means we simply haven't yet gotten to one of the higher-address +blocks. The offset from the pool_header to the start of "the next" virgin +block is stored in the pool_header nextoffset member, and the largest value +of nextoffset that makes sense is stored in the maxnextoffset member when a +pool is initialized. All the blocks in a pool have been passed out at least +once when and only when nextoffset > maxnextoffset. + + +Major obscurity: While the usedpools vector is declared to have poolp +entries, it doesn't really. It really contains two pointers per (conceptual) +poolp entry, the nextpool and prevpool members of a pool_header. The +excruciating initialization code below fools C so that + + usedpool[i+i] + +"acts like" a genuine poolp, but only so long as you only reference its +nextpool and prevpool members. The "- 2*sizeof(pymem_block *)" gibberish is +compensating for that a pool_header's nextpool and prevpool members +immediately follow a pool_header's first two members: + + union { pymem_block *_padding; + uint count; } ref; + pymem_block *freeblock; + +each of which consume sizeof(pymem_block *) bytes. So what usedpools[i+i] really +contains is a fudged-up pointer p such that *if* C believes it's a poolp +pointer, then p->nextpool and p->prevpool are both p (meaning that the headed +circular list is empty). + +It's unclear why the usedpools setup is so convoluted. It could be to +minimize the amount of cache required to hold this heavily-referenced table +(which only *needs* the two interpool pointer members of a pool_header). 
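As a concrete illustration of the doubled index, allocation code consults this table roughly as follows (a fragment, not part of this header; `usedpools` stands for the `used` array declared just below, and `szidx` is a size class index):

    poolp pool = usedpools[szidx + szidx];   /* fudged-up header for class szidx */
    if (pool != pool->nextpool) {
        /* Non-empty list: pool is a partially used pool of this size class,
           and a free block can be taken from pool->freeblock. */
    }
    else {
        /* Empty list: the header points back at itself, so a fresh pool
           has to be carved off an arena. */
    }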
OTOH, +referencing code has to remember to "double the index" and doing so isn't +free, usedpools[0] isn't a strictly legal pointer, and we're crucially relying +on that C doesn't insert any padding anywhere in a pool_header at or before +the prevpool member. +**************************************************************************** */ + +#define OBMALLOC_USED_POOLS_SIZE (2 * ((NB_SMALL_SIZE_CLASSES + 7) / 8) * 8) + +struct _obmalloc_pools { + poolp used[OBMALLOC_USED_POOLS_SIZE]; +}; + + +/*========================================================================== +Arena management. + +`arenas` is a vector of arena_objects. It contains maxarenas entries, some of +which may not be currently used (== they're arena_objects that aren't +currently associated with an allocated arena). Note that arenas proper are +separately malloc'ed. + +Prior to Python 2.5, arenas were never free()'ed. Starting with Python 2.5, +we do try to free() arenas, and use some mild heuristic strategies to increase +the likelihood that arenas eventually can be freed. + +unused_arena_objects + + This is a singly-linked list of the arena_objects that are currently not + being used (no arena is associated with them). Objects are taken off the + head of the list in new_arena(), and are pushed on the head of the list in + PyObject_Free() when the arena is empty. Key invariant: an arena_object + is on this list if and only if its .address member is 0. + +usable_arenas + + This is a doubly-linked list of the arena_objects associated with arenas + that have pools available. These pools are either waiting to be reused, + or have not been used before. The list is sorted to have the most- + allocated arenas first (ascending order based on the nfreepools member). + This means that the next allocation will come from a heavily used arena, + which gives the nearly empty arenas a chance to be returned to the system. + In my unscientific tests this dramatically improved the number of arenas + that could be freed. + +Note that an arena_object associated with an arena all of whose pools are +currently in use isn't on either list. + +Changed in Python 3.8: keeping usable_arenas sorted by number of free pools +used to be done by one-at-a-time linear search when an arena's number of +free pools changed. That could, overall, consume time quadratic in the +number of arenas. That didn't really matter when there were only a few +hundred arenas (typical!), but could be a timing disaster when there were +hundreds of thousands. See bpo-37029. + +Now we have a vector of "search fingers" to eliminate the need to search: +nfp2lasta[nfp] returns the last ("rightmost") arena in usable_arenas +with nfp free pools. This is NULL if and only if there is no arena with +nfp free pools in usable_arenas. +*/ + +/* How many arena_objects do we initially allocate? + * 16 = can allocate 16 arenas = 16 * ARENA_SIZE = 4MB before growing the + * `arenas` vector. + */ +#define INITIAL_ARENA_OBJECTS 16 + +struct _obmalloc_mgmt { + /* Array of objects used to track chunks of memory (arenas). */ + struct arena_object* arenas; + /* Number of slots currently allocated in the `arenas` vector. */ + uint maxarenas; + + /* The head of the singly-linked, NULL-terminated list of available + * arena_objects. + */ + struct arena_object* unused_arena_objects; + + /* The head of the doubly-linked, NULL-terminated at each end, list of + * arena_objects associated with arenas that have pools available. 
+ */ + struct arena_object* usable_arenas; + + /* nfp2lasta[nfp] is the last arena in usable_arenas with nfp free pools */ + struct arena_object* nfp2lasta[MAX_POOLS_IN_ARENA + 1]; + + /* Number of arenas allocated that haven't been free()'d. */ + size_t narenas_currently_allocated; + + /* Total number of times malloc() called to allocate an arena. */ + size_t ntimes_arena_allocated; + /* High water mark (max value ever seen) for narenas_currently_allocated. */ + size_t narenas_highwater; + + Py_ssize_t raw_allocated_blocks; +}; + + +#if WITH_PYMALLOC_RADIX_TREE +/*==========================================================================*/ +/* radix tree for tracking arena usage. If enabled, used to implement + address_in_range(). + + memory address bit allocation for keys + + 64-bit pointers, IGNORE_BITS=0 and 2^20 arena size: + 15 -> MAP_TOP_BITS + 15 -> MAP_MID_BITS + 14 -> MAP_BOT_BITS + 20 -> ideal aligned arena + ---- + 64 + + 64-bit pointers, IGNORE_BITS=16, and 2^20 arena size: + 16 -> IGNORE_BITS + 10 -> MAP_TOP_BITS + 10 -> MAP_MID_BITS + 8 -> MAP_BOT_BITS + 20 -> ideal aligned arena + ---- + 64 + + 32-bit pointers and 2^18 arena size: + 14 -> MAP_BOT_BITS + 18 -> ideal aligned arena + ---- + 32 + +*/ + +#if SIZEOF_VOID_P == 8 + +/* number of bits in a pointer */ +#define POINTER_BITS 64 + +/* High bits of memory addresses that will be ignored when indexing into the + * radix tree. Setting this to zero is the safe default. For most 64-bit + * machines, setting this to 16 would be safe. The kernel would not give + * user-space virtual memory addresses that have significant information in + * those high bits. The main advantage to setting IGNORE_BITS > 0 is that less + * virtual memory will be used for the top and middle radix tree arrays. Those + * arrays are allocated in the BSS segment and so will typically consume real + * memory only if actually accessed. + */ +#define IGNORE_BITS 0 + +/* use the top and mid layers of the radix tree */ +#define USE_INTERIOR_NODES + +#elif SIZEOF_VOID_P == 4 + +#define POINTER_BITS 32 +#define IGNORE_BITS 0 + +#else + + /* Currently this code works for 64-bit or 32-bit pointers only. */ +#error "obmalloc radix tree requires 64-bit or 32-bit pointers." 
+ +#endif /* SIZEOF_VOID_P */ + +/* arena_coverage_t members require this to be true */ +#if ARENA_BITS >= 32 +# error "arena size must be < 2^32" +#endif + +/* the lower bits of the address that are not ignored */ +#define ADDRESS_BITS (POINTER_BITS - IGNORE_BITS) + +#ifdef USE_INTERIOR_NODES +/* number of bits used for MAP_TOP and MAP_MID nodes */ +#define INTERIOR_BITS ((ADDRESS_BITS - ARENA_BITS + 2) / 3) +#else +#define INTERIOR_BITS 0 +#endif + +#define MAP_TOP_BITS INTERIOR_BITS +#define MAP_TOP_LENGTH (1 << MAP_TOP_BITS) +#define MAP_TOP_MASK (MAP_TOP_LENGTH - 1) + +#define MAP_MID_BITS INTERIOR_BITS +#define MAP_MID_LENGTH (1 << MAP_MID_BITS) +#define MAP_MID_MASK (MAP_MID_LENGTH - 1) + +#define MAP_BOT_BITS (ADDRESS_BITS - ARENA_BITS - 2*INTERIOR_BITS) +#define MAP_BOT_LENGTH (1 << MAP_BOT_BITS) +#define MAP_BOT_MASK (MAP_BOT_LENGTH - 1) + +#define MAP_BOT_SHIFT ARENA_BITS +#define MAP_MID_SHIFT (MAP_BOT_BITS + MAP_BOT_SHIFT) +#define MAP_TOP_SHIFT (MAP_MID_BITS + MAP_MID_SHIFT) + +#define AS_UINT(p) ((uintptr_t)(p)) +#define MAP_BOT_INDEX(p) ((AS_UINT(p) >> MAP_BOT_SHIFT) & MAP_BOT_MASK) +#define MAP_MID_INDEX(p) ((AS_UINT(p) >> MAP_MID_SHIFT) & MAP_MID_MASK) +#define MAP_TOP_INDEX(p) ((AS_UINT(p) >> MAP_TOP_SHIFT) & MAP_TOP_MASK) + +#if IGNORE_BITS > 0 +/* Return the ignored part of the pointer address. Those bits should be same + * for all valid pointers if IGNORE_BITS is set correctly. + */ +#define HIGH_BITS(p) (AS_UINT(p) >> ADDRESS_BITS) +#else +#define HIGH_BITS(p) 0 +#endif + + +/* This is the leaf of the radix tree. See arena_map_mark_used() for the + * meaning of these members. */ +typedef struct { + int32_t tail_hi; + int32_t tail_lo; +} arena_coverage_t; + +typedef struct arena_map_bot { + /* The members tail_hi and tail_lo are accessed together. So, it + * better to have them as an array of structs, rather than two + * arrays. + */ + arena_coverage_t arenas[MAP_BOT_LENGTH]; +} arena_map_bot_t; + +#ifdef USE_INTERIOR_NODES +typedef struct arena_map_mid { + struct arena_map_bot *ptrs[MAP_MID_LENGTH]; +} arena_map_mid_t; + +typedef struct arena_map_top { + struct arena_map_mid *ptrs[MAP_TOP_LENGTH]; +} arena_map_top_t; +#endif + +struct _obmalloc_usage { + /* The root of radix tree. Note that by initializing like this, the memory + * should be in the BSS. The OS will only memory map pages as the MAP_MID + * nodes get used (OS pages are demand loaded as needed). + */ +#ifdef USE_INTERIOR_NODES + arena_map_top_t arena_map_root; + /* accounting for number of used interior nodes */ + int arena_map_mid_count; + int arena_map_bot_count; +#else + arena_map_bot_t arena_map_root; +#endif +}; + +#endif /* WITH_PYMALLOC_RADIX_TREE */ + + +struct _obmalloc_state { + struct _obmalloc_pools pools; + struct _obmalloc_mgmt mgmt; + struct _obmalloc_usage usage; +}; + + +#undef uint + + +/* Allocate memory directly from the O/S virtual memory system, + * where supported. 
Otherwise fallback on malloc */ +void *_PyObject_VirtualAlloc(size_t size); +void _PyObject_VirtualFree(void *, size_t size); + + +/* This function returns the number of allocated memory blocks, regardless of size */ +PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); + + +#ifdef WITH_PYMALLOC +// Export the symbol for the 3rd party guppy3 project +PyAPI_FUNC(int) _PyObject_DebugMallocStats(FILE *out); +#endif + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_OBMALLOC_H diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h new file mode 100644 index 00000000000000..c0fb057d06652b --- /dev/null +++ b/Include/internal/pycore_obmalloc_init.h @@ -0,0 +1,68 @@ +#ifndef Py_INTERNAL_OBMALLOC_INIT_H +#define Py_INTERNAL_OBMALLOC_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +/****************************************************/ +/* the default object allocator's state initializer */ + +#define PTA(pools, x) \ + ((poolp )((uint8_t *)&(pools.used[2*(x)]) - 2*sizeof(pymem_block *))) +#define PT(p, x) PTA(p, x), PTA(p, x) + +#define PT_8(p, start) \ + PT(p, start), \ + PT(p, start+1), \ + PT(p, start+2), \ + PT(p, start+3), \ + PT(p, start+4), \ + PT(p, start+5), \ + PT(p, start+6), \ + PT(p, start+7) + +#if NB_SMALL_SIZE_CLASSES <= 8 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0) } +#elif NB_SMALL_SIZE_CLASSES <= 16 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8) } +#elif NB_SMALL_SIZE_CLASSES <= 24 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16) } +#elif NB_SMALL_SIZE_CLASSES <= 32 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24) } +#elif NB_SMALL_SIZE_CLASSES <= 40 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32) } +#elif NB_SMALL_SIZE_CLASSES <= 48 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40) } +#elif NB_SMALL_SIZE_CLASSES <= 56 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40), PT_8(p, 48) } +#elif NB_SMALL_SIZE_CLASSES <= 64 +# define _obmalloc_pools_INIT(p) \ + { PT_8(p, 0), PT_8(p, 8), PT_8(p, 16), PT_8(p, 24), PT_8(p, 32), PT_8(p, 40), PT_8(p, 48), PT_8(p, 56) } +#else +# error "NB_SMALL_SIZE_CLASSES should be less than 64" +#endif + +#define _obmalloc_state_INIT(obmalloc) \ + { \ + .pools = { \ + .used = _obmalloc_pools_INIT(obmalloc.pools), \ + }, \ + } + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_OBMALLOC_INIT_H diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index 15925511cc1f41..0d31ca166a7d2e 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -1,4 +1,4 @@ -// Auto-generated by Tools/scripts/generate_opcode_h.py from Lib/opcode.py +// Auto-generated by Tools/build/generate_opcode_h.py from Lib/opcode.py #ifndef Py_INTERNAL_OPCODE_H #define Py_INTERNAL_OPCODE_H @@ -58,7 +58,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BEFORE_ASYNC_WITH] = BEFORE_ASYNC_WITH, [BEFORE_WITH] = BEFORE_WITH, [BINARY_OP] = BINARY_OP, - [BINARY_OP_ADAPTIVE] = BINARY_OP, [BINARY_OP_ADD_FLOAT] = BINARY_OP, [BINARY_OP_ADD_INT] = BINARY_OP, [BINARY_OP_ADD_UNICODE] = BINARY_OP, @@ -69,7 +68,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BINARY_OP_SUBTRACT_INT] = BINARY_OP, [BINARY_SLICE] = BINARY_SLICE, [BINARY_SUBSCR] = 
BINARY_SUBSCR, - [BINARY_SUBSCR_ADAPTIVE] = BINARY_SUBSCR, [BINARY_SUBSCR_DICT] = BINARY_SUBSCR, [BINARY_SUBSCR_GETITEM] = BINARY_SUBSCR, [BINARY_SUBSCR_LIST_INT] = BINARY_SUBSCR, @@ -83,7 +81,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [BUILD_TUPLE] = BUILD_TUPLE, [CACHE] = CACHE, [CALL] = CALL, - [CALL_ADAPTIVE] = CALL, [CALL_BOUND_METHOD_EXACT_ARGS] = CALL, [CALL_BUILTIN_CLASS] = CALL, [CALL_BUILTIN_FAST_WITH_KEYWORDS] = CALL, @@ -106,7 +103,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [CHECK_EXC_MATCH] = CHECK_EXC_MATCH, [CLEANUP_THROW] = CLEANUP_THROW, [COMPARE_OP] = COMPARE_OP, - [COMPARE_OP_ADAPTIVE] = COMPARE_OP, [COMPARE_OP_FLOAT_JUMP] = COMPARE_OP, [COMPARE_OP_INT_JUMP] = COMPARE_OP, [COMPARE_OP_STR_JUMP] = COMPARE_OP, @@ -122,11 +118,11 @@ const uint8_t _PyOpcode_Deopt[256] = { [DICT_MERGE] = DICT_MERGE, [DICT_UPDATE] = DICT_UPDATE, [END_ASYNC_FOR] = END_ASYNC_FOR, + [END_FOR] = END_FOR, [EXTENDED_ARG] = EXTENDED_ARG, - [EXTENDED_ARG_QUICK] = EXTENDED_ARG, [FORMAT_VALUE] = FORMAT_VALUE, [FOR_ITER] = FOR_ITER, - [FOR_ITER_ADAPTIVE] = FOR_ITER, + [FOR_ITER_GEN] = FOR_ITER, [FOR_ITER_LIST] = FOR_ITER, [FOR_ITER_RANGE] = FOR_ITER, [GET_AITER] = GET_AITER, @@ -138,10 +134,10 @@ const uint8_t _PyOpcode_Deopt[256] = { [IMPORT_FROM] = IMPORT_FROM, [IMPORT_NAME] = IMPORT_NAME, [IMPORT_STAR] = IMPORT_STAR, + [INTERPRETER_EXIT] = INTERPRETER_EXIT, [IS_OP] = IS_OP, [JUMP_BACKWARD] = JUMP_BACKWARD, [JUMP_BACKWARD_NO_INTERRUPT] = JUMP_BACKWARD_NO_INTERRUPT, - [JUMP_BACKWARD_QUICK] = JUMP_BACKWARD, [JUMP_FORWARD] = JUMP_FORWARD, [JUMP_IF_FALSE_OR_POP] = JUMP_IF_FALSE_OR_POP, [JUMP_IF_TRUE_OR_POP] = JUMP_IF_TRUE_OR_POP, @@ -151,7 +147,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [LIST_TO_TUPLE] = LIST_TO_TUPLE, [LOAD_ASSERTION_ERROR] = LOAD_ASSERTION_ERROR, [LOAD_ATTR] = LOAD_ATTR, - [LOAD_ATTR_ADAPTIVE] = LOAD_ATTR, [LOAD_ATTR_CLASS] = LOAD_ATTR, [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = LOAD_ATTR, [LOAD_ATTR_INSTANCE_VALUE] = LOAD_ATTR, @@ -174,7 +169,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_FAST__LOAD_CONST] = LOAD_FAST, [LOAD_FAST__LOAD_FAST] = LOAD_FAST, [LOAD_GLOBAL] = LOAD_GLOBAL, - [LOAD_GLOBAL_ADAPTIVE] = LOAD_GLOBAL, [LOAD_GLOBAL_BUILTIN] = LOAD_GLOBAL, [LOAD_GLOBAL_MODULE] = LOAD_GLOBAL, [LOAD_NAME] = LOAD_NAME, @@ -199,15 +193,14 @@ const uint8_t _PyOpcode_Deopt[256] = { [RAISE_VARARGS] = RAISE_VARARGS, [RERAISE] = RERAISE, [RESUME] = RESUME, - [RESUME_QUICK] = RESUME, [RETURN_GENERATOR] = RETURN_GENERATOR, [RETURN_VALUE] = RETURN_VALUE, [SEND] = SEND, [SETUP_ANNOTATIONS] = SETUP_ANNOTATIONS, [SET_ADD] = SET_ADD, [SET_UPDATE] = SET_UPDATE, + [STOPITERATION_ERROR] = STOPITERATION_ERROR, [STORE_ATTR] = STORE_ATTR, - [STORE_ATTR_ADAPTIVE] = STORE_ATTR, [STORE_ATTR_INSTANCE_VALUE] = STORE_ATTR, [STORE_ATTR_SLOT] = STORE_ATTR, [STORE_ATTR_WITH_HINT] = STORE_ATTR, @@ -219,7 +212,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [STORE_NAME] = STORE_NAME, [STORE_SLICE] = STORE_SLICE, [STORE_SUBSCR] = STORE_SUBSCR, - [STORE_SUBSCR_ADAPTIVE] = STORE_SUBSCR, [STORE_SUBSCR_DICT] = STORE_SUBSCR, [STORE_SUBSCR_LIST_INT] = STORE_SUBSCR, [SWAP] = SWAP, @@ -229,7 +221,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [UNARY_POSITIVE] = UNARY_POSITIVE, [UNPACK_EX] = UNPACK_EX, [UNPACK_SEQUENCE] = UNPACK_SEQUENCE, - [UNPACK_SEQUENCE_ADAPTIVE] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_LIST] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_TUPLE] = UNPACK_SEQUENCE, [UNPACK_SEQUENCE_TWO_TUPLE] = UNPACK_SEQUENCE, @@ -243,42 +234,41 @@ static const char *const _PyOpcode_OpName[263] = { [CACHE] = "CACHE", [POP_TOP] = "POP_TOP", 
[PUSH_NULL] = "PUSH_NULL", - [BINARY_OP_ADAPTIVE] = "BINARY_OP_ADAPTIVE", + [INTERPRETER_EXIT] = "INTERPRETER_EXIT", + [END_FOR] = "END_FOR", [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", - [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [NOP] = "NOP", [UNARY_POSITIVE] = "UNARY_POSITIVE", [UNARY_NEGATIVE] = "UNARY_NEGATIVE", [UNARY_NOT] = "UNARY_NOT", + [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", - [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", [UNARY_INVERT] = "UNARY_INVERT", + [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", [BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT", - [BINARY_SUBSCR_ADAPTIVE] = "BINARY_SUBSCR_ADAPTIVE", [BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT", [BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM", [BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT", [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", - [CALL_ADAPTIVE] = "CALL_ADAPTIVE", [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", + [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", [BINARY_SUBSCR] = "BINARY_SUBSCR", [BINARY_SLICE] = "BINARY_SLICE", [STORE_SLICE] = "STORE_SLICE", - [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", [GET_LEN] = "GET_LEN", [MATCH_MAPPING] = "MATCH_MAPPING", [MATCH_SEQUENCE] = "MATCH_SEQUENCE", [MATCH_KEYS] = "MATCH_KEYS", - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", [PUSH_EXC_INFO] = "PUSH_EXC_INFO", [CHECK_EXC_MATCH] = "CHECK_EXC_MATCH", [CHECK_EG_MATCH] = "CHECK_EG_MATCH", - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", [CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST", [CALL_NO_KW_BUILTIN_O] = "CALL_NO_KW_BUILTIN_O", [CALL_NO_KW_ISINSTANCE] = "CALL_NO_KW_ISINSTANCE", @@ -289,6 +279,7 @@ static const char *const _PyOpcode_OpName[263] = { [CALL_NO_KW_METHOD_DESCRIPTOR_O] = "CALL_NO_KW_METHOD_DESCRIPTOR_O", [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [CALL_NO_KW_TUPLE_1] = "CALL_NO_KW_TUPLE_1", + [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", [WITH_EXCEPT_START] = "WITH_EXCEPT_START", [GET_AITER] = "GET_AITER", [GET_ANEXT] = "GET_ANEXT", @@ -296,37 +287,37 @@ static const char *const _PyOpcode_OpName[263] = { [BEFORE_WITH] = "BEFORE_WITH", [END_ASYNC_FOR] = "END_ASYNC_FOR", [CLEANUP_THROW] = "CLEANUP_THROW", - [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", - [COMPARE_OP_ADAPTIVE] = "COMPARE_OP_ADAPTIVE", [COMPARE_OP_FLOAT_JUMP] = "COMPARE_OP_FLOAT_JUMP", [COMPARE_OP_INT_JUMP] = "COMPARE_OP_INT_JUMP", - [STORE_SUBSCR] = "STORE_SUBSCR", - [DELETE_SUBSCR] = "DELETE_SUBSCR", [COMPARE_OP_STR_JUMP] = "COMPARE_OP_STR_JUMP", - [EXTENDED_ARG_QUICK] = "EXTENDED_ARG_QUICK", - [FOR_ITER_ADAPTIVE] = "FOR_ITER_ADAPTIVE", [FOR_ITER_LIST] = "FOR_ITER_LIST", + [STORE_SUBSCR] = "STORE_SUBSCR", + [DELETE_SUBSCR] = "DELETE_SUBSCR", [FOR_ITER_RANGE] = "FOR_ITER_RANGE", - [JUMP_BACKWARD_QUICK] = "JUMP_BACKWARD_QUICK", + [STOPITERATION_ERROR] = "STOPITERATION_ERROR", + [FOR_ITER_GEN] = "FOR_ITER_GEN", + [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", + [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = 
"LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", + [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", [GET_ITER] = "GET_ITER", [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", [PRINT_EXPR] = "PRINT_EXPR", [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", - [LOAD_ATTR_ADAPTIVE] = "LOAD_ATTR_ADAPTIVE", - [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", - [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", - [RETURN_GENERATOR] = "RETURN_GENERATOR", - [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", - [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", + [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", + [RETURN_GENERATOR] = "RETURN_GENERATOR", [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", + [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", + [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", + [LOAD_ATTR_METHOD_WITH_DICT] = "LOAD_ATTR_METHOD_WITH_DICT", + [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", [LIST_TO_TUPLE] = "LIST_TO_TUPLE", [RETURN_VALUE] = "RETURN_VALUE", [IMPORT_STAR] = "IMPORT_STAR", [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", - [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", + [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", [ASYNC_GEN_WRAP] = "ASYNC_GEN_WRAP", [PREP_RERAISE_STAR] = "PREP_RERAISE_STAR", [POP_EXCEPT] = "POP_EXCEPT", @@ -353,7 +344,7 @@ static const char *const _PyOpcode_OpName[263] = { [JUMP_FORWARD] = "JUMP_FORWARD", [JUMP_IF_FALSE_OR_POP] = "JUMP_IF_FALSE_OR_POP", [JUMP_IF_TRUE_OR_POP] = "JUMP_IF_TRUE_OR_POP", - [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", + [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", [POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE", [LOAD_GLOBAL] = "LOAD_GLOBAL", @@ -361,7 +352,7 @@ static const char *const _PyOpcode_OpName[263] = { [CONTAINS_OP] = "CONTAINS_OP", [RERAISE] = "RERAISE", [COPY] = "COPY", - [LOAD_ATTR_METHOD_WITH_DICT] = "LOAD_ATTR_METHOD_WITH_DICT", + [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", [BINARY_OP] = "BINARY_OP", [SEND] = "SEND", [LOAD_FAST] = "LOAD_FAST", @@ -381,9 +372,9 @@ static const char *const _PyOpcode_OpName[263] = { [STORE_DEREF] = "STORE_DEREF", [DELETE_DEREF] = "DELETE_DEREF", [JUMP_BACKWARD] = "JUMP_BACKWARD", - [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", + [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", [CALL_FUNCTION_EX] = "CALL_FUNCTION_EX", - [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", + [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", [EXTENDED_ARG] = "EXTENDED_ARG", [LIST_APPEND] = "LIST_APPEND", [SET_ADD] = "SET_ADD", @@ -393,34 +384,34 @@ static const char *const _PyOpcode_OpName[263] = { [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", - [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", - [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", + [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", + [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", [FORMAT_VALUE] = "FORMAT_VALUE", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", - [LOAD_GLOBAL_ADAPTIVE] = "LOAD_GLOBAL_ADAPTIVE", - [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", - [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", - [RESUME_QUICK] = "RESUME_QUICK", - [LIST_EXTEND] = "LIST_EXTEND", - [SET_UPDATE] = "SET_UPDATE", - [DICT_MERGE] = "DICT_MERGE", - [DICT_UPDATE] = "DICT_UPDATE", - [STORE_ATTR_ADAPTIVE] = "STORE_ATTR_ADAPTIVE", - 
[STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", - [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", - [CALL] = "CALL", - [KW_NAMES] = "KW_NAMES", [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST", - [STORE_SUBSCR_ADAPTIVE] = "STORE_SUBSCR_ADAPTIVE", [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", + [LIST_EXTEND] = "LIST_EXTEND", + [SET_UPDATE] = "SET_UPDATE", + [DICT_MERGE] = "DICT_MERGE", + [DICT_UPDATE] = "DICT_UPDATE", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", - [UNPACK_SEQUENCE_ADAPTIVE] = "UNPACK_SEQUENCE_ADAPTIVE", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", + [170] = "<170>", + [CALL] = "CALL", + [KW_NAMES] = "KW_NAMES", + [173] = "<173>", + [174] = "<174>", + [175] = "<175>", + [176] = "<176>", + [177] = "<177>", + [178] = "<178>", + [179] = "<179>", + [180] = "<180>", [181] = "<181>", [182] = "<182>", [183] = "<183>", @@ -507,6 +498,15 @@ static const char *const _PyOpcode_OpName[263] = { #endif #define EXTRA_CASES \ + case 170: \ + case 173: \ + case 174: \ + case 175: \ + case 176: \ + case 177: \ + case 178: \ + case 179: \ + case 180: \ case 181: \ case 182: \ case 183: \ diff --git a/Include/internal/pycore_pyhash.h b/Include/internal/pycore_pyhash.h index a229f8d8b7f0a2..34dfa53771288e 100644 --- a/Include/internal/pycore_pyhash.h +++ b/Include/internal/pycore_pyhash.h @@ -5,6 +5,36 @@ # error "this header requires Py_BUILD_CORE define" #endif -uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); +struct pyhash_runtime_state { + struct { +#ifndef MS_WINDOWS + int fd; + dev_t st_dev; + ino_t st_ino; +#else + // This is a placeholder so the struct isn't empty on Windows. + int _not_used; +#endif + } urandom_cache; +}; + +#ifndef MS_WINDOWS +# define _py_urandom_cache_INIT \ + { \ + .fd = -1, \ + } +#else +# define _py_urandom_cache_INIT {0} #endif + +#define pyhash_state_INIT \ + { \ + .urandom_cache = _py_urandom_cache_INIT, \ + } + + +uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); + + +#endif // Py_INTERNAL_HASH_H diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index b4718b8ade2354..4c0ffa7a9b1ab7 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -14,10 +14,6 @@ extern "C" { struct _PyArgv; struct pyruntimestate; -/* True if the main interpreter thread exited due to an unhandled - * KeyboardInterrupt exception, suggesting the user pressed ^C. 
*/ -PyAPI_DATA(int) _Py_UnhandledKeyboardInterrupt; - extern int _Py_SetFileSystemEncoding( const char *encoding, const char *errors); @@ -33,6 +29,8 @@ PyAPI_FUNC(int) _Py_IsLocaleCoercionTarget(const char *ctype_loc); /* Various one-time initializers */ +extern void _Py_InitVersion(void); +extern PyStatus _PyImport_Init(void); extern PyStatus _PyFaulthandler_Init(int enable); extern int _PyTraceMalloc_Init(int enable); extern PyObject * _PyBuiltin_Init(PyInterpreterState *interp); diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index b9eea9d4b30ad1..5749af7465f6f0 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -11,6 +11,27 @@ extern "C" { #include "pymem.h" // PyMemAllocatorName +typedef struct { + /* We tag each block with an API ID in order to tag API violations */ + char api_id; + PyMemAllocatorEx alloc; +} debug_alloc_api_t; + +struct _pymem_allocators { + struct { + PyMemAllocatorEx raw; + PyMemAllocatorEx mem; + PyMemAllocatorEx obj; + } standard; + struct { + debug_alloc_api_t raw; + debug_alloc_api_t mem; + debug_alloc_api_t obj; + } debug; + PyObjectArenaAllocator obj_arena; +}; + + /* Set the memory allocator of the specified domain to the default. Save the old allocator into *old_alloc if it's non-NULL. Return on success, or return -1 if the domain is unknown. */ @@ -92,22 +113,6 @@ struct _PyTraceMalloc_Config { .tracing = 0, \ .max_nframe = 1} -PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; - -/* Allocate memory directly from the O/S virtual memory system, - * where supported. Otherwise fallback on malloc */ -void *_PyObject_VirtualAlloc(size_t size); -void _PyObject_VirtualFree(void *, size_t size); - -/* This function returns the number of allocated memory blocks, regardless of size */ -PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); - -/* Macros */ -#ifdef WITH_PYMALLOC -// Export the symbol for the 3rd party guppy3 project -PyAPI_FUNC(int) _PyObject_DebugMallocStats(FILE *out); -#endif - #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_pymem_init.h b/Include/internal/pycore_pymem_init.h new file mode 100644 index 00000000000000..78232738cb09d5 --- /dev/null +++ b/Include/internal/pycore_pymem_init.h @@ -0,0 +1,85 @@ +#ifndef Py_INTERNAL_PYMEM_INIT_H +#define Py_INTERNAL_PYMEM_INIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_pymem.h" + + +/********************************/ +/* the allocators' initializers */ + +extern void * _PyMem_RawMalloc(void *, size_t); +extern void * _PyMem_RawCalloc(void *, size_t, size_t); +extern void * _PyMem_RawRealloc(void *, void *, size_t); +extern void _PyMem_RawFree(void *, void *); +#define PYRAW_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} + +#ifdef WITH_PYMALLOC +extern void* _PyObject_Malloc(void *, size_t); +extern void* _PyObject_Calloc(void *, size_t, size_t); +extern void _PyObject_Free(void *, void *); +extern void* _PyObject_Realloc(void *, void *, size_t); +# define PYOBJ_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} +#else +# define PYOBJ_ALLOC PYRAW_ALLOC +#endif // WITH_PYMALLOC + +#define PYMEM_ALLOC PYOBJ_ALLOC + +extern void* _PyMem_DebugRawMalloc(void *, size_t); +extern void* _PyMem_DebugRawCalloc(void *, size_t, size_t); +extern void* _PyMem_DebugRawRealloc(void *, void *, size_t); +extern void _PyMem_DebugRawFree(void *, void *); + 
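For context on how these allocator tables are meant to be overridden, here is a minimal embedder-side sketch (not part of this header) that swaps in a wrapper through the public PyMem_GetAllocator()/PyMem_SetAllocator() API; the hook has the same shape as the debug wrappers declared around this point, and the names counting_raw_malloc and install_hook are hypothetical:

    #include <Python.h>

    static PyMemAllocatorEx orig_raw;          /* the saved "standard" raw allocator */

    static void *
    counting_raw_malloc(void *ctx, size_t size)
    {
        /* ...instrument here (counting, logging, poisoning)... then delegate. */
        return orig_raw.malloc(orig_raw.ctx, size);
    }

    static void
    install_hook(void)                         /* call before Py_Initialize() */
    {
        PyMemAllocatorEx hooked;
        PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &orig_raw);
        hooked = orig_raw;                     /* keep calloc/realloc/free as-is */
        hooked.malloc = counting_raw_malloc;
        PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &hooked);
    }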
+extern void* _PyMem_DebugMalloc(void *, size_t); +extern void* _PyMem_DebugCalloc(void *, size_t, size_t); +extern void* _PyMem_DebugRealloc(void *, void *, size_t); +extern void _PyMem_DebugFree(void *, void *); + +#define PYDBGRAW_ALLOC(runtime) \ + {&(runtime).allocators.debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} +#define PYDBGMEM_ALLOC(runtime) \ + {&(runtime).allocators.debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} +#define PYDBGOBJ_ALLOC(runtime) \ + {&(runtime).allocators.debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} + +extern void * _PyMem_ArenaAlloc(void *, size_t); +extern void _PyMem_ArenaFree(void *, void *, size_t); + +#ifdef Py_DEBUG +# define _pymem_allocators_standard_INIT(runtime) \ + { \ + PYDBGRAW_ALLOC(runtime), \ + PYDBGMEM_ALLOC(runtime), \ + PYDBGOBJ_ALLOC(runtime), \ + } +#else +# define _pymem_allocators_standard_INIT(runtime) \ + { \ + PYRAW_ALLOC, \ + PYMEM_ALLOC, \ + PYOBJ_ALLOC, \ + } +#endif + +#define _pymem_allocators_debug_INIT \ + { \ + {'r', PYRAW_ALLOC}, \ + {'m', PYMEM_ALLOC}, \ + {'o', PYOBJ_ALLOC}, \ + } + +# define _pymem_allocators_obj_arena_INIT \ + { NULL, _PyMem_ArenaAlloc, _PyMem_ArenaFree } + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_PYMEM_INIT_H diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index d1fbc09f1ea206..c1829cb1bdadeb 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -9,13 +9,22 @@ extern "C" { #endif #include "pycore_atomic.h" /* _Py_atomic_address */ +#include "pycore_dict_state.h" // struct _Py_dict_runtime_state +#include "pycore_dtoa.h" // struct _dtoa_runtime_state +#include "pycore_floatobject.h" // struct _Py_float_runtime_state +#include "pycore_function.h" // struct _func_runtime_state #include "pycore_gil.h" // struct _gil_runtime_state #include "pycore_global_objects.h" // struct _Py_global_objects +#include "pycore_import.h" // struct _import_runtime_state #include "pycore_interp.h" // PyInterpreterState +#include "pycore_pymem.h" // struct _pymem_allocators +#include "pycore_pyhash.h" // struct pyhash_runtime_state +#include "pycore_obmalloc.h" // struct obmalloc_state #include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids struct _getargs_runtime_state { - PyThread_type_lock mutex; + PyThread_type_lock mutex; + struct _PyArg_Parser *static_parsers; }; /* ceval state */ @@ -85,6 +94,15 @@ typedef struct pyruntimestate { to access it, don't access it directly. */ _Py_atomic_address _finalizing; + struct _pymem_allocators allocators; + struct _obmalloc_state obmalloc; + struct pyhash_runtime_state pyhash_state; + struct { + /* True if the main interpreter thread exited due to an unhandled + * KeyboardInterrupt exception, suggesting the user pressed ^C. */ + int unhandled_keyboard_interrupt; + } signals; + struct pyinterpreters { PyThread_type_lock mutex; /* The linked list of interpreters, newest first. 
*/ @@ -115,9 +133,15 @@ typedef struct pyruntimestate { void (*exitfuncs[NEXITFUNCS])(void); int nexitfuncs; + struct _import_runtime_state imports; struct _ceval_runtime_state ceval; struct _gilstate_runtime_state gilstate; struct _getargs_runtime_state getargs; + struct { + struct _PyTraceMalloc_Config config; + } tracemalloc; + struct _dtoa_runtime_state dtoa; + struct _fileutils_state fileutils; PyPreConfig preconfig; @@ -127,9 +151,20 @@ typedef struct pyruntimestate { void *open_code_userdata; _Py_AuditHookEntry *audit_hook_head; - struct _Py_unicode_runtime_ids unicode_ids; + struct _Py_float_runtime_state float_state; + struct _Py_unicode_runtime_state unicode_state; + struct _Py_dict_runtime_state dict_state; + struct _py_func_runtime_state func_state; + + struct { + /* Used to set PyTypeObject.tp_version_tag */ + // bpo-42745: next_version_tag remains shared by all interpreters + // because of static types. + unsigned int next_version_tag; + } types; /* All the objects that are shared by the runtime's interpreters. */ + struct _Py_cached_objects cached_objects; struct _Py_global_objects global_objects; /* The following fields are here to avoid allocation during init. diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 8dd7a3128c6665..ab53876e355fd8 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -9,26 +9,86 @@ extern "C" { #endif #include "pycore_object.h" +#include "pycore_pymem_init.h" +#include "pycore_obmalloc_init.h" /* The static initializers defined here should only be used in the runtime init code (in pystate.c and pylifecycle.c). */ -#define _PyRuntimeState_INIT \ +#define _PyRuntimeState_INIT(runtime) \ { \ + .allocators = { \ + _pymem_allocators_standard_INIT(runtime), \ + _pymem_allocators_debug_INIT, \ + _pymem_allocators_obj_arena_INIT, \ + }, \ + .obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \ + .pyhash_state = pyhash_state_INIT, \ + .interpreters = { \ + /* This prevents interpreters from getting created \ + until _PyInterpreterState_Enable() is called. */ \ + .next_id = -1, \ + }, \ + .imports = { \ + .lock = { \ + .mutex = NULL, \ + .thread = PYTHREAD_INVALID_THREAD_ID, \ + .level = 0, \ + }, \ + .find_and_load = { \ + .header = 1, \ + }, \ + }, \ .gilstate = { \ .check_enabled = 1, \ /* A TSS key must be initialized with Py_tss_NEEDS_INIT \ in accordance with the specification. */ \ .autoTSSkey = Py_tss_NEEDS_INIT, \ }, \ - .interpreters = { \ - /* This prevents interpreters from getting created \ - until _PyInterpreterState_Enable() is called. 
*/ \ - .next_id = -1, \ + .tracemalloc = { \ + .config = _PyTraceMalloc_Config_INIT, \ + }, \ + .dtoa = _dtoa_runtime_state_INIT(runtime), \ + .fileutils = { \ + .force_ascii = -1, \ + }, \ + .float_state = { \ + .float_format = _py_float_format_unknown, \ + .double_format = _py_float_format_unknown, \ + }, \ + .dict_state = { \ + .next_keys_version = 2, \ + }, \ + .func_state = { \ + .next_version = 1, \ + }, \ + .types = { \ + .next_version_tag = 1, \ + }, \ + .global_objects = { \ + .singletons = { \ + .small_ints = _Py_small_ints_INIT, \ + .bytes_empty = _PyBytes_SIMPLE_INIT(0, 0), \ + .bytes_characters = _Py_bytes_characters_INIT, \ + .strings = { \ + .literals = _Py_str_literals_INIT, \ + .identifiers = _Py_str_identifiers_INIT, \ + .ascii = _Py_str_ascii_INIT, \ + .latin1 = _Py_str_latin1_INIT, \ + }, \ + .tuple_empty = { \ + .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \ + }, \ + .hamt_bitmap_node_empty = { \ + .ob_base = _PyVarObject_IMMORTAL_INIT(&_PyHamt_BitmapNode_Type, 0) \ + }, \ + .context_token_missing = { \ + .ob_base = _PyObject_IMMORTAL_INIT(&_PyContextTokenMissing_Type), \ + }, \ + }, \ }, \ - .global_objects = _Py_global_objects_INIT, \ ._main_interpreter = _PyInterpreterState_INIT, \ } @@ -47,7 +107,6 @@ extern "C" { #define _PyInterpreterState_INIT \ { \ - ._static = 1, \ .id_refcount = -1, \ DLOPENFLAGS_INIT \ .ceval = { \ @@ -62,12 +121,20 @@ extern "C" { { .threshold = 10, }, \ }, \ }, \ + .static_objects = { \ + .singletons = { \ + ._not_used = 1, \ + .hamt_empty = { \ + .ob_base = _PyObject_IMMORTAL_INIT(&_PyHamt_Type), \ + .h_root = (PyHamtNode*)&_Py_SINGLETON(hamt_bitmap_node_empty), \ + }, \ + }, \ + }, \ ._initial_thread = _PyThreadState_INIT, \ } #define _PyThreadState_INIT \ { \ - ._static = 1, \ .py_recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \ .context_ver = 1, \ } @@ -110,9 +177,9 @@ extern "C" { ._data = (LITERAL) \ } #define INIT_STR(NAME, LITERAL) \ - ._ ## NAME = _PyASCIIObject_INIT(LITERAL) + ._py_ ## NAME = _PyASCIIObject_INIT(LITERAL) #define INIT_ID(NAME) \ - ._ ## NAME = _PyASCIIObject_INIT(#NAME) + ._py_ ## NAME = _PyASCIIObject_INIT(#NAME) #define _PyUnicode_LATIN1_INIT(LITERAL, UTF8) \ { \ ._latin1 = { \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 32ff57b731e1a0..87b0f2ed8dfa8c 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -8,8192 +8,1478 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -/* The following is auto-generated by Tools/scripts/generate_global_objects.py. 
*/ -#define _Py_global_objects_INIT { \ - .singletons = { \ - .small_ints = { \ - _PyLong_DIGIT_INIT(-5), \ - _PyLong_DIGIT_INIT(-4), \ - _PyLong_DIGIT_INIT(-3), \ - _PyLong_DIGIT_INIT(-2), \ - _PyLong_DIGIT_INIT(-1), \ - _PyLong_DIGIT_INIT(0), \ - _PyLong_DIGIT_INIT(1), \ - _PyLong_DIGIT_INIT(2), \ - _PyLong_DIGIT_INIT(3), \ - _PyLong_DIGIT_INIT(4), \ - _PyLong_DIGIT_INIT(5), \ - _PyLong_DIGIT_INIT(6), \ - _PyLong_DIGIT_INIT(7), \ - _PyLong_DIGIT_INIT(8), \ - _PyLong_DIGIT_INIT(9), \ - _PyLong_DIGIT_INIT(10), \ - _PyLong_DIGIT_INIT(11), \ - _PyLong_DIGIT_INIT(12), \ - _PyLong_DIGIT_INIT(13), \ - _PyLong_DIGIT_INIT(14), \ - _PyLong_DIGIT_INIT(15), \ - _PyLong_DIGIT_INIT(16), \ - _PyLong_DIGIT_INIT(17), \ - _PyLong_DIGIT_INIT(18), \ - _PyLong_DIGIT_INIT(19), \ - _PyLong_DIGIT_INIT(20), \ - _PyLong_DIGIT_INIT(21), \ - _PyLong_DIGIT_INIT(22), \ - _PyLong_DIGIT_INIT(23), \ - _PyLong_DIGIT_INIT(24), \ - _PyLong_DIGIT_INIT(25), \ - _PyLong_DIGIT_INIT(26), \ - _PyLong_DIGIT_INIT(27), \ - _PyLong_DIGIT_INIT(28), \ - _PyLong_DIGIT_INIT(29), \ - _PyLong_DIGIT_INIT(30), \ - _PyLong_DIGIT_INIT(31), \ - _PyLong_DIGIT_INIT(32), \ - _PyLong_DIGIT_INIT(33), \ - _PyLong_DIGIT_INIT(34), \ - _PyLong_DIGIT_INIT(35), \ - _PyLong_DIGIT_INIT(36), \ - _PyLong_DIGIT_INIT(37), \ - _PyLong_DIGIT_INIT(38), \ - _PyLong_DIGIT_INIT(39), \ - _PyLong_DIGIT_INIT(40), \ - _PyLong_DIGIT_INIT(41), \ - _PyLong_DIGIT_INIT(42), \ - _PyLong_DIGIT_INIT(43), \ - _PyLong_DIGIT_INIT(44), \ - _PyLong_DIGIT_INIT(45), \ - _PyLong_DIGIT_INIT(46), \ - _PyLong_DIGIT_INIT(47), \ - _PyLong_DIGIT_INIT(48), \ - _PyLong_DIGIT_INIT(49), \ - _PyLong_DIGIT_INIT(50), \ - _PyLong_DIGIT_INIT(51), \ - _PyLong_DIGIT_INIT(52), \ - _PyLong_DIGIT_INIT(53), \ - _PyLong_DIGIT_INIT(54), \ - _PyLong_DIGIT_INIT(55), \ - _PyLong_DIGIT_INIT(56), \ - _PyLong_DIGIT_INIT(57), \ - _PyLong_DIGIT_INIT(58), \ - _PyLong_DIGIT_INIT(59), \ - _PyLong_DIGIT_INIT(60), \ - _PyLong_DIGIT_INIT(61), \ - _PyLong_DIGIT_INIT(62), \ - _PyLong_DIGIT_INIT(63), \ - _PyLong_DIGIT_INIT(64), \ - _PyLong_DIGIT_INIT(65), \ - _PyLong_DIGIT_INIT(66), \ - _PyLong_DIGIT_INIT(67), \ - _PyLong_DIGIT_INIT(68), \ - _PyLong_DIGIT_INIT(69), \ - _PyLong_DIGIT_INIT(70), \ - _PyLong_DIGIT_INIT(71), \ - _PyLong_DIGIT_INIT(72), \ - _PyLong_DIGIT_INIT(73), \ - _PyLong_DIGIT_INIT(74), \ - _PyLong_DIGIT_INIT(75), \ - _PyLong_DIGIT_INIT(76), \ - _PyLong_DIGIT_INIT(77), \ - _PyLong_DIGIT_INIT(78), \ - _PyLong_DIGIT_INIT(79), \ - _PyLong_DIGIT_INIT(80), \ - _PyLong_DIGIT_INIT(81), \ - _PyLong_DIGIT_INIT(82), \ - _PyLong_DIGIT_INIT(83), \ - _PyLong_DIGIT_INIT(84), \ - _PyLong_DIGIT_INIT(85), \ - _PyLong_DIGIT_INIT(86), \ - _PyLong_DIGIT_INIT(87), \ - _PyLong_DIGIT_INIT(88), \ - _PyLong_DIGIT_INIT(89), \ - _PyLong_DIGIT_INIT(90), \ - _PyLong_DIGIT_INIT(91), \ - _PyLong_DIGIT_INIT(92), \ - _PyLong_DIGIT_INIT(93), \ - _PyLong_DIGIT_INIT(94), \ - _PyLong_DIGIT_INIT(95), \ - _PyLong_DIGIT_INIT(96), \ - _PyLong_DIGIT_INIT(97), \ - _PyLong_DIGIT_INIT(98), \ - _PyLong_DIGIT_INIT(99), \ - _PyLong_DIGIT_INIT(100), \ - _PyLong_DIGIT_INIT(101), \ - _PyLong_DIGIT_INIT(102), \ - _PyLong_DIGIT_INIT(103), \ - _PyLong_DIGIT_INIT(104), \ - _PyLong_DIGIT_INIT(105), \ - _PyLong_DIGIT_INIT(106), \ - _PyLong_DIGIT_INIT(107), \ - _PyLong_DIGIT_INIT(108), \ - _PyLong_DIGIT_INIT(109), \ - _PyLong_DIGIT_INIT(110), \ - _PyLong_DIGIT_INIT(111), \ - _PyLong_DIGIT_INIT(112), \ - _PyLong_DIGIT_INIT(113), \ - _PyLong_DIGIT_INIT(114), \ - _PyLong_DIGIT_INIT(115), \ - _PyLong_DIGIT_INIT(116), \ - _PyLong_DIGIT_INIT(117), \ - 
_PyLong_DIGIT_INIT(118), \ - _PyLong_DIGIT_INIT(119), \ - _PyLong_DIGIT_INIT(120), \ - _PyLong_DIGIT_INIT(121), \ - _PyLong_DIGIT_INIT(122), \ - _PyLong_DIGIT_INIT(123), \ - _PyLong_DIGIT_INIT(124), \ - _PyLong_DIGIT_INIT(125), \ - _PyLong_DIGIT_INIT(126), \ - _PyLong_DIGIT_INIT(127), \ - _PyLong_DIGIT_INIT(128), \ - _PyLong_DIGIT_INIT(129), \ - _PyLong_DIGIT_INIT(130), \ - _PyLong_DIGIT_INIT(131), \ - _PyLong_DIGIT_INIT(132), \ - _PyLong_DIGIT_INIT(133), \ - _PyLong_DIGIT_INIT(134), \ - _PyLong_DIGIT_INIT(135), \ - _PyLong_DIGIT_INIT(136), \ - _PyLong_DIGIT_INIT(137), \ - _PyLong_DIGIT_INIT(138), \ - _PyLong_DIGIT_INIT(139), \ - _PyLong_DIGIT_INIT(140), \ - _PyLong_DIGIT_INIT(141), \ - _PyLong_DIGIT_INIT(142), \ - _PyLong_DIGIT_INIT(143), \ - _PyLong_DIGIT_INIT(144), \ - _PyLong_DIGIT_INIT(145), \ - _PyLong_DIGIT_INIT(146), \ - _PyLong_DIGIT_INIT(147), \ - _PyLong_DIGIT_INIT(148), \ - _PyLong_DIGIT_INIT(149), \ - _PyLong_DIGIT_INIT(150), \ - _PyLong_DIGIT_INIT(151), \ - _PyLong_DIGIT_INIT(152), \ - _PyLong_DIGIT_INIT(153), \ - _PyLong_DIGIT_INIT(154), \ - _PyLong_DIGIT_INIT(155), \ - _PyLong_DIGIT_INIT(156), \ - _PyLong_DIGIT_INIT(157), \ - _PyLong_DIGIT_INIT(158), \ - _PyLong_DIGIT_INIT(159), \ - _PyLong_DIGIT_INIT(160), \ - _PyLong_DIGIT_INIT(161), \ - _PyLong_DIGIT_INIT(162), \ - _PyLong_DIGIT_INIT(163), \ - _PyLong_DIGIT_INIT(164), \ - _PyLong_DIGIT_INIT(165), \ - _PyLong_DIGIT_INIT(166), \ - _PyLong_DIGIT_INIT(167), \ - _PyLong_DIGIT_INIT(168), \ - _PyLong_DIGIT_INIT(169), \ - _PyLong_DIGIT_INIT(170), \ - _PyLong_DIGIT_INIT(171), \ - _PyLong_DIGIT_INIT(172), \ - _PyLong_DIGIT_INIT(173), \ - _PyLong_DIGIT_INIT(174), \ - _PyLong_DIGIT_INIT(175), \ - _PyLong_DIGIT_INIT(176), \ - _PyLong_DIGIT_INIT(177), \ - _PyLong_DIGIT_INIT(178), \ - _PyLong_DIGIT_INIT(179), \ - _PyLong_DIGIT_INIT(180), \ - _PyLong_DIGIT_INIT(181), \ - _PyLong_DIGIT_INIT(182), \ - _PyLong_DIGIT_INIT(183), \ - _PyLong_DIGIT_INIT(184), \ - _PyLong_DIGIT_INIT(185), \ - _PyLong_DIGIT_INIT(186), \ - _PyLong_DIGIT_INIT(187), \ - _PyLong_DIGIT_INIT(188), \ - _PyLong_DIGIT_INIT(189), \ - _PyLong_DIGIT_INIT(190), \ - _PyLong_DIGIT_INIT(191), \ - _PyLong_DIGIT_INIT(192), \ - _PyLong_DIGIT_INIT(193), \ - _PyLong_DIGIT_INIT(194), \ - _PyLong_DIGIT_INIT(195), \ - _PyLong_DIGIT_INIT(196), \ - _PyLong_DIGIT_INIT(197), \ - _PyLong_DIGIT_INIT(198), \ - _PyLong_DIGIT_INIT(199), \ - _PyLong_DIGIT_INIT(200), \ - _PyLong_DIGIT_INIT(201), \ - _PyLong_DIGIT_INIT(202), \ - _PyLong_DIGIT_INIT(203), \ - _PyLong_DIGIT_INIT(204), \ - _PyLong_DIGIT_INIT(205), \ - _PyLong_DIGIT_INIT(206), \ - _PyLong_DIGIT_INIT(207), \ - _PyLong_DIGIT_INIT(208), \ - _PyLong_DIGIT_INIT(209), \ - _PyLong_DIGIT_INIT(210), \ - _PyLong_DIGIT_INIT(211), \ - _PyLong_DIGIT_INIT(212), \ - _PyLong_DIGIT_INIT(213), \ - _PyLong_DIGIT_INIT(214), \ - _PyLong_DIGIT_INIT(215), \ - _PyLong_DIGIT_INIT(216), \ - _PyLong_DIGIT_INIT(217), \ - _PyLong_DIGIT_INIT(218), \ - _PyLong_DIGIT_INIT(219), \ - _PyLong_DIGIT_INIT(220), \ - _PyLong_DIGIT_INIT(221), \ - _PyLong_DIGIT_INIT(222), \ - _PyLong_DIGIT_INIT(223), \ - _PyLong_DIGIT_INIT(224), \ - _PyLong_DIGIT_INIT(225), \ - _PyLong_DIGIT_INIT(226), \ - _PyLong_DIGIT_INIT(227), \ - _PyLong_DIGIT_INIT(228), \ - _PyLong_DIGIT_INIT(229), \ - _PyLong_DIGIT_INIT(230), \ - _PyLong_DIGIT_INIT(231), \ - _PyLong_DIGIT_INIT(232), \ - _PyLong_DIGIT_INIT(233), \ - _PyLong_DIGIT_INIT(234), \ - _PyLong_DIGIT_INIT(235), \ - _PyLong_DIGIT_INIT(236), \ - _PyLong_DIGIT_INIT(237), \ - _PyLong_DIGIT_INIT(238), \ - _PyLong_DIGIT_INIT(239), \ - 
_PyLong_DIGIT_INIT(240), \ - _PyLong_DIGIT_INIT(241), \ - _PyLong_DIGIT_INIT(242), \ - _PyLong_DIGIT_INIT(243), \ - _PyLong_DIGIT_INIT(244), \ - _PyLong_DIGIT_INIT(245), \ - _PyLong_DIGIT_INIT(246), \ - _PyLong_DIGIT_INIT(247), \ - _PyLong_DIGIT_INIT(248), \ - _PyLong_DIGIT_INIT(249), \ - _PyLong_DIGIT_INIT(250), \ - _PyLong_DIGIT_INIT(251), \ - _PyLong_DIGIT_INIT(252), \ - _PyLong_DIGIT_INIT(253), \ - _PyLong_DIGIT_INIT(254), \ - _PyLong_DIGIT_INIT(255), \ - _PyLong_DIGIT_INIT(256), \ - }, \ - \ - .bytes_empty = _PyBytes_SIMPLE_INIT(0, 0), \ - .bytes_characters = { \ - _PyBytes_CHAR_INIT(0), \ - _PyBytes_CHAR_INIT(1), \ - _PyBytes_CHAR_INIT(2), \ - _PyBytes_CHAR_INIT(3), \ - _PyBytes_CHAR_INIT(4), \ - _PyBytes_CHAR_INIT(5), \ - _PyBytes_CHAR_INIT(6), \ - _PyBytes_CHAR_INIT(7), \ - _PyBytes_CHAR_INIT(8), \ - _PyBytes_CHAR_INIT(9), \ - _PyBytes_CHAR_INIT(10), \ - _PyBytes_CHAR_INIT(11), \ - _PyBytes_CHAR_INIT(12), \ - _PyBytes_CHAR_INIT(13), \ - _PyBytes_CHAR_INIT(14), \ - _PyBytes_CHAR_INIT(15), \ - _PyBytes_CHAR_INIT(16), \ - _PyBytes_CHAR_INIT(17), \ - _PyBytes_CHAR_INIT(18), \ - _PyBytes_CHAR_INIT(19), \ - _PyBytes_CHAR_INIT(20), \ - _PyBytes_CHAR_INIT(21), \ - _PyBytes_CHAR_INIT(22), \ - _PyBytes_CHAR_INIT(23), \ - _PyBytes_CHAR_INIT(24), \ - _PyBytes_CHAR_INIT(25), \ - _PyBytes_CHAR_INIT(26), \ - _PyBytes_CHAR_INIT(27), \ - _PyBytes_CHAR_INIT(28), \ - _PyBytes_CHAR_INIT(29), \ - _PyBytes_CHAR_INIT(30), \ - _PyBytes_CHAR_INIT(31), \ - _PyBytes_CHAR_INIT(32), \ - _PyBytes_CHAR_INIT(33), \ - _PyBytes_CHAR_INIT(34), \ - _PyBytes_CHAR_INIT(35), \ - _PyBytes_CHAR_INIT(36), \ - _PyBytes_CHAR_INIT(37), \ - _PyBytes_CHAR_INIT(38), \ - _PyBytes_CHAR_INIT(39), \ - _PyBytes_CHAR_INIT(40), \ - _PyBytes_CHAR_INIT(41), \ - _PyBytes_CHAR_INIT(42), \ - _PyBytes_CHAR_INIT(43), \ - _PyBytes_CHAR_INIT(44), \ - _PyBytes_CHAR_INIT(45), \ - _PyBytes_CHAR_INIT(46), \ - _PyBytes_CHAR_INIT(47), \ - _PyBytes_CHAR_INIT(48), \ - _PyBytes_CHAR_INIT(49), \ - _PyBytes_CHAR_INIT(50), \ - _PyBytes_CHAR_INIT(51), \ - _PyBytes_CHAR_INIT(52), \ - _PyBytes_CHAR_INIT(53), \ - _PyBytes_CHAR_INIT(54), \ - _PyBytes_CHAR_INIT(55), \ - _PyBytes_CHAR_INIT(56), \ - _PyBytes_CHAR_INIT(57), \ - _PyBytes_CHAR_INIT(58), \ - _PyBytes_CHAR_INIT(59), \ - _PyBytes_CHAR_INIT(60), \ - _PyBytes_CHAR_INIT(61), \ - _PyBytes_CHAR_INIT(62), \ - _PyBytes_CHAR_INIT(63), \ - _PyBytes_CHAR_INIT(64), \ - _PyBytes_CHAR_INIT(65), \ - _PyBytes_CHAR_INIT(66), \ - _PyBytes_CHAR_INIT(67), \ - _PyBytes_CHAR_INIT(68), \ - _PyBytes_CHAR_INIT(69), \ - _PyBytes_CHAR_INIT(70), \ - _PyBytes_CHAR_INIT(71), \ - _PyBytes_CHAR_INIT(72), \ - _PyBytes_CHAR_INIT(73), \ - _PyBytes_CHAR_INIT(74), \ - _PyBytes_CHAR_INIT(75), \ - _PyBytes_CHAR_INIT(76), \ - _PyBytes_CHAR_INIT(77), \ - _PyBytes_CHAR_INIT(78), \ - _PyBytes_CHAR_INIT(79), \ - _PyBytes_CHAR_INIT(80), \ - _PyBytes_CHAR_INIT(81), \ - _PyBytes_CHAR_INIT(82), \ - _PyBytes_CHAR_INIT(83), \ - _PyBytes_CHAR_INIT(84), \ - _PyBytes_CHAR_INIT(85), \ - _PyBytes_CHAR_INIT(86), \ - _PyBytes_CHAR_INIT(87), \ - _PyBytes_CHAR_INIT(88), \ - _PyBytes_CHAR_INIT(89), \ - _PyBytes_CHAR_INIT(90), \ - _PyBytes_CHAR_INIT(91), \ - _PyBytes_CHAR_INIT(92), \ - _PyBytes_CHAR_INIT(93), \ - _PyBytes_CHAR_INIT(94), \ - _PyBytes_CHAR_INIT(95), \ - _PyBytes_CHAR_INIT(96), \ - _PyBytes_CHAR_INIT(97), \ - _PyBytes_CHAR_INIT(98), \ - _PyBytes_CHAR_INIT(99), \ - _PyBytes_CHAR_INIT(100), \ - _PyBytes_CHAR_INIT(101), \ - _PyBytes_CHAR_INIT(102), \ - _PyBytes_CHAR_INIT(103), \ - _PyBytes_CHAR_INIT(104), \ - _PyBytes_CHAR_INIT(105), \ - 
_PyBytes_CHAR_INIT(106), \ - _PyBytes_CHAR_INIT(107), \ - _PyBytes_CHAR_INIT(108), \ - _PyBytes_CHAR_INIT(109), \ - _PyBytes_CHAR_INIT(110), \ - _PyBytes_CHAR_INIT(111), \ - _PyBytes_CHAR_INIT(112), \ - _PyBytes_CHAR_INIT(113), \ - _PyBytes_CHAR_INIT(114), \ - _PyBytes_CHAR_INIT(115), \ - _PyBytes_CHAR_INIT(116), \ - _PyBytes_CHAR_INIT(117), \ - _PyBytes_CHAR_INIT(118), \ - _PyBytes_CHAR_INIT(119), \ - _PyBytes_CHAR_INIT(120), \ - _PyBytes_CHAR_INIT(121), \ - _PyBytes_CHAR_INIT(122), \ - _PyBytes_CHAR_INIT(123), \ - _PyBytes_CHAR_INIT(124), \ - _PyBytes_CHAR_INIT(125), \ - _PyBytes_CHAR_INIT(126), \ - _PyBytes_CHAR_INIT(127), \ - _PyBytes_CHAR_INIT(128), \ - _PyBytes_CHAR_INIT(129), \ - _PyBytes_CHAR_INIT(130), \ - _PyBytes_CHAR_INIT(131), \ - _PyBytes_CHAR_INIT(132), \ - _PyBytes_CHAR_INIT(133), \ - _PyBytes_CHAR_INIT(134), \ - _PyBytes_CHAR_INIT(135), \ - _PyBytes_CHAR_INIT(136), \ - _PyBytes_CHAR_INIT(137), \ - _PyBytes_CHAR_INIT(138), \ - _PyBytes_CHAR_INIT(139), \ - _PyBytes_CHAR_INIT(140), \ - _PyBytes_CHAR_INIT(141), \ - _PyBytes_CHAR_INIT(142), \ - _PyBytes_CHAR_INIT(143), \ - _PyBytes_CHAR_INIT(144), \ - _PyBytes_CHAR_INIT(145), \ - _PyBytes_CHAR_INIT(146), \ - _PyBytes_CHAR_INIT(147), \ - _PyBytes_CHAR_INIT(148), \ - _PyBytes_CHAR_INIT(149), \ - _PyBytes_CHAR_INIT(150), \ - _PyBytes_CHAR_INIT(151), \ - _PyBytes_CHAR_INIT(152), \ - _PyBytes_CHAR_INIT(153), \ - _PyBytes_CHAR_INIT(154), \ - _PyBytes_CHAR_INIT(155), \ - _PyBytes_CHAR_INIT(156), \ - _PyBytes_CHAR_INIT(157), \ - _PyBytes_CHAR_INIT(158), \ - _PyBytes_CHAR_INIT(159), \ - _PyBytes_CHAR_INIT(160), \ - _PyBytes_CHAR_INIT(161), \ - _PyBytes_CHAR_INIT(162), \ - _PyBytes_CHAR_INIT(163), \ - _PyBytes_CHAR_INIT(164), \ - _PyBytes_CHAR_INIT(165), \ - _PyBytes_CHAR_INIT(166), \ - _PyBytes_CHAR_INIT(167), \ - _PyBytes_CHAR_INIT(168), \ - _PyBytes_CHAR_INIT(169), \ - _PyBytes_CHAR_INIT(170), \ - _PyBytes_CHAR_INIT(171), \ - _PyBytes_CHAR_INIT(172), \ - _PyBytes_CHAR_INIT(173), \ - _PyBytes_CHAR_INIT(174), \ - _PyBytes_CHAR_INIT(175), \ - _PyBytes_CHAR_INIT(176), \ - _PyBytes_CHAR_INIT(177), \ - _PyBytes_CHAR_INIT(178), \ - _PyBytes_CHAR_INIT(179), \ - _PyBytes_CHAR_INIT(180), \ - _PyBytes_CHAR_INIT(181), \ - _PyBytes_CHAR_INIT(182), \ - _PyBytes_CHAR_INIT(183), \ - _PyBytes_CHAR_INIT(184), \ - _PyBytes_CHAR_INIT(185), \ - _PyBytes_CHAR_INIT(186), \ - _PyBytes_CHAR_INIT(187), \ - _PyBytes_CHAR_INIT(188), \ - _PyBytes_CHAR_INIT(189), \ - _PyBytes_CHAR_INIT(190), \ - _PyBytes_CHAR_INIT(191), \ - _PyBytes_CHAR_INIT(192), \ - _PyBytes_CHAR_INIT(193), \ - _PyBytes_CHAR_INIT(194), \ - _PyBytes_CHAR_INIT(195), \ - _PyBytes_CHAR_INIT(196), \ - _PyBytes_CHAR_INIT(197), \ - _PyBytes_CHAR_INIT(198), \ - _PyBytes_CHAR_INIT(199), \ - _PyBytes_CHAR_INIT(200), \ - _PyBytes_CHAR_INIT(201), \ - _PyBytes_CHAR_INIT(202), \ - _PyBytes_CHAR_INIT(203), \ - _PyBytes_CHAR_INIT(204), \ - _PyBytes_CHAR_INIT(205), \ - _PyBytes_CHAR_INIT(206), \ - _PyBytes_CHAR_INIT(207), \ - _PyBytes_CHAR_INIT(208), \ - _PyBytes_CHAR_INIT(209), \ - _PyBytes_CHAR_INIT(210), \ - _PyBytes_CHAR_INIT(211), \ - _PyBytes_CHAR_INIT(212), \ - _PyBytes_CHAR_INIT(213), \ - _PyBytes_CHAR_INIT(214), \ - _PyBytes_CHAR_INIT(215), \ - _PyBytes_CHAR_INIT(216), \ - _PyBytes_CHAR_INIT(217), \ - _PyBytes_CHAR_INIT(218), \ - _PyBytes_CHAR_INIT(219), \ - _PyBytes_CHAR_INIT(220), \ - _PyBytes_CHAR_INIT(221), \ - _PyBytes_CHAR_INIT(222), \ - _PyBytes_CHAR_INIT(223), \ - _PyBytes_CHAR_INIT(224), \ - _PyBytes_CHAR_INIT(225), \ - _PyBytes_CHAR_INIT(226), \ - _PyBytes_CHAR_INIT(227), \ - 
_PyBytes_CHAR_INIT(228), \ - _PyBytes_CHAR_INIT(229), \ - _PyBytes_CHAR_INIT(230), \ - _PyBytes_CHAR_INIT(231), \ - _PyBytes_CHAR_INIT(232), \ - _PyBytes_CHAR_INIT(233), \ - _PyBytes_CHAR_INIT(234), \ - _PyBytes_CHAR_INIT(235), \ - _PyBytes_CHAR_INIT(236), \ - _PyBytes_CHAR_INIT(237), \ - _PyBytes_CHAR_INIT(238), \ - _PyBytes_CHAR_INIT(239), \ - _PyBytes_CHAR_INIT(240), \ - _PyBytes_CHAR_INIT(241), \ - _PyBytes_CHAR_INIT(242), \ - _PyBytes_CHAR_INIT(243), \ - _PyBytes_CHAR_INIT(244), \ - _PyBytes_CHAR_INIT(245), \ - _PyBytes_CHAR_INIT(246), \ - _PyBytes_CHAR_INIT(247), \ - _PyBytes_CHAR_INIT(248), \ - _PyBytes_CHAR_INIT(249), \ - _PyBytes_CHAR_INIT(250), \ - _PyBytes_CHAR_INIT(251), \ - _PyBytes_CHAR_INIT(252), \ - _PyBytes_CHAR_INIT(253), \ - _PyBytes_CHAR_INIT(254), \ - _PyBytes_CHAR_INIT(255), \ - }, \ - \ - .strings = { \ - .literals = { \ - INIT_STR(anon_dictcomp, "<dictcomp>"), \ - INIT_STR(anon_genexpr, "<genexpr>"), \ - INIT_STR(anon_lambda, "<lambda>"), \ - INIT_STR(anon_listcomp, "<listcomp>"), \ - INIT_STR(anon_module, "<module>"), \ - INIT_STR(anon_setcomp, "<setcomp>"), \ - INIT_STR(anon_string, "<string>"), \ - INIT_STR(anon_unknown, "<unknown>"), \ - INIT_STR(close_br, "}"), \ - INIT_STR(comma_sep, ", "), \ - INIT_STR(dbl_close_br, "}}"), \ - INIT_STR(dbl_open_br, "{{"), \ - INIT_STR(dbl_percent, "%%"), \ - INIT_STR(dot, "."), \ - INIT_STR(dot_locals, ".<locals>"), \ - INIT_STR(empty, ""), \ - INIT_STR(list_err, "list index out of range"), \ - INIT_STR(newline, "\n"), \ - INIT_STR(open_br, "{"), \ - INIT_STR(percent, "%"), \ - INIT_STR(utf_8, "utf-8"), \ - }, \ - .identifiers = { \ - INIT_ID(False), \ - INIT_ID(Py_Repr), \ - INIT_ID(TextIOWrapper), \ - INIT_ID(True), \ - INIT_ID(WarningMessage), \ - INIT_ID(_), \ - INIT_ID(__IOBase_closed), \ - INIT_ID(__abc_tpflags__), \ - INIT_ID(__abs__), \ - INIT_ID(__abstractmethods__), \ - INIT_ID(__add__), \ - INIT_ID(__aenter__), \ - INIT_ID(__aexit__), \ - INIT_ID(__aiter__), \ - INIT_ID(__all__), \ - INIT_ID(__and__), \ - INIT_ID(__anext__), \ - INIT_ID(__annotations__), \ - INIT_ID(__args__), \ - INIT_ID(__await__), \ - INIT_ID(__bases__), \ - INIT_ID(__bool__), \ - INIT_ID(__build_class__), \ - INIT_ID(__builtins__), \ - INIT_ID(__bytes__), \ - INIT_ID(__call__), \ - INIT_ID(__cantrace__), \ - INIT_ID(__class__), \ - INIT_ID(__class_getitem__), \ - INIT_ID(__classcell__), \ - INIT_ID(__complex__), \ - INIT_ID(__contains__), \ - INIT_ID(__copy__), \ - INIT_ID(__del__), \ - INIT_ID(__delattr__), \ - INIT_ID(__delete__), \ - INIT_ID(__delitem__), \ - INIT_ID(__dict__), \ - INIT_ID(__dictoffset__), \ - INIT_ID(__dir__), \ - INIT_ID(__divmod__), \ - INIT_ID(__doc__), \ - INIT_ID(__enter__), \ - INIT_ID(__eq__), \ - INIT_ID(__exit__), \ - INIT_ID(__file__), \ - INIT_ID(__float__), \ - INIT_ID(__floordiv__), \ - INIT_ID(__format__), \ - INIT_ID(__fspath__), \ - INIT_ID(__ge__), \ - INIT_ID(__get__), \ - INIT_ID(__getattr__), \ - INIT_ID(__getattribute__), \ - INIT_ID(__getinitargs__), \ - INIT_ID(__getitem__), \ - INIT_ID(__getnewargs__), \ - INIT_ID(__getnewargs_ex__), \ - INIT_ID(__getstate__), \ - INIT_ID(__gt__), \ - INIT_ID(__hash__), \ - INIT_ID(__iadd__), \ - INIT_ID(__iand__), \ - INIT_ID(__ifloordiv__), \ - INIT_ID(__ilshift__), \ - INIT_ID(__imatmul__), \ - INIT_ID(__imod__), \ - INIT_ID(__import__), \ - INIT_ID(__imul__), \ - INIT_ID(__index__), \ - INIT_ID(__init__), \ - INIT_ID(__init_subclass__), \ - INIT_ID(__instancecheck__), \ - INIT_ID(__int__), \ - INIT_ID(__invert__), \ - INIT_ID(__ior__), \ - INIT_ID(__ipow__), \ - INIT_ID(__irshift__), \ - INIT_ID(__isabstractmethod__), \ - INIT_ID(__isub__), 
\ - INIT_ID(__iter__), \ - INIT_ID(__itruediv__), \ - INIT_ID(__ixor__), \ - INIT_ID(__le__), \ - INIT_ID(__len__), \ - INIT_ID(__length_hint__), \ - INIT_ID(__lltrace__), \ - INIT_ID(__loader__), \ - INIT_ID(__lshift__), \ - INIT_ID(__lt__), \ - INIT_ID(__main__), \ - INIT_ID(__matmul__), \ - INIT_ID(__missing__), \ - INIT_ID(__mod__), \ - INIT_ID(__module__), \ - INIT_ID(__mro_entries__), \ - INIT_ID(__mul__), \ - INIT_ID(__name__), \ - INIT_ID(__ne__), \ - INIT_ID(__neg__), \ - INIT_ID(__new__), \ - INIT_ID(__newobj__), \ - INIT_ID(__newobj_ex__), \ - INIT_ID(__next__), \ - INIT_ID(__notes__), \ - INIT_ID(__or__), \ - INIT_ID(__orig_class__), \ - INIT_ID(__origin__), \ - INIT_ID(__package__), \ - INIT_ID(__parameters__), \ - INIT_ID(__path__), \ - INIT_ID(__pos__), \ - INIT_ID(__pow__), \ - INIT_ID(__prepare__), \ - INIT_ID(__qualname__), \ - INIT_ID(__radd__), \ - INIT_ID(__rand__), \ - INIT_ID(__rdivmod__), \ - INIT_ID(__reduce__), \ - INIT_ID(__reduce_ex__), \ - INIT_ID(__repr__), \ - INIT_ID(__reversed__), \ - INIT_ID(__rfloordiv__), \ - INIT_ID(__rlshift__), \ - INIT_ID(__rmatmul__), \ - INIT_ID(__rmod__), \ - INIT_ID(__rmul__), \ - INIT_ID(__ror__), \ - INIT_ID(__round__), \ - INIT_ID(__rpow__), \ - INIT_ID(__rrshift__), \ - INIT_ID(__rshift__), \ - INIT_ID(__rsub__), \ - INIT_ID(__rtruediv__), \ - INIT_ID(__rxor__), \ - INIT_ID(__set__), \ - INIT_ID(__set_name__), \ - INIT_ID(__setattr__), \ - INIT_ID(__setitem__), \ - INIT_ID(__setstate__), \ - INIT_ID(__sizeof__), \ - INIT_ID(__slotnames__), \ - INIT_ID(__slots__), \ - INIT_ID(__spec__), \ - INIT_ID(__str__), \ - INIT_ID(__sub__), \ - INIT_ID(__subclasscheck__), \ - INIT_ID(__subclasshook__), \ - INIT_ID(__truediv__), \ - INIT_ID(__trunc__), \ - INIT_ID(__typing_is_unpacked_typevartuple__), \ - INIT_ID(__typing_prepare_subst__), \ - INIT_ID(__typing_subst__), \ - INIT_ID(__typing_unpacked_tuple_args__), \ - INIT_ID(__warningregistry__), \ - INIT_ID(__weaklistoffset__), \ - INIT_ID(__weakref__), \ - INIT_ID(__xor__), \ - INIT_ID(_abc_impl), \ - INIT_ID(_annotation), \ - INIT_ID(_blksize), \ - INIT_ID(_bootstrap), \ - INIT_ID(_dealloc_warn), \ - INIT_ID(_feature_version), \ - INIT_ID(_finalizing), \ - INIT_ID(_find_and_load), \ - INIT_ID(_fix_up_module), \ - INIT_ID(_get_sourcefile), \ - INIT_ID(_handle_fromlist), \ - INIT_ID(_initializing), \ - INIT_ID(_is_text_encoding), \ - INIT_ID(_lock_unlock_module), \ - INIT_ID(_showwarnmsg), \ - INIT_ID(_shutdown), \ - INIT_ID(_slotnames), \ - INIT_ID(_uninitialized_submodules), \ - INIT_ID(_warn_unawaited_coroutine), \ - INIT_ID(_xoptions), \ - INIT_ID(a), \ - INIT_ID(abs_tol), \ - INIT_ID(access), \ - INIT_ID(add), \ - INIT_ID(after_in_child), \ - INIT_ID(after_in_parent), \ - INIT_ID(aggregate_class), \ - INIT_ID(append), \ - INIT_ID(argdefs), \ - INIT_ID(arguments), \ - INIT_ID(argv), \ - INIT_ID(attribute), \ - INIT_ID(authorizer_callback), \ - INIT_ID(b), \ - INIT_ID(backtick), \ - INIT_ID(base), \ - INIT_ID(before), \ - INIT_ID(big), \ - INIT_ID(binary_form), \ - INIT_ID(block), \ - INIT_ID(buffer), \ - INIT_ID(buffer_callback), \ - INIT_ID(buffer_size), \ - INIT_ID(buffering), \ - INIT_ID(buffers), \ - INIT_ID(bufsize), \ - INIT_ID(builtins), \ - INIT_ID(byteorder), \ - INIT_ID(bytes), \ - INIT_ID(bytes_per_sep), \ - INIT_ID(c_call), \ - INIT_ID(c_exception), \ - INIT_ID(c_return), \ - INIT_ID(cached_statements), \ - INIT_ID(cadata), \ - INIT_ID(cafile), \ - INIT_ID(call), \ - INIT_ID(capath), \ - INIT_ID(category), \ - INIT_ID(cb_type), \ - INIT_ID(certfile), \ - 
INIT_ID(check_same_thread), \ - INIT_ID(clear), \ - INIT_ID(close), \ - INIT_ID(closed), \ - INIT_ID(closefd), \ - INIT_ID(closure), \ - INIT_ID(co_argcount), \ - INIT_ID(co_cellvars), \ - INIT_ID(co_code), \ - INIT_ID(co_consts), \ - INIT_ID(co_exceptiontable), \ - INIT_ID(co_filename), \ - INIT_ID(co_firstlineno), \ - INIT_ID(co_flags), \ - INIT_ID(co_freevars), \ - INIT_ID(co_kwonlyargcount), \ - INIT_ID(co_linetable), \ - INIT_ID(co_name), \ - INIT_ID(co_names), \ - INIT_ID(co_nlocals), \ - INIT_ID(co_posonlyargcount), \ - INIT_ID(co_qualname), \ - INIT_ID(co_stacksize), \ - INIT_ID(co_varnames), \ - INIT_ID(code), \ - INIT_ID(command), \ - INIT_ID(comment_factory), \ - INIT_ID(consts), \ - INIT_ID(context), \ - INIT_ID(cookie), \ - INIT_ID(copy), \ - INIT_ID(copyreg), \ - INIT_ID(coro), \ - INIT_ID(count), \ - INIT_ID(cwd), \ - INIT_ID(data), \ - INIT_ID(database), \ - INIT_ID(decode), \ - INIT_ID(decoder), \ - INIT_ID(default), \ - INIT_ID(defaultaction), \ - INIT_ID(delete), \ - INIT_ID(depth), \ - INIT_ID(detect_types), \ - INIT_ID(deterministic), \ - INIT_ID(device), \ - INIT_ID(dict), \ - INIT_ID(dictcomp), \ - INIT_ID(difference_update), \ - INIT_ID(digest), \ - INIT_ID(digest_size), \ - INIT_ID(digestmod), \ - INIT_ID(dir_fd), \ - INIT_ID(dispatch_table), \ - INIT_ID(displayhook), \ - INIT_ID(dklen), \ - INIT_ID(doc), \ - INIT_ID(dont_inherit), \ - INIT_ID(dst), \ - INIT_ID(dst_dir_fd), \ - INIT_ID(duration), \ - INIT_ID(effective_ids), \ - INIT_ID(element_factory), \ - INIT_ID(encode), \ - INIT_ID(encoding), \ - INIT_ID(end), \ - INIT_ID(end_lineno), \ - INIT_ID(end_offset), \ - INIT_ID(endpos), \ - INIT_ID(env), \ - INIT_ID(errors), \ - INIT_ID(event), \ - INIT_ID(eventmask), \ - INIT_ID(exc_type), \ - INIT_ID(exc_value), \ - INIT_ID(excepthook), \ - INIT_ID(exception), \ - INIT_ID(exp), \ - INIT_ID(extend), \ - INIT_ID(factory), \ - INIT_ID(family), \ - INIT_ID(fanout), \ - INIT_ID(fd), \ - INIT_ID(fd2), \ - INIT_ID(fdel), \ - INIT_ID(fget), \ - INIT_ID(file), \ - INIT_ID(file_actions), \ - INIT_ID(filename), \ - INIT_ID(fileno), \ - INIT_ID(filepath), \ - INIT_ID(fillvalue), \ - INIT_ID(filters), \ - INIT_ID(final), \ - INIT_ID(find_class), \ - INIT_ID(fix_imports), \ - INIT_ID(flags), \ - INIT_ID(flush), \ - INIT_ID(follow_symlinks), \ - INIT_ID(format), \ - INIT_ID(frequency), \ - INIT_ID(fromlist), \ - INIT_ID(fset), \ - INIT_ID(func), \ - INIT_ID(generation), \ - INIT_ID(genexpr), \ - INIT_ID(get), \ - INIT_ID(get_source), \ - INIT_ID(getattr), \ - INIT_ID(getstate), \ - INIT_ID(gid), \ - INIT_ID(globals), \ - INIT_ID(groupindex), \ - INIT_ID(groups), \ - INIT_ID(handle), \ - INIT_ID(hash_name), \ - INIT_ID(header), \ - INIT_ID(headers), \ - INIT_ID(hi), \ - INIT_ID(hook), \ - INIT_ID(id), \ - INIT_ID(ignore), \ - INIT_ID(imag), \ - INIT_ID(importlib), \ - INIT_ID(in_fd), \ - INIT_ID(incoming), \ - INIT_ID(indexgroup), \ - INIT_ID(inf), \ - INIT_ID(inheritable), \ - INIT_ID(initial), \ - INIT_ID(initial_bytes), \ - INIT_ID(initial_value), \ - INIT_ID(initval), \ - INIT_ID(inner_size), \ - INIT_ID(input), \ - INIT_ID(insert_comments), \ - INIT_ID(insert_pis), \ - INIT_ID(instructions), \ - INIT_ID(intern), \ - INIT_ID(intersection), \ - INIT_ID(isatty), \ - INIT_ID(isinstance), \ - INIT_ID(isolation_level), \ - INIT_ID(istext), \ - INIT_ID(item), \ - INIT_ID(items), \ - INIT_ID(iter), \ - INIT_ID(iterable), \ - INIT_ID(iterations), \ - INIT_ID(join), \ - INIT_ID(jump), \ - INIT_ID(keepends), \ - INIT_ID(key), \ - INIT_ID(keyfile), \ - INIT_ID(keys), \ - INIT_ID(kind), \ 
- INIT_ID(lambda), \ - INIT_ID(last), \ - INIT_ID(last_node), \ - INIT_ID(last_traceback), \ - INIT_ID(last_type), \ - INIT_ID(last_value), \ - INIT_ID(latin1), \ - INIT_ID(leaf_size), \ - INIT_ID(len), \ - INIT_ID(length), \ - INIT_ID(level), \ - INIT_ID(limit), \ - INIT_ID(line), \ - INIT_ID(line_buffering), \ - INIT_ID(lineno), \ - INIT_ID(listcomp), \ - INIT_ID(little), \ - INIT_ID(lo), \ - INIT_ID(locale), \ - INIT_ID(locals), \ - INIT_ID(loop), \ - INIT_ID(mapping), \ - INIT_ID(match), \ - INIT_ID(max_length), \ - INIT_ID(maxdigits), \ - INIT_ID(maxevents), \ - INIT_ID(maxmem), \ - INIT_ID(maxsplit), \ - INIT_ID(maxvalue), \ - INIT_ID(memLevel), \ - INIT_ID(memlimit), \ - INIT_ID(message), \ - INIT_ID(metaclass), \ - INIT_ID(method), \ - INIT_ID(mod), \ - INIT_ID(mode), \ - INIT_ID(module), \ - INIT_ID(module_globals), \ - INIT_ID(modules), \ - INIT_ID(mro), \ - INIT_ID(msg), \ - INIT_ID(n), \ - INIT_ID(n_arg), \ - INIT_ID(n_fields), \ - INIT_ID(n_sequence_fields), \ - INIT_ID(n_unnamed_fields), \ - INIT_ID(name), \ - INIT_ID(namespace_separator), \ - INIT_ID(namespaces), \ - INIT_ID(narg), \ - INIT_ID(ndigits), \ - INIT_ID(new_limit), \ - INIT_ID(newline), \ - INIT_ID(newlines), \ - INIT_ID(next), \ - INIT_ID(node_depth), \ - INIT_ID(node_offset), \ - INIT_ID(ns), \ - INIT_ID(number), \ - INIT_ID(obj), \ - INIT_ID(object), \ - INIT_ID(offset), \ - INIT_ID(offset_dst), \ - INIT_ID(offset_src), \ - INIT_ID(on_type_read), \ - INIT_ID(onceregistry), \ - INIT_ID(oparg), \ - INIT_ID(opcode), \ - INIT_ID(open), \ - INIT_ID(opener), \ - INIT_ID(operation), \ - INIT_ID(optimize), \ - INIT_ID(options), \ - INIT_ID(order), \ - INIT_ID(out_fd), \ - INIT_ID(outgoing), \ - INIT_ID(overlapped), \ - INIT_ID(owner), \ - INIT_ID(p), \ - INIT_ID(pages), \ - INIT_ID(parent), \ - INIT_ID(password), \ - INIT_ID(path), \ - INIT_ID(pattern), \ - INIT_ID(peek), \ - INIT_ID(persistent_id), \ - INIT_ID(persistent_load), \ - INIT_ID(person), \ - INIT_ID(pi_factory), \ - INIT_ID(pid), \ - INIT_ID(policy), \ - INIT_ID(pos), \ - INIT_ID(print_file_and_line), \ - INIT_ID(priority), \ - INIT_ID(progress), \ - INIT_ID(progress_handler), \ - INIT_ID(proto), \ - INIT_ID(protocol), \ - INIT_ID(ps1), \ - INIT_ID(ps2), \ - INIT_ID(query), \ - INIT_ID(quotetabs), \ - INIT_ID(r), \ - INIT_ID(raw), \ - INIT_ID(read), \ - INIT_ID(read1), \ - INIT_ID(readable), \ - INIT_ID(readall), \ - INIT_ID(readinto), \ - INIT_ID(readinto1), \ - INIT_ID(readline), \ - INIT_ID(readonly), \ - INIT_ID(real), \ - INIT_ID(reducer_override), \ - INIT_ID(registry), \ - INIT_ID(rel_tol), \ - INIT_ID(reload), \ - INIT_ID(repl), \ - INIT_ID(replace), \ - INIT_ID(reserved), \ - INIT_ID(reset), \ - INIT_ID(resetids), \ - INIT_ID(return), \ - INIT_ID(reverse), \ - INIT_ID(reversed), \ - INIT_ID(s), \ - INIT_ID(salt), \ - INIT_ID(sched_priority), \ - INIT_ID(scheduler), \ - INIT_ID(seek), \ - INIT_ID(seekable), \ - INIT_ID(selectors), \ - INIT_ID(send), \ - INIT_ID(sep), \ - INIT_ID(sequence), \ - INIT_ID(server_hostname), \ - INIT_ID(server_side), \ - INIT_ID(session), \ - INIT_ID(setcomp), \ - INIT_ID(setpgroup), \ - INIT_ID(setsid), \ - INIT_ID(setsigdef), \ - INIT_ID(setsigmask), \ - INIT_ID(setstate), \ - INIT_ID(shape), \ - INIT_ID(show_cmd), \ - INIT_ID(signed), \ - INIT_ID(size), \ - INIT_ID(sizehint), \ - INIT_ID(sleep), \ - INIT_ID(sock), \ - INIT_ID(sort), \ - INIT_ID(sound), \ - INIT_ID(source), \ - INIT_ID(src), \ - INIT_ID(src_dir_fd), \ - INIT_ID(stacklevel), \ - INIT_ID(start), \ - INIT_ID(statement), \ - INIT_ID(status), \ - 
INIT_ID(stderr), \ - INIT_ID(stdin), \ - INIT_ID(stdout), \ - INIT_ID(step), \ - INIT_ID(store_name), \ - INIT_ID(strategy), \ - INIT_ID(strict), \ - INIT_ID(strict_mode), \ - INIT_ID(string), \ - INIT_ID(sub_key), \ - INIT_ID(symmetric_difference_update), \ - INIT_ID(tabsize), \ - INIT_ID(tag), \ - INIT_ID(target), \ - INIT_ID(target_is_directory), \ - INIT_ID(task), \ - INIT_ID(tb_frame), \ - INIT_ID(tb_lasti), \ - INIT_ID(tb_lineno), \ - INIT_ID(tb_next), \ - INIT_ID(tell), \ - INIT_ID(template), \ - INIT_ID(term), \ - INIT_ID(text), \ - INIT_ID(threading), \ - INIT_ID(throw), \ - INIT_ID(timeout), \ - INIT_ID(times), \ - INIT_ID(top), \ - INIT_ID(trace_callback), \ - INIT_ID(traceback), \ - INIT_ID(trailers), \ - INIT_ID(translate), \ - INIT_ID(truncate), \ - INIT_ID(twice), \ - INIT_ID(txt), \ - INIT_ID(type), \ - INIT_ID(tz), \ - INIT_ID(uid), \ - INIT_ID(unlink), \ - INIT_ID(unraisablehook), \ - INIT_ID(uri), \ - INIT_ID(usedforsecurity), \ - INIT_ID(value), \ - INIT_ID(values), \ - INIT_ID(version), \ - INIT_ID(warnings), \ - INIT_ID(warnoptions), \ - INIT_ID(wbits), \ - INIT_ID(week), \ - INIT_ID(weekday), \ - INIT_ID(which), \ - INIT_ID(who), \ - INIT_ID(withdata), \ - INIT_ID(writable), \ - INIT_ID(write), \ - INIT_ID(write_through), \ - INIT_ID(x), \ - INIT_ID(year), \ - INIT_ID(zdict), \ - }, \ - .ascii = { \ - _PyASCIIObject_INIT("\x00"), \ - _PyASCIIObject_INIT("\x01"), \ - _PyASCIIObject_INIT("\x02"), \ - _PyASCIIObject_INIT("\x03"), \ - _PyASCIIObject_INIT("\x04"), \ - _PyASCIIObject_INIT("\x05"), \ - _PyASCIIObject_INIT("\x06"), \ - _PyASCIIObject_INIT("\x07"), \ - _PyASCIIObject_INIT("\x08"), \ - _PyASCIIObject_INIT("\x09"), \ - _PyASCIIObject_INIT("\x0a"), \ - _PyASCIIObject_INIT("\x0b"), \ - _PyASCIIObject_INIT("\x0c"), \ - _PyASCIIObject_INIT("\x0d"), \ - _PyASCIIObject_INIT("\x0e"), \ - _PyASCIIObject_INIT("\x0f"), \ - _PyASCIIObject_INIT("\x10"), \ - _PyASCIIObject_INIT("\x11"), \ - _PyASCIIObject_INIT("\x12"), \ - _PyASCIIObject_INIT("\x13"), \ - _PyASCIIObject_INIT("\x14"), \ - _PyASCIIObject_INIT("\x15"), \ - _PyASCIIObject_INIT("\x16"), \ - _PyASCIIObject_INIT("\x17"), \ - _PyASCIIObject_INIT("\x18"), \ - _PyASCIIObject_INIT("\x19"), \ - _PyASCIIObject_INIT("\x1a"), \ - _PyASCIIObject_INIT("\x1b"), \ - _PyASCIIObject_INIT("\x1c"), \ - _PyASCIIObject_INIT("\x1d"), \ - _PyASCIIObject_INIT("\x1e"), \ - _PyASCIIObject_INIT("\x1f"), \ - _PyASCIIObject_INIT("\x20"), \ - _PyASCIIObject_INIT("\x21"), \ - _PyASCIIObject_INIT("\x22"), \ - _PyASCIIObject_INIT("\x23"), \ - _PyASCIIObject_INIT("\x24"), \ - _PyASCIIObject_INIT("\x25"), \ - _PyASCIIObject_INIT("\x26"), \ - _PyASCIIObject_INIT("\x27"), \ - _PyASCIIObject_INIT("\x28"), \ - _PyASCIIObject_INIT("\x29"), \ - _PyASCIIObject_INIT("\x2a"), \ - _PyASCIIObject_INIT("\x2b"), \ - _PyASCIIObject_INIT("\x2c"), \ - _PyASCIIObject_INIT("\x2d"), \ - _PyASCIIObject_INIT("\x2e"), \ - _PyASCIIObject_INIT("\x2f"), \ - _PyASCIIObject_INIT("\x30"), \ - _PyASCIIObject_INIT("\x31"), \ - _PyASCIIObject_INIT("\x32"), \ - _PyASCIIObject_INIT("\x33"), \ - _PyASCIIObject_INIT("\x34"), \ - _PyASCIIObject_INIT("\x35"), \ - _PyASCIIObject_INIT("\x36"), \ - _PyASCIIObject_INIT("\x37"), \ - _PyASCIIObject_INIT("\x38"), \ - _PyASCIIObject_INIT("\x39"), \ - _PyASCIIObject_INIT("\x3a"), \ - _PyASCIIObject_INIT("\x3b"), \ - _PyASCIIObject_INIT("\x3c"), \ - _PyASCIIObject_INIT("\x3d"), \ - _PyASCIIObject_INIT("\x3e"), \ - _PyASCIIObject_INIT("\x3f"), \ - _PyASCIIObject_INIT("\x40"), \ - _PyASCIIObject_INIT("\x41"), \ - _PyASCIIObject_INIT("\x42"), \ 
- _PyASCIIObject_INIT("\x43"), \ - _PyASCIIObject_INIT("\x44"), \ - _PyASCIIObject_INIT("\x45"), \ - _PyASCIIObject_INIT("\x46"), \ - _PyASCIIObject_INIT("\x47"), \ - _PyASCIIObject_INIT("\x48"), \ - _PyASCIIObject_INIT("\x49"), \ - _PyASCIIObject_INIT("\x4a"), \ - _PyASCIIObject_INIT("\x4b"), \ - _PyASCIIObject_INIT("\x4c"), \ - _PyASCIIObject_INIT("\x4d"), \ - _PyASCIIObject_INIT("\x4e"), \ - _PyASCIIObject_INIT("\x4f"), \ - _PyASCIIObject_INIT("\x50"), \ - _PyASCIIObject_INIT("\x51"), \ - _PyASCIIObject_INIT("\x52"), \ - _PyASCIIObject_INIT("\x53"), \ - _PyASCIIObject_INIT("\x54"), \ - _PyASCIIObject_INIT("\x55"), \ - _PyASCIIObject_INIT("\x56"), \ - _PyASCIIObject_INIT("\x57"), \ - _PyASCIIObject_INIT("\x58"), \ - _PyASCIIObject_INIT("\x59"), \ - _PyASCIIObject_INIT("\x5a"), \ - _PyASCIIObject_INIT("\x5b"), \ - _PyASCIIObject_INIT("\x5c"), \ - _PyASCIIObject_INIT("\x5d"), \ - _PyASCIIObject_INIT("\x5e"), \ - _PyASCIIObject_INIT("\x5f"), \ - _PyASCIIObject_INIT("\x60"), \ - _PyASCIIObject_INIT("\x61"), \ - _PyASCIIObject_INIT("\x62"), \ - _PyASCIIObject_INIT("\x63"), \ - _PyASCIIObject_INIT("\x64"), \ - _PyASCIIObject_INIT("\x65"), \ - _PyASCIIObject_INIT("\x66"), \ - _PyASCIIObject_INIT("\x67"), \ - _PyASCIIObject_INIT("\x68"), \ - _PyASCIIObject_INIT("\x69"), \ - _PyASCIIObject_INIT("\x6a"), \ - _PyASCIIObject_INIT("\x6b"), \ - _PyASCIIObject_INIT("\x6c"), \ - _PyASCIIObject_INIT("\x6d"), \ - _PyASCIIObject_INIT("\x6e"), \ - _PyASCIIObject_INIT("\x6f"), \ - _PyASCIIObject_INIT("\x70"), \ - _PyASCIIObject_INIT("\x71"), \ - _PyASCIIObject_INIT("\x72"), \ - _PyASCIIObject_INIT("\x73"), \ - _PyASCIIObject_INIT("\x74"), \ - _PyASCIIObject_INIT("\x75"), \ - _PyASCIIObject_INIT("\x76"), \ - _PyASCIIObject_INIT("\x77"), \ - _PyASCIIObject_INIT("\x78"), \ - _PyASCIIObject_INIT("\x79"), \ - _PyASCIIObject_INIT("\x7a"), \ - _PyASCIIObject_INIT("\x7b"), \ - _PyASCIIObject_INIT("\x7c"), \ - _PyASCIIObject_INIT("\x7d"), \ - _PyASCIIObject_INIT("\x7e"), \ - _PyASCIIObject_INIT("\x7f"), \ - }, \ - .latin1 = { \ - _PyUnicode_LATIN1_INIT("\x80", "\xc2\x80"), \ - _PyUnicode_LATIN1_INIT("\x81", "\xc2\x81"), \ - _PyUnicode_LATIN1_INIT("\x82", "\xc2\x82"), \ - _PyUnicode_LATIN1_INIT("\x83", "\xc2\x83"), \ - _PyUnicode_LATIN1_INIT("\x84", "\xc2\x84"), \ - _PyUnicode_LATIN1_INIT("\x85", "\xc2\x85"), \ - _PyUnicode_LATIN1_INIT("\x86", "\xc2\x86"), \ - _PyUnicode_LATIN1_INIT("\x87", "\xc2\x87"), \ - _PyUnicode_LATIN1_INIT("\x88", "\xc2\x88"), \ - _PyUnicode_LATIN1_INIT("\x89", "\xc2\x89"), \ - _PyUnicode_LATIN1_INIT("\x8a", "\xc2\x8a"), \ - _PyUnicode_LATIN1_INIT("\x8b", "\xc2\x8b"), \ - _PyUnicode_LATIN1_INIT("\x8c", "\xc2\x8c"), \ - _PyUnicode_LATIN1_INIT("\x8d", "\xc2\x8d"), \ - _PyUnicode_LATIN1_INIT("\x8e", "\xc2\x8e"), \ - _PyUnicode_LATIN1_INIT("\x8f", "\xc2\x8f"), \ - _PyUnicode_LATIN1_INIT("\x90", "\xc2\x90"), \ - _PyUnicode_LATIN1_INIT("\x91", "\xc2\x91"), \ - _PyUnicode_LATIN1_INIT("\x92", "\xc2\x92"), \ - _PyUnicode_LATIN1_INIT("\x93", "\xc2\x93"), \ - _PyUnicode_LATIN1_INIT("\x94", "\xc2\x94"), \ - _PyUnicode_LATIN1_INIT("\x95", "\xc2\x95"), \ - _PyUnicode_LATIN1_INIT("\x96", "\xc2\x96"), \ - _PyUnicode_LATIN1_INIT("\x97", "\xc2\x97"), \ - _PyUnicode_LATIN1_INIT("\x98", "\xc2\x98"), \ - _PyUnicode_LATIN1_INIT("\x99", "\xc2\x99"), \ - _PyUnicode_LATIN1_INIT("\x9a", "\xc2\x9a"), \ - _PyUnicode_LATIN1_INIT("\x9b", "\xc2\x9b"), \ - _PyUnicode_LATIN1_INIT("\x9c", "\xc2\x9c"), \ - _PyUnicode_LATIN1_INIT("\x9d", "\xc2\x9d"), \ - _PyUnicode_LATIN1_INIT("\x9e", "\xc2\x9e"), \ - 
_PyUnicode_LATIN1_INIT("\x9f", "\xc2\x9f"), \ - _PyUnicode_LATIN1_INIT("\xa0", "\xc2\xa0"), \ - _PyUnicode_LATIN1_INIT("\xa1", "\xc2\xa1"), \ - _PyUnicode_LATIN1_INIT("\xa2", "\xc2\xa2"), \ - _PyUnicode_LATIN1_INIT("\xa3", "\xc2\xa3"), \ - _PyUnicode_LATIN1_INIT("\xa4", "\xc2\xa4"), \ - _PyUnicode_LATIN1_INIT("\xa5", "\xc2\xa5"), \ - _PyUnicode_LATIN1_INIT("\xa6", "\xc2\xa6"), \ - _PyUnicode_LATIN1_INIT("\xa7", "\xc2\xa7"), \ - _PyUnicode_LATIN1_INIT("\xa8", "\xc2\xa8"), \ - _PyUnicode_LATIN1_INIT("\xa9", "\xc2\xa9"), \ - _PyUnicode_LATIN1_INIT("\xaa", "\xc2\xaa"), \ - _PyUnicode_LATIN1_INIT("\xab", "\xc2\xab"), \ - _PyUnicode_LATIN1_INIT("\xac", "\xc2\xac"), \ - _PyUnicode_LATIN1_INIT("\xad", "\xc2\xad"), \ - _PyUnicode_LATIN1_INIT("\xae", "\xc2\xae"), \ - _PyUnicode_LATIN1_INIT("\xaf", "\xc2\xaf"), \ - _PyUnicode_LATIN1_INIT("\xb0", "\xc2\xb0"), \ - _PyUnicode_LATIN1_INIT("\xb1", "\xc2\xb1"), \ - _PyUnicode_LATIN1_INIT("\xb2", "\xc2\xb2"), \ - _PyUnicode_LATIN1_INIT("\xb3", "\xc2\xb3"), \ - _PyUnicode_LATIN1_INIT("\xb4", "\xc2\xb4"), \ - _PyUnicode_LATIN1_INIT("\xb5", "\xc2\xb5"), \ - _PyUnicode_LATIN1_INIT("\xb6", "\xc2\xb6"), \ - _PyUnicode_LATIN1_INIT("\xb7", "\xc2\xb7"), \ - _PyUnicode_LATIN1_INIT("\xb8", "\xc2\xb8"), \ - _PyUnicode_LATIN1_INIT("\xb9", "\xc2\xb9"), \ - _PyUnicode_LATIN1_INIT("\xba", "\xc2\xba"), \ - _PyUnicode_LATIN1_INIT("\xbb", "\xc2\xbb"), \ - _PyUnicode_LATIN1_INIT("\xbc", "\xc2\xbc"), \ - _PyUnicode_LATIN1_INIT("\xbd", "\xc2\xbd"), \ - _PyUnicode_LATIN1_INIT("\xbe", "\xc2\xbe"), \ - _PyUnicode_LATIN1_INIT("\xbf", "\xc2\xbf"), \ - _PyUnicode_LATIN1_INIT("\xc0", "\xc3\x80"), \ - _PyUnicode_LATIN1_INIT("\xc1", "\xc3\x81"), \ - _PyUnicode_LATIN1_INIT("\xc2", "\xc3\x82"), \ - _PyUnicode_LATIN1_INIT("\xc3", "\xc3\x83"), \ - _PyUnicode_LATIN1_INIT("\xc4", "\xc3\x84"), \ - _PyUnicode_LATIN1_INIT("\xc5", "\xc3\x85"), \ - _PyUnicode_LATIN1_INIT("\xc6", "\xc3\x86"), \ - _PyUnicode_LATIN1_INIT("\xc7", "\xc3\x87"), \ - _PyUnicode_LATIN1_INIT("\xc8", "\xc3\x88"), \ - _PyUnicode_LATIN1_INIT("\xc9", "\xc3\x89"), \ - _PyUnicode_LATIN1_INIT("\xca", "\xc3\x8a"), \ - _PyUnicode_LATIN1_INIT("\xcb", "\xc3\x8b"), \ - _PyUnicode_LATIN1_INIT("\xcc", "\xc3\x8c"), \ - _PyUnicode_LATIN1_INIT("\xcd", "\xc3\x8d"), \ - _PyUnicode_LATIN1_INIT("\xce", "\xc3\x8e"), \ - _PyUnicode_LATIN1_INIT("\xcf", "\xc3\x8f"), \ - _PyUnicode_LATIN1_INIT("\xd0", "\xc3\x90"), \ - _PyUnicode_LATIN1_INIT("\xd1", "\xc3\x91"), \ - _PyUnicode_LATIN1_INIT("\xd2", "\xc3\x92"), \ - _PyUnicode_LATIN1_INIT("\xd3", "\xc3\x93"), \ - _PyUnicode_LATIN1_INIT("\xd4", "\xc3\x94"), \ - _PyUnicode_LATIN1_INIT("\xd5", "\xc3\x95"), \ - _PyUnicode_LATIN1_INIT("\xd6", "\xc3\x96"), \ - _PyUnicode_LATIN1_INIT("\xd7", "\xc3\x97"), \ - _PyUnicode_LATIN1_INIT("\xd8", "\xc3\x98"), \ - _PyUnicode_LATIN1_INIT("\xd9", "\xc3\x99"), \ - _PyUnicode_LATIN1_INIT("\xda", "\xc3\x9a"), \ - _PyUnicode_LATIN1_INIT("\xdb", "\xc3\x9b"), \ - _PyUnicode_LATIN1_INIT("\xdc", "\xc3\x9c"), \ - _PyUnicode_LATIN1_INIT("\xdd", "\xc3\x9d"), \ - _PyUnicode_LATIN1_INIT("\xde", "\xc3\x9e"), \ - _PyUnicode_LATIN1_INIT("\xdf", "\xc3\x9f"), \ - _PyUnicode_LATIN1_INIT("\xe0", "\xc3\xa0"), \ - _PyUnicode_LATIN1_INIT("\xe1", "\xc3\xa1"), \ - _PyUnicode_LATIN1_INIT("\xe2", "\xc3\xa2"), \ - _PyUnicode_LATIN1_INIT("\xe3", "\xc3\xa3"), \ - _PyUnicode_LATIN1_INIT("\xe4", "\xc3\xa4"), \ - _PyUnicode_LATIN1_INIT("\xe5", "\xc3\xa5"), \ - _PyUnicode_LATIN1_INIT("\xe6", "\xc3\xa6"), \ - _PyUnicode_LATIN1_INIT("\xe7", "\xc3\xa7"), \ - _PyUnicode_LATIN1_INIT("\xe8", "\xc3\xa8"), \ - 
_PyUnicode_LATIN1_INIT("\xe9", "\xc3\xa9"), \ - _PyUnicode_LATIN1_INIT("\xea", "\xc3\xaa"), \ - _PyUnicode_LATIN1_INIT("\xeb", "\xc3\xab"), \ - _PyUnicode_LATIN1_INIT("\xec", "\xc3\xac"), \ - _PyUnicode_LATIN1_INIT("\xed", "\xc3\xad"), \ - _PyUnicode_LATIN1_INIT("\xee", "\xc3\xae"), \ - _PyUnicode_LATIN1_INIT("\xef", "\xc3\xaf"), \ - _PyUnicode_LATIN1_INIT("\xf0", "\xc3\xb0"), \ - _PyUnicode_LATIN1_INIT("\xf1", "\xc3\xb1"), \ - _PyUnicode_LATIN1_INIT("\xf2", "\xc3\xb2"), \ - _PyUnicode_LATIN1_INIT("\xf3", "\xc3\xb3"), \ - _PyUnicode_LATIN1_INIT("\xf4", "\xc3\xb4"), \ - _PyUnicode_LATIN1_INIT("\xf5", "\xc3\xb5"), \ - _PyUnicode_LATIN1_INIT("\xf6", "\xc3\xb6"), \ - _PyUnicode_LATIN1_INIT("\xf7", "\xc3\xb7"), \ - _PyUnicode_LATIN1_INIT("\xf8", "\xc3\xb8"), \ - _PyUnicode_LATIN1_INIT("\xf9", "\xc3\xb9"), \ - _PyUnicode_LATIN1_INIT("\xfa", "\xc3\xba"), \ - _PyUnicode_LATIN1_INIT("\xfb", "\xc3\xbb"), \ - _PyUnicode_LATIN1_INIT("\xfc", "\xc3\xbc"), \ - _PyUnicode_LATIN1_INIT("\xfd", "\xc3\xbd"), \ - _PyUnicode_LATIN1_INIT("\xfe", "\xc3\xbe"), \ - _PyUnicode_LATIN1_INIT("\xff", "\xc3\xbf"), \ - }, \ - }, \ - \ - .tuple_empty = { \ - .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \ - }, \ - }, \ +/* The following is auto-generated by Tools/build/generate_global_objects.py. */ +#define _Py_small_ints_INIT { \ + _PyLong_DIGIT_INIT(-5), \ + _PyLong_DIGIT_INIT(-4), \ + _PyLong_DIGIT_INIT(-3), \ + _PyLong_DIGIT_INIT(-2), \ + _PyLong_DIGIT_INIT(-1), \ + _PyLong_DIGIT_INIT(0), \ + _PyLong_DIGIT_INIT(1), \ + _PyLong_DIGIT_INIT(2), \ + _PyLong_DIGIT_INIT(3), \ + _PyLong_DIGIT_INIT(4), \ + _PyLong_DIGIT_INIT(5), \ + _PyLong_DIGIT_INIT(6), \ + _PyLong_DIGIT_INIT(7), \ + _PyLong_DIGIT_INIT(8), \ + _PyLong_DIGIT_INIT(9), \ + _PyLong_DIGIT_INIT(10), \ + _PyLong_DIGIT_INIT(11), \ + _PyLong_DIGIT_INIT(12), \ + _PyLong_DIGIT_INIT(13), \ + _PyLong_DIGIT_INIT(14), \ + _PyLong_DIGIT_INIT(15), \ + _PyLong_DIGIT_INIT(16), \ + _PyLong_DIGIT_INIT(17), \ + _PyLong_DIGIT_INIT(18), \ + _PyLong_DIGIT_INIT(19), \ + _PyLong_DIGIT_INIT(20), \ + _PyLong_DIGIT_INIT(21), \ + _PyLong_DIGIT_INIT(22), \ + _PyLong_DIGIT_INIT(23), \ + _PyLong_DIGIT_INIT(24), \ + _PyLong_DIGIT_INIT(25), \ + _PyLong_DIGIT_INIT(26), \ + _PyLong_DIGIT_INIT(27), \ + _PyLong_DIGIT_INIT(28), \ + _PyLong_DIGIT_INIT(29), \ + _PyLong_DIGIT_INIT(30), \ + _PyLong_DIGIT_INIT(31), \ + _PyLong_DIGIT_INIT(32), \ + _PyLong_DIGIT_INIT(33), \ + _PyLong_DIGIT_INIT(34), \ + _PyLong_DIGIT_INIT(35), \ + _PyLong_DIGIT_INIT(36), \ + _PyLong_DIGIT_INIT(37), \ + _PyLong_DIGIT_INIT(38), \ + _PyLong_DIGIT_INIT(39), \ + _PyLong_DIGIT_INIT(40), \ + _PyLong_DIGIT_INIT(41), \ + _PyLong_DIGIT_INIT(42), \ + _PyLong_DIGIT_INIT(43), \ + _PyLong_DIGIT_INIT(44), \ + _PyLong_DIGIT_INIT(45), \ + _PyLong_DIGIT_INIT(46), \ + _PyLong_DIGIT_INIT(47), \ + _PyLong_DIGIT_INIT(48), \ + _PyLong_DIGIT_INIT(49), \ + _PyLong_DIGIT_INIT(50), \ + _PyLong_DIGIT_INIT(51), \ + _PyLong_DIGIT_INIT(52), \ + _PyLong_DIGIT_INIT(53), \ + _PyLong_DIGIT_INIT(54), \ + _PyLong_DIGIT_INIT(55), \ + _PyLong_DIGIT_INIT(56), \ + _PyLong_DIGIT_INIT(57), \ + _PyLong_DIGIT_INIT(58), \ + _PyLong_DIGIT_INIT(59), \ + _PyLong_DIGIT_INIT(60), \ + _PyLong_DIGIT_INIT(61), \ + _PyLong_DIGIT_INIT(62), \ + _PyLong_DIGIT_INIT(63), \ + _PyLong_DIGIT_INIT(64), \ + _PyLong_DIGIT_INIT(65), \ + _PyLong_DIGIT_INIT(66), \ + _PyLong_DIGIT_INIT(67), \ + _PyLong_DIGIT_INIT(68), \ + _PyLong_DIGIT_INIT(69), \ + _PyLong_DIGIT_INIT(70), \ + _PyLong_DIGIT_INIT(71), \ + _PyLong_DIGIT_INIT(72), \ + _PyLong_DIGIT_INIT(73), \ + 
_PyLong_DIGIT_INIT(74), \ + _PyLong_DIGIT_INIT(75), \ + _PyLong_DIGIT_INIT(76), \ + _PyLong_DIGIT_INIT(77), \ + _PyLong_DIGIT_INIT(78), \ + _PyLong_DIGIT_INIT(79), \ + _PyLong_DIGIT_INIT(80), \ + _PyLong_DIGIT_INIT(81), \ + _PyLong_DIGIT_INIT(82), \ + _PyLong_DIGIT_INIT(83), \ + _PyLong_DIGIT_INIT(84), \ + _PyLong_DIGIT_INIT(85), \ + _PyLong_DIGIT_INIT(86), \ + _PyLong_DIGIT_INIT(87), \ + _PyLong_DIGIT_INIT(88), \ + _PyLong_DIGIT_INIT(89), \ + _PyLong_DIGIT_INIT(90), \ + _PyLong_DIGIT_INIT(91), \ + _PyLong_DIGIT_INIT(92), \ + _PyLong_DIGIT_INIT(93), \ + _PyLong_DIGIT_INIT(94), \ + _PyLong_DIGIT_INIT(95), \ + _PyLong_DIGIT_INIT(96), \ + _PyLong_DIGIT_INIT(97), \ + _PyLong_DIGIT_INIT(98), \ + _PyLong_DIGIT_INIT(99), \ + _PyLong_DIGIT_INIT(100), \ + _PyLong_DIGIT_INIT(101), \ + _PyLong_DIGIT_INIT(102), \ + _PyLong_DIGIT_INIT(103), \ + _PyLong_DIGIT_INIT(104), \ + _PyLong_DIGIT_INIT(105), \ + _PyLong_DIGIT_INIT(106), \ + _PyLong_DIGIT_INIT(107), \ + _PyLong_DIGIT_INIT(108), \ + _PyLong_DIGIT_INIT(109), \ + _PyLong_DIGIT_INIT(110), \ + _PyLong_DIGIT_INIT(111), \ + _PyLong_DIGIT_INIT(112), \ + _PyLong_DIGIT_INIT(113), \ + _PyLong_DIGIT_INIT(114), \ + _PyLong_DIGIT_INIT(115), \ + _PyLong_DIGIT_INIT(116), \ + _PyLong_DIGIT_INIT(117), \ + _PyLong_DIGIT_INIT(118), \ + _PyLong_DIGIT_INIT(119), \ + _PyLong_DIGIT_INIT(120), \ + _PyLong_DIGIT_INIT(121), \ + _PyLong_DIGIT_INIT(122), \ + _PyLong_DIGIT_INIT(123), \ + _PyLong_DIGIT_INIT(124), \ + _PyLong_DIGIT_INIT(125), \ + _PyLong_DIGIT_INIT(126), \ + _PyLong_DIGIT_INIT(127), \ + _PyLong_DIGIT_INIT(128), \ + _PyLong_DIGIT_INIT(129), \ + _PyLong_DIGIT_INIT(130), \ + _PyLong_DIGIT_INIT(131), \ + _PyLong_DIGIT_INIT(132), \ + _PyLong_DIGIT_INIT(133), \ + _PyLong_DIGIT_INIT(134), \ + _PyLong_DIGIT_INIT(135), \ + _PyLong_DIGIT_INIT(136), \ + _PyLong_DIGIT_INIT(137), \ + _PyLong_DIGIT_INIT(138), \ + _PyLong_DIGIT_INIT(139), \ + _PyLong_DIGIT_INIT(140), \ + _PyLong_DIGIT_INIT(141), \ + _PyLong_DIGIT_INIT(142), \ + _PyLong_DIGIT_INIT(143), \ + _PyLong_DIGIT_INIT(144), \ + _PyLong_DIGIT_INIT(145), \ + _PyLong_DIGIT_INIT(146), \ + _PyLong_DIGIT_INIT(147), \ + _PyLong_DIGIT_INIT(148), \ + _PyLong_DIGIT_INIT(149), \ + _PyLong_DIGIT_INIT(150), \ + _PyLong_DIGIT_INIT(151), \ + _PyLong_DIGIT_INIT(152), \ + _PyLong_DIGIT_INIT(153), \ + _PyLong_DIGIT_INIT(154), \ + _PyLong_DIGIT_INIT(155), \ + _PyLong_DIGIT_INIT(156), \ + _PyLong_DIGIT_INIT(157), \ + _PyLong_DIGIT_INIT(158), \ + _PyLong_DIGIT_INIT(159), \ + _PyLong_DIGIT_INIT(160), \ + _PyLong_DIGIT_INIT(161), \ + _PyLong_DIGIT_INIT(162), \ + _PyLong_DIGIT_INIT(163), \ + _PyLong_DIGIT_INIT(164), \ + _PyLong_DIGIT_INIT(165), \ + _PyLong_DIGIT_INIT(166), \ + _PyLong_DIGIT_INIT(167), \ + _PyLong_DIGIT_INIT(168), \ + _PyLong_DIGIT_INIT(169), \ + _PyLong_DIGIT_INIT(170), \ + _PyLong_DIGIT_INIT(171), \ + _PyLong_DIGIT_INIT(172), \ + _PyLong_DIGIT_INIT(173), \ + _PyLong_DIGIT_INIT(174), \ + _PyLong_DIGIT_INIT(175), \ + _PyLong_DIGIT_INIT(176), \ + _PyLong_DIGIT_INIT(177), \ + _PyLong_DIGIT_INIT(178), \ + _PyLong_DIGIT_INIT(179), \ + _PyLong_DIGIT_INIT(180), \ + _PyLong_DIGIT_INIT(181), \ + _PyLong_DIGIT_INIT(182), \ + _PyLong_DIGIT_INIT(183), \ + _PyLong_DIGIT_INIT(184), \ + _PyLong_DIGIT_INIT(185), \ + _PyLong_DIGIT_INIT(186), \ + _PyLong_DIGIT_INIT(187), \ + _PyLong_DIGIT_INIT(188), \ + _PyLong_DIGIT_INIT(189), \ + _PyLong_DIGIT_INIT(190), \ + _PyLong_DIGIT_INIT(191), \ + _PyLong_DIGIT_INIT(192), \ + _PyLong_DIGIT_INIT(193), \ + _PyLong_DIGIT_INIT(194), \ + _PyLong_DIGIT_INIT(195), \ + _PyLong_DIGIT_INIT(196), \ + 
_PyLong_DIGIT_INIT(197), \ + _PyLong_DIGIT_INIT(198), \ + _PyLong_DIGIT_INIT(199), \ + _PyLong_DIGIT_INIT(200), \ + _PyLong_DIGIT_INIT(201), \ + _PyLong_DIGIT_INIT(202), \ + _PyLong_DIGIT_INIT(203), \ + _PyLong_DIGIT_INIT(204), \ + _PyLong_DIGIT_INIT(205), \ + _PyLong_DIGIT_INIT(206), \ + _PyLong_DIGIT_INIT(207), \ + _PyLong_DIGIT_INIT(208), \ + _PyLong_DIGIT_INIT(209), \ + _PyLong_DIGIT_INIT(210), \ + _PyLong_DIGIT_INIT(211), \ + _PyLong_DIGIT_INIT(212), \ + _PyLong_DIGIT_INIT(213), \ + _PyLong_DIGIT_INIT(214), \ + _PyLong_DIGIT_INIT(215), \ + _PyLong_DIGIT_INIT(216), \ + _PyLong_DIGIT_INIT(217), \ + _PyLong_DIGIT_INIT(218), \ + _PyLong_DIGIT_INIT(219), \ + _PyLong_DIGIT_INIT(220), \ + _PyLong_DIGIT_INIT(221), \ + _PyLong_DIGIT_INIT(222), \ + _PyLong_DIGIT_INIT(223), \ + _PyLong_DIGIT_INIT(224), \ + _PyLong_DIGIT_INIT(225), \ + _PyLong_DIGIT_INIT(226), \ + _PyLong_DIGIT_INIT(227), \ + _PyLong_DIGIT_INIT(228), \ + _PyLong_DIGIT_INIT(229), \ + _PyLong_DIGIT_INIT(230), \ + _PyLong_DIGIT_INIT(231), \ + _PyLong_DIGIT_INIT(232), \ + _PyLong_DIGIT_INIT(233), \ + _PyLong_DIGIT_INIT(234), \ + _PyLong_DIGIT_INIT(235), \ + _PyLong_DIGIT_INIT(236), \ + _PyLong_DIGIT_INIT(237), \ + _PyLong_DIGIT_INIT(238), \ + _PyLong_DIGIT_INIT(239), \ + _PyLong_DIGIT_INIT(240), \ + _PyLong_DIGIT_INIT(241), \ + _PyLong_DIGIT_INIT(242), \ + _PyLong_DIGIT_INIT(243), \ + _PyLong_DIGIT_INIT(244), \ + _PyLong_DIGIT_INIT(245), \ + _PyLong_DIGIT_INIT(246), \ + _PyLong_DIGIT_INIT(247), \ + _PyLong_DIGIT_INIT(248), \ + _PyLong_DIGIT_INIT(249), \ + _PyLong_DIGIT_INIT(250), \ + _PyLong_DIGIT_INIT(251), \ + _PyLong_DIGIT_INIT(252), \ + _PyLong_DIGIT_INIT(253), \ + _PyLong_DIGIT_INIT(254), \ + _PyLong_DIGIT_INIT(255), \ + _PyLong_DIGIT_INIT(256), \ } -static inline void -_PyUnicode_InitStaticStrings(void) { - PyObject *string; - string = &_Py_ID(False); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(Py_Repr); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(TextIOWrapper); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(True); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(WarningMessage); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(_); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__IOBase_closed); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abc_tpflags__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__abstractmethods__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__add__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aenter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aexit__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__aiter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__all__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__and__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__anext__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__annotations__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__args__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__await__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bases__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bool__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__build_class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__builtins__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__bytes__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__call__); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(__cantrace__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__class_getitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__classcell__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__complex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__contains__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__copy__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__del__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delattr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delete__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__delitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dict__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dictoffset__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__dir__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__divmod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__doc__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__enter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__eq__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__exit__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__file__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__float__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__floordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__format__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__fspath__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ge__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__get__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getattr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getattribute__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getinitargs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getitem__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getnewargs__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getnewargs_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__getstate__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__gt__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__hash__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iadd__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iand__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ifloordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ilshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imatmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__import__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__imul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__index__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__init__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__init_subclass__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__instancecheck__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__int__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__invert__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ior__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ipow__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__irshift__); - 
PyUnicode_InternInPlace(&string); - string = &_Py_ID(__isabstractmethod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__isub__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__iter__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__itruediv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ixor__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__le__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__len__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__length_hint__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lltrace__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__loader__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__lt__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__main__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__matmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__missing__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__module__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mro_entries__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__mul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__name__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ne__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__neg__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__new__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__newobj__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__newobj_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__next__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__notes__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__or__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__orig_class__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__origin__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__package__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__parameters__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__path__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__pos__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__pow__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__prepare__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__qualname__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__radd__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rand__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rdivmod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reduce__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reduce_ex__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__repr__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__reversed__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rfloordiv__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rlshift__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmatmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmod__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rmul__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__ror__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__round__); - PyUnicode_InternInPlace(&string); - string = &_Py_ID(__rpow__); - PyUnicode_InternInPlace(&string); - 
string = &_Py_ID(__rrshift__);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(__rshift__);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(__rsub__);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(__rtruediv__);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(__rxor__);
-    PyUnicode_InternInPlace(&string);
[… the same two-line removal — `string = &_Py_ID(NAME);` followed by `PyUnicode_InternInPlace(&string);` — repeats for every remaining interned identifier in this generated table: the rest of the __dunder__ names, the _-prefixed runtime helpers (_abc_impl … _xoptions), and the lower-case argument/attribute names, continuing alphabetically through _Py_ID(term) …]
-    string = &_Py_ID(text);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(threading);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(throw);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(timeout);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(times);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(top);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(trace_callback);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(traceback);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(trailers);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(translate);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(truncate);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(twice);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(txt);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(type);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(tz);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(uid);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(unlink);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(unraisablehook);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(uri);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(usedforsecurity);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(value);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(values);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(version);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(warnings);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(warnoptions);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(wbits);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(week);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(weekday);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(which);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(who);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(withdata);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(writable);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(write);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(write_through);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(x);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(year);
-    PyUnicode_InternInPlace(&string);
-    string = &_Py_ID(zdict);
-    PyUnicode_InternInPlace(&string);
+#define _Py_bytes_characters_INIT { \
+    _PyBytes_CHAR_INIT(0), \
+    _PyBytes_CHAR_INIT(1), \
+    _PyBytes_CHAR_INIT(2), \
+    _PyBytes_CHAR_INIT(3), \
[… one added _PyBytes_CHAR_INIT(n) entry per byte value, n = 4 through 253 …]
+    _PyBytes_CHAR_INIT(254), \
+    _PyBytes_CHAR_INIT(255), \
+}
-#ifdef Py_DEBUG
-static inline void
-_PyStaticObjects_CheckRefcnt(void) {
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -5]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -4]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -3]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + -1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 0]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 6]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 7]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 8]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 9]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 10]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 11]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 12]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 13]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 14]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 15]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 16]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 17]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 18]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 19]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 20]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 21]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 22]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 23]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 24]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 25]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 26]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 27]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 28]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 29]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 30]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 31]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 32]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 33]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 34]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 35]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 36]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 37]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 38]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 39]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 40]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 41]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 42]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 43]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 44]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 45]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 45]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 46]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 46]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 47]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 47]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 48]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 48]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 49]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 49]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 50]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 50]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 51]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 51]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 52]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 52]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 53]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 53]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 54]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 54]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 55]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 55]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 56]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 56]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 57]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 57]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 58]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 58]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 59]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 59]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 60]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 60]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 61]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 61]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 62]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 62]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 63]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 63]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 64]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 64]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 65]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 65]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 66]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 66]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 67]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 67]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 68]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 68]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 69]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 69]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 70]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 70]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 71]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 71]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 72]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 72]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 73]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 73]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 74]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 74]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 75]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 75]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 76]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 76]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 77]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 77]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 78]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 78]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 79]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 79]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 80]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 80]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 81]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 81]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 82]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 82]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 83]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 83]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 84]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 84]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 85]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 85]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 86]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 86]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 87]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 87]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 88]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 88]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 89]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 89]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 90]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 90]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 91]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 91]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 92]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 92]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 93]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 93]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 94]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 94]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 95]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 95]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 96]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 96]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 97]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 97]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 98]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 98]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 99]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 99]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 100]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 100]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 101]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 101]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 102]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 102]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 103]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 103]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 104]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 104]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 105]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 105]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 106]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 106]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 107]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 107]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 108]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 108]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 109]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 109]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 110]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 110]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 111]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 111]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 112]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 112]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 113]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 113]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 114]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 114]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 115]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 115]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 116]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 116]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 117]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 117]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 118]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 118]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 119]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 119]); - Py_FatalError("immortal object has less refcnt 
than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 120]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 120]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 121]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 121]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 122]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 122]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 123]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 123]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 124]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 124]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 125]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 125]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 126]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 126]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 127]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 127]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 129]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 129]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 130]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 130]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 131]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 131]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - 
if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 132]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 132]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 133]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 133]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 134]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 134]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 135]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 135]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 136]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 136]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 137]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 137]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 138]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 138]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 139]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 139]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 140]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 140]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 141]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 141]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 142]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 142]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 143]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 143]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 144]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 144]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 145]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 145]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 146]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 146]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 147]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 147]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 148]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 148]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 149]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 149]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 150]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 150]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 151]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 151]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 152]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 152]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 153]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 153]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 154]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 154]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 155]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 155]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 
156]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 156]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 157]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 157]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 158]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 158]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 159]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 159]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 160]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 160]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 161]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 161]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 162]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 162]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 163]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 163]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 164]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 164]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 165]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 165]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 166]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 166]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 167]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 167]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 168]) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 168]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 169]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 169]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 170]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 170]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 171]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 171]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 172]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 172]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 173]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 173]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 174]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 174]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 175]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 175]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 176]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 176]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 177]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 177]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 178]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 178]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 179]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 179]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 180]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 180]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 181]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 181]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 182]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 182]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 183]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 183]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 184]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 184]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 185]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 185]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 186]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 186]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 187]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 187]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 188]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 188]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 189]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 189]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 190]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 190]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 191]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 191]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 192]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 
192]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 193]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 193]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 194]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 194]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 195]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 195]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 196]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 196]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 197]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 197]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 198]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 198]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 199]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 199]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 200]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 200]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 201]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 201]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 202]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 202]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 203]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 203]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 204]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 204]); - Py_FatalError("immortal object has less 
refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 205]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 205]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 206]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 206]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 207]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 207]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 208]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 208]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 209]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 209]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 210]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 210]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 211]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 211]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 212]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 212]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 213]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 213]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 214]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 214]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 215]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 215]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 216]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 216]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); 
- }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 217]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 217]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 218]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 218]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 219]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 219]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 220]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 220]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 221]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 221]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 222]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 222]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 223]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 223]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 224]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 224]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 225]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 225]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 226]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 226]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 227]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 227]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 228]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 228]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 229]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 229]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 230]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 230]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 231]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 231]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 232]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 232]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 233]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 233]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 234]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 234]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 235]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 235]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 236]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 236]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 237]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 237]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 238]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 238]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 239]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 239]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 240]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 240]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 
241]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 241]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 242]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 242]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 243]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 243]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 244]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 244]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 245]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 245]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 246]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 246]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 247]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 247]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 248]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 248]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 249]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 249]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 250]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 250]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 251]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 251]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 252]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 252]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 253]) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 253]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 254]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 254]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 255]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 255]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 256]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(small_ints)[_PY_NSMALLNEGINTS + 256]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_empty)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_empty)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[0]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[0]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[6]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[6]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[7]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[7]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[8]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[8]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[9]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[9]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[10]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[10]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[11]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[11]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[12]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[12]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[13]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[13]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[14]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[14]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[15]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[15]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[16]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[16]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[17]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[17]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[18]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[18]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[19]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[19]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[20]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[20]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[21]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[21]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[22]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[22]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[23]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[23]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[24]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[24]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[25]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[25]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[26]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[26]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[27]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[27]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[28]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[28]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[29]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[29]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[30]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[30]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[31]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[31]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[32]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[32]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[33]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[33]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[34]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[34]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[35]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[35]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[36]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[36]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[37]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[37]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[38]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[38]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[39]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[39]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[40]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[40]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[41]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[41]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[42]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[42]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[43]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[43]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[44]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[44]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[45]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[45]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[46]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[46]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[47]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[47]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[48]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[48]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[49]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[49]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[50]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[50]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[51]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[51]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[52]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[52]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[53]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[53]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[54]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[54]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[55]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[55]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[56]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[56]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[57]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[57]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[58]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[58]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[59]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[59]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[60]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[60]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[61]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[61]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[62]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[62]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[63]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[63]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[64]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[64]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[65]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[65]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[66]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[66]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[67]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[67]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[68]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[68]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[69]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[69]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[70]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[70]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[71]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[71]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[72]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[72]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[73]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[73]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[74]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[74]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[75]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[75]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[76]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[76]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[77]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[77]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[78]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[78]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[79]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[79]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[80]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[80]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[81]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[81]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[82]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[82]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[83]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[83]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[84]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[84]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[85]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[85]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[86]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[86]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[87]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[87]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[88]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[88]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[89]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[89]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[90]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[90]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[91]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[91]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[92]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[92]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[93]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[93]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[94]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[94]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[95]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[95]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[96]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[96]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[97]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[97]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[98]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[98]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[99]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[99]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[100]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[100]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[101]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[101]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[102]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[102]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[103]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[103]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[104]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[104]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[105]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[105]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[106]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[106]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[107]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[107]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[108]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[108]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[109]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[109]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[110]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[110]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[111]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[111]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[112]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[112]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[113]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[113]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[114]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[114]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[115]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[115]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[116]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(bytes_characters)[116]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[117]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[117]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[118]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[118]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[119]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[119]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[120]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[120]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[121]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[121]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[122]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[122]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[123]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[123]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[124]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[124]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[125]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[125]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[126]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[126]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[127]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[127]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[129]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[129]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[130]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[130]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[131]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[131]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[132]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[132]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[133]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[133]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[134]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[134]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[135]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[135]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[136]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[136]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[137]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[137]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[138]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[138]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[139]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[139]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[140]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[140]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[141]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[141]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[142]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[142]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[143]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[143]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[144]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[144]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[145]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[145]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[146]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[146]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[147]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[147]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[148]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[148]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[149]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[149]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[150]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[150]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[151]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[151]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[152]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[152]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[153]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[153]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[154]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[154]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[155]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[155]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[156]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[156]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[157]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[157]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[158]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[158]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[159]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[159]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[160]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[160]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[161]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[161]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[162]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[162]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[163]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[163]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[164]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[164]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[165]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[165]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[166]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[166]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[167]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[167]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[168]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[168]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[169]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[169]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[170]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[170]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[171]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[171]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[172]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[172]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[173]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[173]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[174]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[174]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[175]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[175]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[176]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[176]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[177]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[177]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[178]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[178]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[179]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[179]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[180]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[180]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[181]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[181]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[182]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[182]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[183]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(bytes_characters)[183]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[184]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[184]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[185]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[185]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[186]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[186]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[187]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[187]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[188]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[188]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[189]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[189]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[190]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[190]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[191]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[191]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[192]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[192]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[193]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[193]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[194]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[194]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[195]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[195]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[196]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[196]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[197]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[197]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[198]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[198]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[199]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[199]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[200]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[200]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[201]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[201]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[202]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[202]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[203]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[203]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[204]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[204]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[205]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[205]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[206]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[206]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[207]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[207]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[208]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[208]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[209]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[209]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[210]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[210]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[211]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[211]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[212]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[212]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[213]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[213]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[214]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[214]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[215]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[215]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[216]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[216]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[217]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[217]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[218]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[218]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[219]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[219]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[220]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[220]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[221]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[221]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[222]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[222]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[223]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[223]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[224]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[224]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[225]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[225]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[226]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[226]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[227]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[227]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[228]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[228]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[229]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[229]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[230]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[230]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[231]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[231]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[232]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[232]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[233]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[233]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[234]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[234]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[235]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[235]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[236]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[236]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(bytes_characters)[237]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[237]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[238]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[238]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[239]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[239]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[240]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[240]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[241]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[241]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[242]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[242]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[243]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[243]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[244]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[244]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[245]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[245]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[246]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[246]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[247]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[247]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[248]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[248]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[249]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[249]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[250]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(bytes_characters)[250]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[251]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[251]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[252]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[252]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[253]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[253]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[254]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[254]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(bytes_characters)[255]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(bytes_characters)[255]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_dictcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_dictcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_genexpr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_genexpr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_lambda)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_lambda)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_listcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_listcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_setcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_setcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_string)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_string)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(anon_unknown)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(anon_unknown)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_STR(close_br)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_STR(close_br)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; 
-    if (Py_REFCNT((PyObject *)&_Py_STR(comma_sep)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(comma_sep));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(dbl_close_br)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(dbl_close_br));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(dbl_open_br)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(dbl_open_br));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(dbl_percent)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(dbl_percent));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(dot)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(dot));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(dot_locals)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(dot_locals));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(empty)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(empty));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(list_err)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(list_err));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(newline)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(newline));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(open_br)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(open_br));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(percent)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(percent));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_STR(utf_8)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_STR(utf_8));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(False)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(False));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(Py_Repr)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(Py_Repr));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(TextIOWrapper)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(TextIOWrapper));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(True)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(True));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(WarningMessage)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(WarningMessage));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(_)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(_));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__IOBase_closed)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__IOBase_closed));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__abc_tpflags__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__abc_tpflags__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__abs__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__abs__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__abstractmethods__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__abstractmethods__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__add__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__add__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__aenter__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__aenter__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__aexit__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__aexit__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__aiter__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__aiter__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__all__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__all__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__and__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__and__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__anext__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__anext__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__annotations__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__annotations__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__args__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__args__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__await__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__await__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__bases__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__bases__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__bool__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__bool__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__build_class__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__build_class__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__builtins__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__builtins__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__bytes__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__bytes__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__call__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__call__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__cantrace__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__cantrace__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__class__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__class__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__class_getitem__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__class_getitem__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__classcell__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__classcell__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__complex__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__complex__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__contains__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__contains__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__copy__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__copy__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__del__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__del__));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_ID(__delattr__)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_ID(__delattr__));
-        Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__delete__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__delete__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__delitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__delitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dict__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dict__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dictoffset__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dictoffset__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__dir__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__dir__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__divmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__divmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__doc__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__doc__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__enter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__enter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__eq__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__eq__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__exit__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__exit__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__file__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__file__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__float__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__float__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__floordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__floordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__format__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__format__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__fspath__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__fspath__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ge__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ge__)); 
- Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__get__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__get__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getattr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getattr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getattribute__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getattribute__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getinitargs__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getinitargs__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getnewargs__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getnewargs__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getnewargs_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getnewargs_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__getstate__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__getstate__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__gt__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__gt__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__hash__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__hash__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iadd__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iadd__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iand__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iand__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ifloordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ifloordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ilshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ilshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__imatmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imatmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - 
if (Py_REFCNT((PyObject *)&_Py_ID(__imod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__import__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__import__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__imul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__imul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__index__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__index__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__init__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__init__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__init_subclass__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__init_subclass__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__instancecheck__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__instancecheck__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__int__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__int__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__invert__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__invert__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ior__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ior__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ipow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ipow__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__irshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__irshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__isabstractmethod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__isabstractmethod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__isub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__isub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__iter__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__iter__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__itruediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(__itruediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ixor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ixor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__le__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__le__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__len__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__len__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__length_hint__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__length_hint__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lltrace__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lltrace__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__loader__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__loader__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__lt__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__lt__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__main__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__main__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__matmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__matmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__missing__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__missing__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__module__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__module__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mro_entries__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mro_entries__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__mul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__mul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(__name__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__name__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ne__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ne__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__neg__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__neg__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__new__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__new__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__newobj__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__newobj__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__newobj_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__newobj_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__next__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__next__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__notes__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__notes__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__or__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__or__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__orig_class__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__orig_class__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__origin__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__origin__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__package__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__package__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__parameters__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__parameters__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__path__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__path__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__pos__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__pos__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__pow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__pow__)); - Py_FatalError("immortal object has less refcnt than 
expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__prepare__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__prepare__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__qualname__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__qualname__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__radd__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__radd__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rand__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rand__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rdivmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rdivmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reduce__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reduce__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reduce_ex__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reduce_ex__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__repr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__repr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__reversed__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__reversed__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rfloordiv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rfloordiv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rlshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rlshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rmatmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmatmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rmod__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmod__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rmul__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rmul__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__ror__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__ror__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__round__)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(__round__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rpow__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rpow__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rrshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rrshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rshift__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rshift__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rsub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rsub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rtruediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rtruediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__rxor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__rxor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__set__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__set__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__set_name__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__set_name__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setattr__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setattr__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setitem__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setitem__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__setstate__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__setstate__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__sizeof__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__sizeof__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__slotnames__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__slotnames__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__slots__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__slots__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__spec__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__spec__)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__str__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__str__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__sub__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__sub__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__subclasscheck__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__subclasscheck__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__subclasshook__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__subclasshook__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__truediv__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__truediv__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__trunc__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__trunc__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_is_unpacked_typevartuple__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_prepare_subst__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_prepare_subst__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_subst__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_subst__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__typing_unpacked_tuple_args__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__warningregistry__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__warningregistry__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__weaklistoffset__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__weaklistoffset__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__weakref__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__weakref__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(__xor__)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(__xor__)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_abc_impl)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_abc_impl)); - Py_FatalError("immortal 
object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_annotation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_annotation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_blksize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_blksize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_bootstrap)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_bootstrap)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_dealloc_warn)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_dealloc_warn)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_feature_version)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_feature_version)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_finalizing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_finalizing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_find_and_load)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_find_and_load)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_fix_up_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_fix_up_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_get_sourcefile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_get_sourcefile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_handle_fromlist)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_handle_fromlist)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_initializing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_initializing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_is_text_encoding)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_is_text_encoding)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_lock_unlock_module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_lock_unlock_module)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_showwarnmsg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_showwarnmsg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_shutdown)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_shutdown)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_slotnames)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_slotnames)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_uninitialized_submodules)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_uninitialized_submodules)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_warn_unawaited_coroutine)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_warn_unawaited_coroutine)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(_xoptions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(_xoptions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(a)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(a)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(abs_tol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(abs_tol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(access)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(access)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(add)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(add)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(after_in_child)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(after_in_child)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(after_in_parent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(after_in_parent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(aggregate_class)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(aggregate_class)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(append)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(append)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(argdefs)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(argdefs)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(arguments)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(arguments)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(argv)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(argv)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(attribute)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(attribute)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(authorizer_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(authorizer_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(b)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(b)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(backtick)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(backtick)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(base)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(base)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(before)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(before)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(big)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(big)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(binary_form)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(binary_form)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(block)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(block)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffer_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffer_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffering)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffering)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(buffers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(buffers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bufsize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bufsize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(builtins)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(builtins)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(byteorder)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(byteorder)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bytes)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bytes)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(bytes_per_sep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(bytes_per_sep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_call)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_call)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_exception)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_exception)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(c_return)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(c_return)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cached_statements)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cached_statements)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cadata)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cadata)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cafile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cafile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(call)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(call)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(capath)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(capath)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(category)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(category)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cb_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cb_type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(certfile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(certfile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(check_same_thread)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(check_same_thread)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(clear)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(clear)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if 
(Py_REFCNT((PyObject *)&_Py_ID(close)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(close)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closefd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closefd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(closure)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(closure)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_argcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_argcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_cellvars)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_cellvars)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_code)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_code)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_consts)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_consts)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_exceptiontable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_exceptiontable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_filename)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_filename)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_firstlineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_firstlineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_flags)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_flags)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_freevars)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_freevars)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_kwonlyargcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_kwonlyargcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_linetable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_linetable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_name)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_names)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_names)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_nlocals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_nlocals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_posonlyargcount)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_posonlyargcount)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_qualname)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_qualname)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_stacksize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_stacksize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(co_varnames)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(co_varnames)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(code)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(code)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(command)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(command)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(comment_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(comment_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(consts)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(consts)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(context)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(context)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cookie)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cookie)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(copy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(copy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(copyreg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(copyreg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(coro)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(coro)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(count)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(count)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(cwd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(cwd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(data)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(data)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(database)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(database)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(decode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(decode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(decoder)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(decoder)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(default)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(default)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(defaultaction)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(defaultaction)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(delete)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(delete)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(depth)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(depth)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(detect_types)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(detect_types)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(deterministic)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(deterministic)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(device)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(device)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dictcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dictcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(difference_update)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(difference_update)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digest)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digest)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digest_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digest_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(digestmod)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(digestmod)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dispatch_table)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dispatch_table)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(displayhook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(displayhook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dklen)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dklen)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(doc)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(doc)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dont_inherit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dont_inherit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dst)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dst)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(dst_dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(dst_dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(duration)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(duration)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(effective_ids)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(effective_ids)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(element_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(element_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(encode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(encode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(encoding)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(encoding)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end_lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end_lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(end_offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(end_offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(endpos)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(endpos)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(env)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(env)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(errors)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(errors)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(event)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(event)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(eventmask)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(eventmask)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exc_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exc_type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exc_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exc_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(excepthook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(excepthook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exception)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exception)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(exp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(exp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(extend)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(extend)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(family)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(family)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fanout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fanout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fd2)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fd2)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fdel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fdel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fget)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fget)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(file)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(file)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(file_actions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(file_actions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filename)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filename)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fileno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fileno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filepath)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filepath)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fillvalue)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fillvalue)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(filters)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(filters)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(final)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(final)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(find_class)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(find_class)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fix_imports)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fix_imports)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(flags)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(flags)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(flush)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(flush)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(follow_symlinks)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(follow_symlinks)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(format)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(format)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(frequency)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(frequency)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fromlist)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fromlist)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(fset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(fset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(func)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(func)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(generation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(generation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(genexpr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(genexpr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(get)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(get)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(get_source)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(get_source)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(getattr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(getattr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(getstate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(getstate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(gid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(gid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(globals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(globals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(groupindex)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(groupindex)); - Py_FatalError("immortal object has less refcnt than 
expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(groups)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(groups)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(handle)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(handle)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hash_name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hash_name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(header)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(header)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(headers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(headers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hi)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hi)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(hook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(hook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(id)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(id)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ignore)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ignore)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(imag)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(imag)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(importlib)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(importlib)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(in_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(in_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(incoming)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(incoming)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(indexgroup)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(indexgroup)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inf)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inf)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inheritable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inheritable)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial_bytes)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial_bytes)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initial_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initial_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(initval)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(initval)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(inner_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(inner_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(input)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(input)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(insert_comments)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(insert_comments)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(insert_pis)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(insert_pis)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(instructions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(instructions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(intern)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(intern)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(intersection)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(intersection)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isatty)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isatty)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isinstance)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isinstance)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(isolation_level)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(isolation_level)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(istext)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(istext)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(item)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(item)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(items)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(items)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iter)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iter)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iterable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iterable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(iterations)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(iterations)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(join)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(join)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(jump)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(jump)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keepends)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keepends)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(key)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(key)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keyfile)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keyfile)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(keys)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(keys)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(kind)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(kind)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lambda)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lambda)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_node)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_node)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_traceback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_traceback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_type)); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(last_value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(last_value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(latin1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(latin1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(leaf_size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(leaf_size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(len)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(len)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(length)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(length)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(level)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(level)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(limit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(limit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(line)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(line)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(line_buffering)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(line_buffering)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(listcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(listcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(little)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(little)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(lo)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(lo)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(locale)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(locale)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(locals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(locals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(loop)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(loop)); - Py_FatalError("immortal 
object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mapping)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mapping)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(match)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(match)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(max_length)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(max_length)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxdigits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxdigits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxevents)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxevents)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxmem)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxmem)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxsplit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxsplit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(maxvalue)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(maxvalue)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(memLevel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(memLevel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(memlimit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(memlimit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(message)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(message)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(metaclass)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(metaclass)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(method)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(method)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mod)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mod)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(module)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(module)); - Py_FatalError("immortal 
object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(module_globals)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(module_globals)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(modules)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(modules)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(mro)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(mro)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(msg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(msg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_arg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_arg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_sequence_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_sequence_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(n_unnamed_fields)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(n_unnamed_fields)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(name)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(namespace_separator)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(namespace_separator)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(namespaces)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(namespaces)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(narg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(narg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ndigits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ndigits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(new_limit)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(new_limit)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(newline)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(newline)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(newlines)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(newlines)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(next)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(next)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(node_depth)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(node_depth)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(node_offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(node_offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ns)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ns)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(number)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(number)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(obj)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(obj)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(object)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(object)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset_dst)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset_dst)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(offset_src)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(offset_src)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(on_type_read)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(on_type_read)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(onceregistry)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(onceregistry)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(oparg)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(oparg)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(opcode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(opcode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(open)) < _PyObject_IMMORTAL_REFCNT) { - 
_PyObject_Dump((PyObject *)&_Py_ID(open)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(opener)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(opener)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(operation)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(operation)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(optimize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(optimize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(options)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(options)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(order)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(order)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(out_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(out_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(outgoing)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(outgoing)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(overlapped)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(overlapped)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(owner)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(owner)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(p)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(p)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pages)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pages)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(parent)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(parent)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(password)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(password)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(path)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(path)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pattern)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pattern)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(peek)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_ID(peek)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(persistent_id)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(persistent_id)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(persistent_load)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(persistent_load)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(person)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(person)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pi_factory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pi_factory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(policy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(policy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(pos)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(pos)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(print_file_and_line)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(print_file_and_line)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(priority)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(priority)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(progress)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(progress)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(progress_handler)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(progress_handler)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(proto)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(proto)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(protocol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(protocol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ps1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ps1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(ps2)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(ps2)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(query)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(query)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(quotetabs)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(quotetabs)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(r)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(r)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(raw)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(raw)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(read)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(read)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(read1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(read1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readall)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readall)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readinto)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readinto)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readinto1)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readinto1)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readline)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readline)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(readonly)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(readonly)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(real)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(real)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reducer_override)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reducer_override)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(registry)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(registry)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(rel_tol)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(rel_tol)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reload)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reload)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(repl)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(repl)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(replace)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(replace)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reserved)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reserved)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reset)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reset)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(resetids)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(resetids)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(return)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(return)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reverse)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reverse)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(reversed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(reversed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(s)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(s)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(salt)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(salt)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sched_priority)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sched_priority)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(scheduler)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(scheduler)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(seek)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(seek)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(seekable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(seekable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(selectors)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(selectors)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(send)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(send)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sequence)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sequence)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(server_hostname)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(server_hostname)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(server_side)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(server_side)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(session)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(session)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setcomp)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setcomp)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setpgroup)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setpgroup)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsigdef)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsigdef)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setsigmask)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setsigmask)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(setstate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(setstate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(shape)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(shape)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(show_cmd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(show_cmd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(signed)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(signed)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(size)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(size)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_ID(sizehint)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sizehint)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sleep)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sleep)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sock)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sock)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sort)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sort)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sound)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sound)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(source)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(source)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(src)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(src)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(src_dir_fd)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(src_dir_fd)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stacklevel)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stacklevel)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(start)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(start)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(statement)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(statement)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(status)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(status)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stderr)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stderr)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stdin)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stdin)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(stdout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(stdout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(step)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(step)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(store_name)) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(store_name)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strategy)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strategy)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(strict_mode)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(strict_mode)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(string)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(string)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(sub_key)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(sub_key)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(symmetric_difference_update)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(symmetric_difference_update)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tabsize)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tabsize)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tag)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tag)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(target)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(target)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(target_is_directory)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(target_is_directory)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(task)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(task)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_frame)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_frame)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_lasti)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_lasti)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_lineno)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_lineno)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tb_next)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tb_next)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tell)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tell)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(template)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(template)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(term)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(term)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(text)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(text)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(threading)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(threading)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(throw)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(throw)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(timeout)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(timeout)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(times)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(times)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(top)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(top)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(trace_callback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(trace_callback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(traceback)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(traceback)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(trailers)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(trailers)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(translate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(translate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(truncate)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(truncate)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(twice)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(twice)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(txt)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(txt)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(type)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(type)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(tz)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(tz)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(uid)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(uid)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(unlink)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(unlink)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(unraisablehook)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(unraisablehook)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(uri)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(uri)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(usedforsecurity)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(usedforsecurity)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(value)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(value)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(values)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(values)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(version)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(version)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(warnings)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(warnings)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(warnoptions)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(warnoptions)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(wbits)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(wbits)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(week)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(week)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(weekday)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(weekday)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(which)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(which)); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(who)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(who)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(withdata)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(withdata)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(writable)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(writable)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(write)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(write)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(write_through)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(write_through)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(x)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(x)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(year)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(year)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_ID(zdict)) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_ID(zdict)); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[0]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[0]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[1]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[1]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[2]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[2]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[3]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[3]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[4]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[4]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[5]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[5]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[6]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[6]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[7]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[7]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[8]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[8]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[9]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[9]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[10]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[10]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[11]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[11]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[12]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[12]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[13]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[13]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[14]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[14]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[15]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[15]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[16]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[16]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[17]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[17]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[18]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[18]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[19]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[19]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[20]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[20]); - Py_FatalError("immortal object has 
less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[21]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[21]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[22]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[22]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[23]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[23]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[24]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[24]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[25]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[25]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[26]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[26]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[27]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[27]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[28]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[28]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[29]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[29]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[30]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[30]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[31]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[31]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[32]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[32]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[33]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[33]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[34]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[34]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[35]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[35]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[36]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[36]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[37]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[37]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[38]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[38]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[39]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[39]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[40]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[40]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[41]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[41]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[42]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[42]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[43]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[43]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[44]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[44]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[45]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[45]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[46]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[46]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[47]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[47]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[48]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject 
*)&_Py_SINGLETON(strings).ascii[48]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[49]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[49]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[50]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[50]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[51]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[51]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[52]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[52]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[53]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[53]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[54]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[54]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[55]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[55]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[56]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[56]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[57]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[57]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[58]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[58]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[59]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[59]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[60]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[60]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[61]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[61]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[62]) < _PyObject_IMMORTAL_REFCNT) 
{ - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[62]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[63]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[63]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[64]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[64]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[65]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[65]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[66]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[66]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[67]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[67]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[68]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[68]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[69]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[69]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[70]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[70]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[71]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[71]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[72]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[72]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[73]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[73]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[74]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[74]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[75]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[75]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[76]) 
< _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[76]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[77]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[77]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[78]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[78]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[79]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[79]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[80]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[80]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[81]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[81]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[82]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[82]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[83]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[83]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[84]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[84]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[85]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[85]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[86]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[86]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[87]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[87]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[88]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[88]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[89]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[89]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject 
*)&_Py_SINGLETON(strings).ascii[90]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[90]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[91]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[91]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[92]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[92]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[93]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[93]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[94]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[94]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[95]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[95]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[96]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[96]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[97]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[97]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[98]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[98]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[99]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[99]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[100]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[100]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[101]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[101]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[102]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[102]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[103]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[103]); - Py_FatalError("immortal object has less refcnt than expected 
_PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[104]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[104]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[105]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[105]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[106]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[106]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[107]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[107]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[108]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[108]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[109]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[109]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[110]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[110]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[111]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[111]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[112]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[112]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[113]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[113]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[114]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[114]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[115]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[115]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[116]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[116]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[117]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[117]); - 
Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[118]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[118]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[119]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[119]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[120]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[120]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[121]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[121]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[122]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[122]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[123]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[123]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[124]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[124]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[125]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[125]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[126]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[126]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).ascii[127]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).ascii[127]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[128 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[129 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[130 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[131 
- 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[131 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[132 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[133 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[134 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[135 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[136 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[137 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[138 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[139 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[140 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[141 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[142 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[143 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[144 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[145 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[146 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[147 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[148 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[149 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[150 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[151 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[152 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[153 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[154 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[155 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[156 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[157 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[158 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[159 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[160 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[161 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[162 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[163 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[164 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[165 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[166 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[167 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[168 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[169 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[170 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[171 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[172 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[173 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[174 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[175 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[176 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[177 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[178 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[179 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[180 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[181 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[182 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[183 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[184 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[185 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[186 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[187 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[188 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[189 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[190 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[191 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[192 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[193 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[194 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[195 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[196 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[197 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[198 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[199 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[200 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[201 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[202 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[203 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[204 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[205 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[206 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[207 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[208 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[209 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[210 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[211 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[212 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[213 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[214 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[215 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[216 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[217 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[218 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[219 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[220 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[221 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[222 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[223 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[224 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[225 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[226 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[227 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[228 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[229 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[230 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[231 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[232 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[233 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[234 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]) < 
_PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[235 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[236 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[237 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[238 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[239 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[240 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[241 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[242 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[243 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[244 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[245 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[246 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]) < _PyObject_IMMORTAL_REFCNT) { - _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[247 - 128]); - Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT"); - }; - if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]) < 
_PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[248 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[249 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[250 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[251 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[252 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[253 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[254 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(strings).latin1[255 - 128]);
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
-    if (Py_REFCNT((PyObject *)&_Py_SINGLETON(tuple_empty)) < _PyObject_IMMORTAL_REFCNT) {
-        _PyObject_Dump((PyObject *)&_Py_SINGLETON(tuple_empty));
-        Py_FatalError("immortal object has less refcnt than expected _PyObject_IMMORTAL_REFCNT");
-    };
+#define _Py_str_literals_INIT { \
+    INIT_STR(anon_dictcomp, "<dictcomp>"), \
+    INIT_STR(anon_genexpr, "<genexpr>"), \
+    INIT_STR(anon_lambda, "<lambda>"), \
+    INIT_STR(anon_listcomp, "<listcomp>"), \
+    INIT_STR(anon_module, "<module>"), \
+    INIT_STR(anon_setcomp, "<setcomp>"), \
+    INIT_STR(anon_string, "<string>"), \
+    INIT_STR(anon_unknown, "<unknown>"), \
+    INIT_STR(close_br, "}"), \
+    INIT_STR(dbl_close_br, "}}"), \
+    INIT_STR(dbl_open_br, "{{"), \
+    INIT_STR(dbl_percent, "%%"), \
+    INIT_STR(dot, "."), \
+    INIT_STR(dot_locals, ".<locals>"), \
+    INIT_STR(empty, ""), \
+    INIT_STR(json_decoder, "json.decoder"), \
+    INIT_STR(list_err, "list index out of range"), \
+    INIT_STR(newline, "\n"), \
+    INIT_STR(open_br, "{"), \
+    INIT_STR(percent, "%"), \
+    INIT_STR(shim_name, "<shim>"), \
+    INIT_STR(utf_8, "utf-8"), \
+}
+
+#define _Py_str_identifiers_INIT { \
+    INIT_ID(CANCELLED), \
+    INIT_ID(FINISHED), \
+    INIT_ID(False), \
+    INIT_ID(JSONDecodeError), \
+    INIT_ID(PENDING), \
+    INIT_ID(Py_Repr), \
+    INIT_ID(TextIOWrapper), \
+    INIT_ID(True), \
+    INIT_ID(WarningMessage), \
+    INIT_ID(_), \
+    INIT_ID(__IOBase_closed), \
+    INIT_ID(__abc_tpflags__), \
+    INIT_ID(__abs__), \
+    INIT_ID(__abstractmethods__), \
+    INIT_ID(__add__), \
+    INIT_ID(__aenter__), \
+    INIT_ID(__aexit__), \
+
INIT_ID(__aiter__), \ + INIT_ID(__all__), \ + INIT_ID(__and__), \ + INIT_ID(__anext__), \ + INIT_ID(__annotations__), \ + INIT_ID(__args__), \ + INIT_ID(__asyncio_running_event_loop__), \ + INIT_ID(__await__), \ + INIT_ID(__bases__), \ + INIT_ID(__bool__), \ + INIT_ID(__build_class__), \ + INIT_ID(__builtins__), \ + INIT_ID(__bytes__), \ + INIT_ID(__call__), \ + INIT_ID(__cantrace__), \ + INIT_ID(__class__), \ + INIT_ID(__class_getitem__), \ + INIT_ID(__classcell__), \ + INIT_ID(__complex__), \ + INIT_ID(__contains__), \ + INIT_ID(__copy__), \ + INIT_ID(__ctypes_from_outparam__), \ + INIT_ID(__del__), \ + INIT_ID(__delattr__), \ + INIT_ID(__delete__), \ + INIT_ID(__delitem__), \ + INIT_ID(__dict__), \ + INIT_ID(__dictoffset__), \ + INIT_ID(__dir__), \ + INIT_ID(__divmod__), \ + INIT_ID(__doc__), \ + INIT_ID(__enter__), \ + INIT_ID(__eq__), \ + INIT_ID(__exit__), \ + INIT_ID(__file__), \ + INIT_ID(__float__), \ + INIT_ID(__floordiv__), \ + INIT_ID(__format__), \ + INIT_ID(__fspath__), \ + INIT_ID(__ge__), \ + INIT_ID(__get__), \ + INIT_ID(__getattr__), \ + INIT_ID(__getattribute__), \ + INIT_ID(__getinitargs__), \ + INIT_ID(__getitem__), \ + INIT_ID(__getnewargs__), \ + INIT_ID(__getnewargs_ex__), \ + INIT_ID(__getstate__), \ + INIT_ID(__gt__), \ + INIT_ID(__hash__), \ + INIT_ID(__iadd__), \ + INIT_ID(__iand__), \ + INIT_ID(__ifloordiv__), \ + INIT_ID(__ilshift__), \ + INIT_ID(__imatmul__), \ + INIT_ID(__imod__), \ + INIT_ID(__import__), \ + INIT_ID(__imul__), \ + INIT_ID(__index__), \ + INIT_ID(__init__), \ + INIT_ID(__init_subclass__), \ + INIT_ID(__instancecheck__), \ + INIT_ID(__int__), \ + INIT_ID(__invert__), \ + INIT_ID(__ior__), \ + INIT_ID(__ipow__), \ + INIT_ID(__irshift__), \ + INIT_ID(__isabstractmethod__), \ + INIT_ID(__isub__), \ + INIT_ID(__iter__), \ + INIT_ID(__itruediv__), \ + INIT_ID(__ixor__), \ + INIT_ID(__le__), \ + INIT_ID(__len__), \ + INIT_ID(__length_hint__), \ + INIT_ID(__lltrace__), \ + INIT_ID(__loader__), \ + INIT_ID(__lshift__), \ + INIT_ID(__lt__), \ + INIT_ID(__main__), \ + INIT_ID(__matmul__), \ + INIT_ID(__missing__), \ + INIT_ID(__mod__), \ + INIT_ID(__module__), \ + INIT_ID(__mro_entries__), \ + INIT_ID(__mul__), \ + INIT_ID(__name__), \ + INIT_ID(__ne__), \ + INIT_ID(__neg__), \ + INIT_ID(__new__), \ + INIT_ID(__newobj__), \ + INIT_ID(__newobj_ex__), \ + INIT_ID(__next__), \ + INIT_ID(__notes__), \ + INIT_ID(__or__), \ + INIT_ID(__orig_class__), \ + INIT_ID(__origin__), \ + INIT_ID(__package__), \ + INIT_ID(__parameters__), \ + INIT_ID(__path__), \ + INIT_ID(__pos__), \ + INIT_ID(__pow__), \ + INIT_ID(__prepare__), \ + INIT_ID(__qualname__), \ + INIT_ID(__radd__), \ + INIT_ID(__rand__), \ + INIT_ID(__rdivmod__), \ + INIT_ID(__reduce__), \ + INIT_ID(__reduce_ex__), \ + INIT_ID(__repr__), \ + INIT_ID(__reversed__), \ + INIT_ID(__rfloordiv__), \ + INIT_ID(__rlshift__), \ + INIT_ID(__rmatmul__), \ + INIT_ID(__rmod__), \ + INIT_ID(__rmul__), \ + INIT_ID(__ror__), \ + INIT_ID(__round__), \ + INIT_ID(__rpow__), \ + INIT_ID(__rrshift__), \ + INIT_ID(__rshift__), \ + INIT_ID(__rsub__), \ + INIT_ID(__rtruediv__), \ + INIT_ID(__rxor__), \ + INIT_ID(__set__), \ + INIT_ID(__set_name__), \ + INIT_ID(__setattr__), \ + INIT_ID(__setitem__), \ + INIT_ID(__setstate__), \ + INIT_ID(__sizeof__), \ + INIT_ID(__slotnames__), \ + INIT_ID(__slots__), \ + INIT_ID(__spec__), \ + INIT_ID(__str__), \ + INIT_ID(__sub__), \ + INIT_ID(__subclasscheck__), \ + INIT_ID(__subclasshook__), \ + INIT_ID(__truediv__), \ + INIT_ID(__trunc__), \ + INIT_ID(__typing_is_unpacked_typevartuple__), 
\ + INIT_ID(__typing_prepare_subst__), \ + INIT_ID(__typing_subst__), \ + INIT_ID(__typing_unpacked_tuple_args__), \ + INIT_ID(__warningregistry__), \ + INIT_ID(__weaklistoffset__), \ + INIT_ID(__weakref__), \ + INIT_ID(__xor__), \ + INIT_ID(_abc_impl), \ + INIT_ID(_abstract_), \ + INIT_ID(_annotation), \ + INIT_ID(_anonymous_), \ + INIT_ID(_argtypes_), \ + INIT_ID(_as_parameter_), \ + INIT_ID(_asyncio_future_blocking), \ + INIT_ID(_blksize), \ + INIT_ID(_bootstrap), \ + INIT_ID(_check_retval_), \ + INIT_ID(_dealloc_warn), \ + INIT_ID(_feature_version), \ + INIT_ID(_fields_), \ + INIT_ID(_finalizing), \ + INIT_ID(_find_and_load), \ + INIT_ID(_fix_up_module), \ + INIT_ID(_flags_), \ + INIT_ID(_get_sourcefile), \ + INIT_ID(_handle_fromlist), \ + INIT_ID(_initializing), \ + INIT_ID(_is_text_encoding), \ + INIT_ID(_length_), \ + INIT_ID(_lock_unlock_module), \ + INIT_ID(_loop), \ + INIT_ID(_needs_com_addref_), \ + INIT_ID(_pack_), \ + INIT_ID(_restype_), \ + INIT_ID(_showwarnmsg), \ + INIT_ID(_shutdown), \ + INIT_ID(_slotnames), \ + INIT_ID(_strptime_datetime), \ + INIT_ID(_swappedbytes_), \ + INIT_ID(_type_), \ + INIT_ID(_uninitialized_submodules), \ + INIT_ID(_warn_unawaited_coroutine), \ + INIT_ID(_xoptions), \ + INIT_ID(a), \ + INIT_ID(abs_tol), \ + INIT_ID(access), \ + INIT_ID(add), \ + INIT_ID(add_done_callback), \ + INIT_ID(after_in_child), \ + INIT_ID(after_in_parent), \ + INIT_ID(aggregate_class), \ + INIT_ID(append), \ + INIT_ID(argdefs), \ + INIT_ID(arguments), \ + INIT_ID(argv), \ + INIT_ID(as_integer_ratio), \ + INIT_ID(ast), \ + INIT_ID(attribute), \ + INIT_ID(authorizer_callback), \ + INIT_ID(autocommit), \ + INIT_ID(b), \ + INIT_ID(backtick), \ + INIT_ID(base), \ + INIT_ID(before), \ + INIT_ID(big), \ + INIT_ID(binary_form), \ + INIT_ID(block), \ + INIT_ID(buffer), \ + INIT_ID(buffer_callback), \ + INIT_ID(buffer_size), \ + INIT_ID(buffering), \ + INIT_ID(buffers), \ + INIT_ID(bufsize), \ + INIT_ID(builtins), \ + INIT_ID(byteorder), \ + INIT_ID(bytes), \ + INIT_ID(bytes_per_sep), \ + INIT_ID(c), \ + INIT_ID(c_call), \ + INIT_ID(c_exception), \ + INIT_ID(c_return), \ + INIT_ID(cached_statements), \ + INIT_ID(cadata), \ + INIT_ID(cafile), \ + INIT_ID(call), \ + INIT_ID(call_exception_handler), \ + INIT_ID(call_soon), \ + INIT_ID(cancel), \ + INIT_ID(capath), \ + INIT_ID(category), \ + INIT_ID(cb_type), \ + INIT_ID(certfile), \ + INIT_ID(check_same_thread), \ + INIT_ID(clear), \ + INIT_ID(close), \ + INIT_ID(closed), \ + INIT_ID(closefd), \ + INIT_ID(closure), \ + INIT_ID(co_argcount), \ + INIT_ID(co_cellvars), \ + INIT_ID(co_code), \ + INIT_ID(co_consts), \ + INIT_ID(co_exceptiontable), \ + INIT_ID(co_filename), \ + INIT_ID(co_firstlineno), \ + INIT_ID(co_flags), \ + INIT_ID(co_freevars), \ + INIT_ID(co_kwonlyargcount), \ + INIT_ID(co_linetable), \ + INIT_ID(co_name), \ + INIT_ID(co_names), \ + INIT_ID(co_nlocals), \ + INIT_ID(co_posonlyargcount), \ + INIT_ID(co_qualname), \ + INIT_ID(co_stacksize), \ + INIT_ID(co_varnames), \ + INIT_ID(code), \ + INIT_ID(command), \ + INIT_ID(comment_factory), \ + INIT_ID(consts), \ + INIT_ID(context), \ + INIT_ID(cookie), \ + INIT_ID(copy), \ + INIT_ID(copyreg), \ + INIT_ID(coro), \ + INIT_ID(count), \ + INIT_ID(cwd), \ + INIT_ID(d), \ + INIT_ID(data), \ + INIT_ID(database), \ + INIT_ID(decode), \ + INIT_ID(decoder), \ + INIT_ID(default), \ + INIT_ID(defaultaction), \ + INIT_ID(delete), \ + INIT_ID(depth), \ + INIT_ID(detect_types), \ + INIT_ID(deterministic), \ + INIT_ID(device), \ + INIT_ID(dict), \ + INIT_ID(dictcomp), \ + 
INIT_ID(difference_update), \ + INIT_ID(digest), \ + INIT_ID(digest_size), \ + INIT_ID(digestmod), \ + INIT_ID(dir_fd), \ + INIT_ID(discard), \ + INIT_ID(dispatch_table), \ + INIT_ID(displayhook), \ + INIT_ID(dklen), \ + INIT_ID(doc), \ + INIT_ID(dont_inherit), \ + INIT_ID(dst), \ + INIT_ID(dst_dir_fd), \ + INIT_ID(duration), \ + INIT_ID(e), \ + INIT_ID(effective_ids), \ + INIT_ID(element_factory), \ + INIT_ID(encode), \ + INIT_ID(encoding), \ + INIT_ID(end), \ + INIT_ID(end_lineno), \ + INIT_ID(end_offset), \ + INIT_ID(endpos), \ + INIT_ID(env), \ + INIT_ID(errors), \ + INIT_ID(event), \ + INIT_ID(eventmask), \ + INIT_ID(exc_type), \ + INIT_ID(exc_value), \ + INIT_ID(excepthook), \ + INIT_ID(exception), \ + INIT_ID(exp), \ + INIT_ID(extend), \ + INIT_ID(facility), \ + INIT_ID(factory), \ + INIT_ID(false), \ + INIT_ID(family), \ + INIT_ID(fanout), \ + INIT_ID(fd), \ + INIT_ID(fd2), \ + INIT_ID(fdel), \ + INIT_ID(fget), \ + INIT_ID(file), \ + INIT_ID(file_actions), \ + INIT_ID(filename), \ + INIT_ID(fileno), \ + INIT_ID(filepath), \ + INIT_ID(fillvalue), \ + INIT_ID(filters), \ + INIT_ID(final), \ + INIT_ID(find_class), \ + INIT_ID(fix_imports), \ + INIT_ID(flags), \ + INIT_ID(flush), \ + INIT_ID(follow_symlinks), \ + INIT_ID(format), \ + INIT_ID(frequency), \ + INIT_ID(from_param), \ + INIT_ID(fromlist), \ + INIT_ID(fromtimestamp), \ + INIT_ID(fromutc), \ + INIT_ID(fset), \ + INIT_ID(func), \ + INIT_ID(future), \ + INIT_ID(generation), \ + INIT_ID(genexpr), \ + INIT_ID(get), \ + INIT_ID(get_debug), \ + INIT_ID(get_event_loop), \ + INIT_ID(get_loop), \ + INIT_ID(get_source), \ + INIT_ID(getattr), \ + INIT_ID(getstate), \ + INIT_ID(gid), \ + INIT_ID(globals), \ + INIT_ID(groupindex), \ + INIT_ID(groups), \ + INIT_ID(handle), \ + INIT_ID(hash_name), \ + INIT_ID(header), \ + INIT_ID(headers), \ + INIT_ID(hi), \ + INIT_ID(hook), \ + INIT_ID(id), \ + INIT_ID(ident), \ + INIT_ID(ignore), \ + INIT_ID(imag), \ + INIT_ID(importlib), \ + INIT_ID(in_fd), \ + INIT_ID(incoming), \ + INIT_ID(indexgroup), \ + INIT_ID(inf), \ + INIT_ID(inheritable), \ + INIT_ID(initial), \ + INIT_ID(initial_bytes), \ + INIT_ID(initial_value), \ + INIT_ID(initval), \ + INIT_ID(inner_size), \ + INIT_ID(input), \ + INIT_ID(insert_comments), \ + INIT_ID(insert_pis), \ + INIT_ID(instructions), \ + INIT_ID(intern), \ + INIT_ID(intersection), \ + INIT_ID(isatty), \ + INIT_ID(isinstance), \ + INIT_ID(isoformat), \ + INIT_ID(isolation_level), \ + INIT_ID(istext), \ + INIT_ID(item), \ + INIT_ID(items), \ + INIT_ID(iter), \ + INIT_ID(iterable), \ + INIT_ID(iterations), \ + INIT_ID(join), \ + INIT_ID(jump), \ + INIT_ID(keepends), \ + INIT_ID(key), \ + INIT_ID(keyfile), \ + INIT_ID(keys), \ + INIT_ID(kind), \ + INIT_ID(kw), \ + INIT_ID(kw1), \ + INIT_ID(kw2), \ + INIT_ID(lambda), \ + INIT_ID(last), \ + INIT_ID(last_node), \ + INIT_ID(last_traceback), \ + INIT_ID(last_type), \ + INIT_ID(last_value), \ + INIT_ID(latin1), \ + INIT_ID(leaf_size), \ + INIT_ID(len), \ + INIT_ID(length), \ + INIT_ID(level), \ + INIT_ID(limit), \ + INIT_ID(line), \ + INIT_ID(line_buffering), \ + INIT_ID(lineno), \ + INIT_ID(listcomp), \ + INIT_ID(little), \ + INIT_ID(lo), \ + INIT_ID(locale), \ + INIT_ID(locals), \ + INIT_ID(logoption), \ + INIT_ID(loop), \ + INIT_ID(mapping), \ + INIT_ID(match), \ + INIT_ID(max_length), \ + INIT_ID(maxdigits), \ + INIT_ID(maxevents), \ + INIT_ID(maxmem), \ + INIT_ID(maxsplit), \ + INIT_ID(maxvalue), \ + INIT_ID(memLevel), \ + INIT_ID(memlimit), \ + INIT_ID(message), \ + INIT_ID(metaclass), \ + INIT_ID(method), \ + 
INIT_ID(mod), \ + INIT_ID(mode), \ + INIT_ID(module), \ + INIT_ID(module_globals), \ + INIT_ID(modules), \ + INIT_ID(mro), \ + INIT_ID(msg), \ + INIT_ID(mycmp), \ + INIT_ID(n), \ + INIT_ID(n_arg), \ + INIT_ID(n_fields), \ + INIT_ID(n_sequence_fields), \ + INIT_ID(n_unnamed_fields), \ + INIT_ID(name), \ + INIT_ID(name_from), \ + INIT_ID(namespace_separator), \ + INIT_ID(namespaces), \ + INIT_ID(narg), \ + INIT_ID(ndigits), \ + INIT_ID(new_limit), \ + INIT_ID(newline), \ + INIT_ID(newlines), \ + INIT_ID(next), \ + INIT_ID(node_depth), \ + INIT_ID(node_offset), \ + INIT_ID(ns), \ + INIT_ID(nstype), \ + INIT_ID(null), \ + INIT_ID(number), \ + INIT_ID(obj), \ + INIT_ID(object), \ + INIT_ID(offset), \ + INIT_ID(offset_dst), \ + INIT_ID(offset_src), \ + INIT_ID(on_type_read), \ + INIT_ID(onceregistry), \ + INIT_ID(only_keys), \ + INIT_ID(oparg), \ + INIT_ID(opcode), \ + INIT_ID(open), \ + INIT_ID(opener), \ + INIT_ID(operation), \ + INIT_ID(optimize), \ + INIT_ID(options), \ + INIT_ID(order), \ + INIT_ID(out_fd), \ + INIT_ID(outgoing), \ + INIT_ID(overlapped), \ + INIT_ID(owner), \ + INIT_ID(p), \ + INIT_ID(pages), \ + INIT_ID(parent), \ + INIT_ID(password), \ + INIT_ID(path), \ + INIT_ID(pattern), \ + INIT_ID(peek), \ + INIT_ID(persistent_id), \ + INIT_ID(persistent_load), \ + INIT_ID(person), \ + INIT_ID(pi_factory), \ + INIT_ID(pid), \ + INIT_ID(policy), \ + INIT_ID(pos), \ + INIT_ID(pos1), \ + INIT_ID(pos2), \ + INIT_ID(print_file_and_line), \ + INIT_ID(priority), \ + INIT_ID(progress), \ + INIT_ID(progress_handler), \ + INIT_ID(proto), \ + INIT_ID(protocol), \ + INIT_ID(ps1), \ + INIT_ID(ps2), \ + INIT_ID(query), \ + INIT_ID(quotetabs), \ + INIT_ID(r), \ + INIT_ID(raw), \ + INIT_ID(read), \ + INIT_ID(read1), \ + INIT_ID(readable), \ + INIT_ID(readall), \ + INIT_ID(readinto), \ + INIT_ID(readinto1), \ + INIT_ID(readline), \ + INIT_ID(readonly), \ + INIT_ID(real), \ + INIT_ID(reducer_override), \ + INIT_ID(registry), \ + INIT_ID(rel_tol), \ + INIT_ID(reload), \ + INIT_ID(repl), \ + INIT_ID(replace), \ + INIT_ID(reserved), \ + INIT_ID(reset), \ + INIT_ID(resetids), \ + INIT_ID(return), \ + INIT_ID(reverse), \ + INIT_ID(reversed), \ + INIT_ID(s), \ + INIT_ID(salt), \ + INIT_ID(sched_priority), \ + INIT_ID(scheduler), \ + INIT_ID(seek), \ + INIT_ID(seekable), \ + INIT_ID(selectors), \ + INIT_ID(self), \ + INIT_ID(send), \ + INIT_ID(sep), \ + INIT_ID(sequence), \ + INIT_ID(server_hostname), \ + INIT_ID(server_side), \ + INIT_ID(session), \ + INIT_ID(setcomp), \ + INIT_ID(setpgroup), \ + INIT_ID(setsid), \ + INIT_ID(setsigdef), \ + INIT_ID(setsigmask), \ + INIT_ID(setstate), \ + INIT_ID(shape), \ + INIT_ID(show_cmd), \ + INIT_ID(signed), \ + INIT_ID(size), \ + INIT_ID(sizehint), \ + INIT_ID(sleep), \ + INIT_ID(sock), \ + INIT_ID(sort), \ + INIT_ID(sound), \ + INIT_ID(source), \ + INIT_ID(source_traceback), \ + INIT_ID(src), \ + INIT_ID(src_dir_fd), \ + INIT_ID(stacklevel), \ + INIT_ID(start), \ + INIT_ID(statement), \ + INIT_ID(status), \ + INIT_ID(stderr), \ + INIT_ID(stdin), \ + INIT_ID(stdout), \ + INIT_ID(step), \ + INIT_ID(store_name), \ + INIT_ID(strategy), \ + INIT_ID(strftime), \ + INIT_ID(strict), \ + INIT_ID(strict_mode), \ + INIT_ID(string), \ + INIT_ID(sub_key), \ + INIT_ID(symmetric_difference_update), \ + INIT_ID(tabsize), \ + INIT_ID(tag), \ + INIT_ID(target), \ + INIT_ID(target_is_directory), \ + INIT_ID(task), \ + INIT_ID(tb_frame), \ + INIT_ID(tb_lasti), \ + INIT_ID(tb_lineno), \ + INIT_ID(tb_next), \ + INIT_ID(tell), \ + INIT_ID(template), \ + INIT_ID(term), \ + INIT_ID(text), \ 
+ INIT_ID(threading), \ + INIT_ID(throw), \ + INIT_ID(timeout), \ + INIT_ID(times), \ + INIT_ID(timetuple), \ + INIT_ID(top), \ + INIT_ID(trace_callback), \ + INIT_ID(traceback), \ + INIT_ID(trailers), \ + INIT_ID(translate), \ + INIT_ID(true), \ + INIT_ID(truncate), \ + INIT_ID(twice), \ + INIT_ID(txt), \ + INIT_ID(type), \ + INIT_ID(tz), \ + INIT_ID(tzname), \ + INIT_ID(uid), \ + INIT_ID(unlink), \ + INIT_ID(unraisablehook), \ + INIT_ID(uri), \ + INIT_ID(usedforsecurity), \ + INIT_ID(value), \ + INIT_ID(values), \ + INIT_ID(version), \ + INIT_ID(warnings), \ + INIT_ID(warnoptions), \ + INIT_ID(wbits), \ + INIT_ID(week), \ + INIT_ID(weekday), \ + INIT_ID(which), \ + INIT_ID(who), \ + INIT_ID(withdata), \ + INIT_ID(writable), \ + INIT_ID(write), \ + INIT_ID(write_through), \ + INIT_ID(x), \ + INIT_ID(year), \ + INIT_ID(zdict), \ +} + +#define _Py_str_ascii_INIT { \ + _PyASCIIObject_INIT("\x00"), \ + _PyASCIIObject_INIT("\x01"), \ + _PyASCIIObject_INIT("\x02"), \ + _PyASCIIObject_INIT("\x03"), \ + _PyASCIIObject_INIT("\x04"), \ + _PyASCIIObject_INIT("\x05"), \ + _PyASCIIObject_INIT("\x06"), \ + _PyASCIIObject_INIT("\x07"), \ + _PyASCIIObject_INIT("\x08"), \ + _PyASCIIObject_INIT("\x09"), \ + _PyASCIIObject_INIT("\x0a"), \ + _PyASCIIObject_INIT("\x0b"), \ + _PyASCIIObject_INIT("\x0c"), \ + _PyASCIIObject_INIT("\x0d"), \ + _PyASCIIObject_INIT("\x0e"), \ + _PyASCIIObject_INIT("\x0f"), \ + _PyASCIIObject_INIT("\x10"), \ + _PyASCIIObject_INIT("\x11"), \ + _PyASCIIObject_INIT("\x12"), \ + _PyASCIIObject_INIT("\x13"), \ + _PyASCIIObject_INIT("\x14"), \ + _PyASCIIObject_INIT("\x15"), \ + _PyASCIIObject_INIT("\x16"), \ + _PyASCIIObject_INIT("\x17"), \ + _PyASCIIObject_INIT("\x18"), \ + _PyASCIIObject_INIT("\x19"), \ + _PyASCIIObject_INIT("\x1a"), \ + _PyASCIIObject_INIT("\x1b"), \ + _PyASCIIObject_INIT("\x1c"), \ + _PyASCIIObject_INIT("\x1d"), \ + _PyASCIIObject_INIT("\x1e"), \ + _PyASCIIObject_INIT("\x1f"), \ + _PyASCIIObject_INIT("\x20"), \ + _PyASCIIObject_INIT("\x21"), \ + _PyASCIIObject_INIT("\x22"), \ + _PyASCIIObject_INIT("\x23"), \ + _PyASCIIObject_INIT("\x24"), \ + _PyASCIIObject_INIT("\x25"), \ + _PyASCIIObject_INIT("\x26"), \ + _PyASCIIObject_INIT("\x27"), \ + _PyASCIIObject_INIT("\x28"), \ + _PyASCIIObject_INIT("\x29"), \ + _PyASCIIObject_INIT("\x2a"), \ + _PyASCIIObject_INIT("\x2b"), \ + _PyASCIIObject_INIT("\x2c"), \ + _PyASCIIObject_INIT("\x2d"), \ + _PyASCIIObject_INIT("\x2e"), \ + _PyASCIIObject_INIT("\x2f"), \ + _PyASCIIObject_INIT("\x30"), \ + _PyASCIIObject_INIT("\x31"), \ + _PyASCIIObject_INIT("\x32"), \ + _PyASCIIObject_INIT("\x33"), \ + _PyASCIIObject_INIT("\x34"), \ + _PyASCIIObject_INIT("\x35"), \ + _PyASCIIObject_INIT("\x36"), \ + _PyASCIIObject_INIT("\x37"), \ + _PyASCIIObject_INIT("\x38"), \ + _PyASCIIObject_INIT("\x39"), \ + _PyASCIIObject_INIT("\x3a"), \ + _PyASCIIObject_INIT("\x3b"), \ + _PyASCIIObject_INIT("\x3c"), \ + _PyASCIIObject_INIT("\x3d"), \ + _PyASCIIObject_INIT("\x3e"), \ + _PyASCIIObject_INIT("\x3f"), \ + _PyASCIIObject_INIT("\x40"), \ + _PyASCIIObject_INIT("\x41"), \ + _PyASCIIObject_INIT("\x42"), \ + _PyASCIIObject_INIT("\x43"), \ + _PyASCIIObject_INIT("\x44"), \ + _PyASCIIObject_INIT("\x45"), \ + _PyASCIIObject_INIT("\x46"), \ + _PyASCIIObject_INIT("\x47"), \ + _PyASCIIObject_INIT("\x48"), \ + _PyASCIIObject_INIT("\x49"), \ + _PyASCIIObject_INIT("\x4a"), \ + _PyASCIIObject_INIT("\x4b"), \ + _PyASCIIObject_INIT("\x4c"), \ + _PyASCIIObject_INIT("\x4d"), \ + _PyASCIIObject_INIT("\x4e"), \ + _PyASCIIObject_INIT("\x4f"), \ + _PyASCIIObject_INIT("\x50"), \ + 
_PyASCIIObject_INIT("\x51"), \ + _PyASCIIObject_INIT("\x52"), \ + _PyASCIIObject_INIT("\x53"), \ + _PyASCIIObject_INIT("\x54"), \ + _PyASCIIObject_INIT("\x55"), \ + _PyASCIIObject_INIT("\x56"), \ + _PyASCIIObject_INIT("\x57"), \ + _PyASCIIObject_INIT("\x58"), \ + _PyASCIIObject_INIT("\x59"), \ + _PyASCIIObject_INIT("\x5a"), \ + _PyASCIIObject_INIT("\x5b"), \ + _PyASCIIObject_INIT("\x5c"), \ + _PyASCIIObject_INIT("\x5d"), \ + _PyASCIIObject_INIT("\x5e"), \ + _PyASCIIObject_INIT("\x5f"), \ + _PyASCIIObject_INIT("\x60"), \ + _PyASCIIObject_INIT("\x61"), \ + _PyASCIIObject_INIT("\x62"), \ + _PyASCIIObject_INIT("\x63"), \ + _PyASCIIObject_INIT("\x64"), \ + _PyASCIIObject_INIT("\x65"), \ + _PyASCIIObject_INIT("\x66"), \ + _PyASCIIObject_INIT("\x67"), \ + _PyASCIIObject_INIT("\x68"), \ + _PyASCIIObject_INIT("\x69"), \ + _PyASCIIObject_INIT("\x6a"), \ + _PyASCIIObject_INIT("\x6b"), \ + _PyASCIIObject_INIT("\x6c"), \ + _PyASCIIObject_INIT("\x6d"), \ + _PyASCIIObject_INIT("\x6e"), \ + _PyASCIIObject_INIT("\x6f"), \ + _PyASCIIObject_INIT("\x70"), \ + _PyASCIIObject_INIT("\x71"), \ + _PyASCIIObject_INIT("\x72"), \ + _PyASCIIObject_INIT("\x73"), \ + _PyASCIIObject_INIT("\x74"), \ + _PyASCIIObject_INIT("\x75"), \ + _PyASCIIObject_INIT("\x76"), \ + _PyASCIIObject_INIT("\x77"), \ + _PyASCIIObject_INIT("\x78"), \ + _PyASCIIObject_INIT("\x79"), \ + _PyASCIIObject_INIT("\x7a"), \ + _PyASCIIObject_INIT("\x7b"), \ + _PyASCIIObject_INIT("\x7c"), \ + _PyASCIIObject_INIT("\x7d"), \ + _PyASCIIObject_INIT("\x7e"), \ + _PyASCIIObject_INIT("\x7f"), \ +} + +#define _Py_str_latin1_INIT { \ + _PyUnicode_LATIN1_INIT("\x80", "\xc2\x80"), \ + _PyUnicode_LATIN1_INIT("\x81", "\xc2\x81"), \ + _PyUnicode_LATIN1_INIT("\x82", "\xc2\x82"), \ + _PyUnicode_LATIN1_INIT("\x83", "\xc2\x83"), \ + _PyUnicode_LATIN1_INIT("\x84", "\xc2\x84"), \ + _PyUnicode_LATIN1_INIT("\x85", "\xc2\x85"), \ + _PyUnicode_LATIN1_INIT("\x86", "\xc2\x86"), \ + _PyUnicode_LATIN1_INIT("\x87", "\xc2\x87"), \ + _PyUnicode_LATIN1_INIT("\x88", "\xc2\x88"), \ + _PyUnicode_LATIN1_INIT("\x89", "\xc2\x89"), \ + _PyUnicode_LATIN1_INIT("\x8a", "\xc2\x8a"), \ + _PyUnicode_LATIN1_INIT("\x8b", "\xc2\x8b"), \ + _PyUnicode_LATIN1_INIT("\x8c", "\xc2\x8c"), \ + _PyUnicode_LATIN1_INIT("\x8d", "\xc2\x8d"), \ + _PyUnicode_LATIN1_INIT("\x8e", "\xc2\x8e"), \ + _PyUnicode_LATIN1_INIT("\x8f", "\xc2\x8f"), \ + _PyUnicode_LATIN1_INIT("\x90", "\xc2\x90"), \ + _PyUnicode_LATIN1_INIT("\x91", "\xc2\x91"), \ + _PyUnicode_LATIN1_INIT("\x92", "\xc2\x92"), \ + _PyUnicode_LATIN1_INIT("\x93", "\xc2\x93"), \ + _PyUnicode_LATIN1_INIT("\x94", "\xc2\x94"), \ + _PyUnicode_LATIN1_INIT("\x95", "\xc2\x95"), \ + _PyUnicode_LATIN1_INIT("\x96", "\xc2\x96"), \ + _PyUnicode_LATIN1_INIT("\x97", "\xc2\x97"), \ + _PyUnicode_LATIN1_INIT("\x98", "\xc2\x98"), \ + _PyUnicode_LATIN1_INIT("\x99", "\xc2\x99"), \ + _PyUnicode_LATIN1_INIT("\x9a", "\xc2\x9a"), \ + _PyUnicode_LATIN1_INIT("\x9b", "\xc2\x9b"), \ + _PyUnicode_LATIN1_INIT("\x9c", "\xc2\x9c"), \ + _PyUnicode_LATIN1_INIT("\x9d", "\xc2\x9d"), \ + _PyUnicode_LATIN1_INIT("\x9e", "\xc2\x9e"), \ + _PyUnicode_LATIN1_INIT("\x9f", "\xc2\x9f"), \ + _PyUnicode_LATIN1_INIT("\xa0", "\xc2\xa0"), \ + _PyUnicode_LATIN1_INIT("\xa1", "\xc2\xa1"), \ + _PyUnicode_LATIN1_INIT("\xa2", "\xc2\xa2"), \ + _PyUnicode_LATIN1_INIT("\xa3", "\xc2\xa3"), \ + _PyUnicode_LATIN1_INIT("\xa4", "\xc2\xa4"), \ + _PyUnicode_LATIN1_INIT("\xa5", "\xc2\xa5"), \ + _PyUnicode_LATIN1_INIT("\xa6", "\xc2\xa6"), \ + _PyUnicode_LATIN1_INIT("\xa7", "\xc2\xa7"), \ + _PyUnicode_LATIN1_INIT("\xa8", "\xc2\xa8"), \ 
+ _PyUnicode_LATIN1_INIT("\xa9", "\xc2\xa9"), \ + _PyUnicode_LATIN1_INIT("\xaa", "\xc2\xaa"), \ + _PyUnicode_LATIN1_INIT("\xab", "\xc2\xab"), \ + _PyUnicode_LATIN1_INIT("\xac", "\xc2\xac"), \ + _PyUnicode_LATIN1_INIT("\xad", "\xc2\xad"), \ + _PyUnicode_LATIN1_INIT("\xae", "\xc2\xae"), \ + _PyUnicode_LATIN1_INIT("\xaf", "\xc2\xaf"), \ + _PyUnicode_LATIN1_INIT("\xb0", "\xc2\xb0"), \ + _PyUnicode_LATIN1_INIT("\xb1", "\xc2\xb1"), \ + _PyUnicode_LATIN1_INIT("\xb2", "\xc2\xb2"), \ + _PyUnicode_LATIN1_INIT("\xb3", "\xc2\xb3"), \ + _PyUnicode_LATIN1_INIT("\xb4", "\xc2\xb4"), \ + _PyUnicode_LATIN1_INIT("\xb5", "\xc2\xb5"), \ + _PyUnicode_LATIN1_INIT("\xb6", "\xc2\xb6"), \ + _PyUnicode_LATIN1_INIT("\xb7", "\xc2\xb7"), \ + _PyUnicode_LATIN1_INIT("\xb8", "\xc2\xb8"), \ + _PyUnicode_LATIN1_INIT("\xb9", "\xc2\xb9"), \ + _PyUnicode_LATIN1_INIT("\xba", "\xc2\xba"), \ + _PyUnicode_LATIN1_INIT("\xbb", "\xc2\xbb"), \ + _PyUnicode_LATIN1_INIT("\xbc", "\xc2\xbc"), \ + _PyUnicode_LATIN1_INIT("\xbd", "\xc2\xbd"), \ + _PyUnicode_LATIN1_INIT("\xbe", "\xc2\xbe"), \ + _PyUnicode_LATIN1_INIT("\xbf", "\xc2\xbf"), \ + _PyUnicode_LATIN1_INIT("\xc0", "\xc3\x80"), \ + _PyUnicode_LATIN1_INIT("\xc1", "\xc3\x81"), \ + _PyUnicode_LATIN1_INIT("\xc2", "\xc3\x82"), \ + _PyUnicode_LATIN1_INIT("\xc3", "\xc3\x83"), \ + _PyUnicode_LATIN1_INIT("\xc4", "\xc3\x84"), \ + _PyUnicode_LATIN1_INIT("\xc5", "\xc3\x85"), \ + _PyUnicode_LATIN1_INIT("\xc6", "\xc3\x86"), \ + _PyUnicode_LATIN1_INIT("\xc7", "\xc3\x87"), \ + _PyUnicode_LATIN1_INIT("\xc8", "\xc3\x88"), \ + _PyUnicode_LATIN1_INIT("\xc9", "\xc3\x89"), \ + _PyUnicode_LATIN1_INIT("\xca", "\xc3\x8a"), \ + _PyUnicode_LATIN1_INIT("\xcb", "\xc3\x8b"), \ + _PyUnicode_LATIN1_INIT("\xcc", "\xc3\x8c"), \ + _PyUnicode_LATIN1_INIT("\xcd", "\xc3\x8d"), \ + _PyUnicode_LATIN1_INIT("\xce", "\xc3\x8e"), \ + _PyUnicode_LATIN1_INIT("\xcf", "\xc3\x8f"), \ + _PyUnicode_LATIN1_INIT("\xd0", "\xc3\x90"), \ + _PyUnicode_LATIN1_INIT("\xd1", "\xc3\x91"), \ + _PyUnicode_LATIN1_INIT("\xd2", "\xc3\x92"), \ + _PyUnicode_LATIN1_INIT("\xd3", "\xc3\x93"), \ + _PyUnicode_LATIN1_INIT("\xd4", "\xc3\x94"), \ + _PyUnicode_LATIN1_INIT("\xd5", "\xc3\x95"), \ + _PyUnicode_LATIN1_INIT("\xd6", "\xc3\x96"), \ + _PyUnicode_LATIN1_INIT("\xd7", "\xc3\x97"), \ + _PyUnicode_LATIN1_INIT("\xd8", "\xc3\x98"), \ + _PyUnicode_LATIN1_INIT("\xd9", "\xc3\x99"), \ + _PyUnicode_LATIN1_INIT("\xda", "\xc3\x9a"), \ + _PyUnicode_LATIN1_INIT("\xdb", "\xc3\x9b"), \ + _PyUnicode_LATIN1_INIT("\xdc", "\xc3\x9c"), \ + _PyUnicode_LATIN1_INIT("\xdd", "\xc3\x9d"), \ + _PyUnicode_LATIN1_INIT("\xde", "\xc3\x9e"), \ + _PyUnicode_LATIN1_INIT("\xdf", "\xc3\x9f"), \ + _PyUnicode_LATIN1_INIT("\xe0", "\xc3\xa0"), \ + _PyUnicode_LATIN1_INIT("\xe1", "\xc3\xa1"), \ + _PyUnicode_LATIN1_INIT("\xe2", "\xc3\xa2"), \ + _PyUnicode_LATIN1_INIT("\xe3", "\xc3\xa3"), \ + _PyUnicode_LATIN1_INIT("\xe4", "\xc3\xa4"), \ + _PyUnicode_LATIN1_INIT("\xe5", "\xc3\xa5"), \ + _PyUnicode_LATIN1_INIT("\xe6", "\xc3\xa6"), \ + _PyUnicode_LATIN1_INIT("\xe7", "\xc3\xa7"), \ + _PyUnicode_LATIN1_INIT("\xe8", "\xc3\xa8"), \ + _PyUnicode_LATIN1_INIT("\xe9", "\xc3\xa9"), \ + _PyUnicode_LATIN1_INIT("\xea", "\xc3\xaa"), \ + _PyUnicode_LATIN1_INIT("\xeb", "\xc3\xab"), \ + _PyUnicode_LATIN1_INIT("\xec", "\xc3\xac"), \ + _PyUnicode_LATIN1_INIT("\xed", "\xc3\xad"), \ + _PyUnicode_LATIN1_INIT("\xee", "\xc3\xae"), \ + _PyUnicode_LATIN1_INIT("\xef", "\xc3\xaf"), \ + _PyUnicode_LATIN1_INIT("\xf0", "\xc3\xb0"), \ + _PyUnicode_LATIN1_INIT("\xf1", "\xc3\xb1"), \ + _PyUnicode_LATIN1_INIT("\xf2", "\xc3\xb2"), \ + 
    _PyUnicode_LATIN1_INIT("\xf3", "\xc3\xb3"), \
+    _PyUnicode_LATIN1_INIT("\xf4", "\xc3\xb4"), \
+    _PyUnicode_LATIN1_INIT("\xf5", "\xc3\xb5"), \
+    _PyUnicode_LATIN1_INIT("\xf6", "\xc3\xb6"), \
+    _PyUnicode_LATIN1_INIT("\xf7", "\xc3\xb7"), \
+    _PyUnicode_LATIN1_INIT("\xf8", "\xc3\xb8"), \
+    _PyUnicode_LATIN1_INIT("\xf9", "\xc3\xb9"), \
+    _PyUnicode_LATIN1_INIT("\xfa", "\xc3\xba"), \
+    _PyUnicode_LATIN1_INIT("\xfb", "\xc3\xbb"), \
+    _PyUnicode_LATIN1_INIT("\xfc", "\xc3\xbc"), \
+    _PyUnicode_LATIN1_INIT("\xfd", "\xc3\xbd"), \
+    _PyUnicode_LATIN1_INIT("\xfe", "\xc3\xbe"), \
+    _PyUnicode_LATIN1_INIT("\xff", "\xc3\xbf"), \
 }
-#endif /* End auto-generated code */
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h
index 2d64aba22ff905..8532646ce7d95c 100644
--- a/Include/internal/pycore_symtable.h
+++ b/Include/internal/pycore_symtable.h
@@ -49,7 +49,7 @@ typedef struct _symtable_entry {
     PyObject *ste_varnames;  /* list of function parameters */
     PyObject *ste_children;  /* list of child blocks */
     PyObject *ste_directives;/* locations of global and nonlocal statements */
-    _Py_block_ty ste_type;   /* module, class or function */
+    _Py_block_ty ste_type;   /* module, class, function or annotation */
     int ste_nested;      /* true if block is nested */
     unsigned ste_free : 1;        /* true if block has free variables */
     unsigned ste_child_free : 1;  /* true if a child block has free vars,
diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h
index f9b8240e2168f2..95459ab9f7d004 100644
--- a/Include/internal/pycore_token.h
+++ b/Include/internal/pycore_token.h
@@ -1,4 +1,4 @@
-/* Auto-generated by Tools/scripts/generate_token.py */
+/* Auto-generated by Tools/build/generate_token.py */
 
 /* Token types */
 #ifndef Py_INTERNAL_TOKEN_H
diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h
index 5e7aca1b9f5ae9..71f3068900da31 100644
--- a/Include/internal/pycore_typeobject.h
+++ b/Include/internal/pycore_typeobject.h
@@ -18,6 +18,15 @@ extern void _PyTypes_Fini(PyInterpreterState *);
 
 /* other API */
 
+/* Length of array of slotdef pointers used to store slots with the
+   same __name__. There should be at most MAX_EQUIV-1 slotdef entries with
+   the same __name__, for any __name__. Since that's a static property, it is
+   appropriate to declare fixed-size arrays for this. */
+#define MAX_EQUIV 10
+
+typedef struct wrapperbase pytype_slotdef;
+
+
 // Type attribute lookup cache: speed up attribute and method lookups,
 // see _PyType_Lookup().
 struct type_cache_entry {
diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h
index 63bf04b3e1b872..b315ca1ae5b64b 100644
--- a/Include/internal/pycore_unicodeobject.h
+++ b/Include/internal/pycore_unicodeobject.h
@@ -31,6 +31,10 @@ struct _Py_unicode_runtime_ids {
     Py_ssize_t next_index;
 };
 
+struct _Py_unicode_runtime_state {
+    struct _Py_unicode_runtime_ids ids;
+};
+
 /* fs_codec.encoding is initialized to NULL.
    Later, it is set to a non-NULL string by _PyUnicode_InitEncodings().
*/ struct _Py_unicode_fs_codec { diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h new file mode 100644 index 00000000000000..80be342b5b3b44 --- /dev/null +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -0,0 +1,1332 @@ +#ifndef Py_INTERNAL_UNICODEOBJECT_GENERATED_H +#define Py_INTERNAL_UNICODEOBJECT_GENERATED_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +/* The following is auto-generated by Tools/build/generate_global_objects.py. */ +static inline void +_PyUnicode_InitStaticStrings(void) { + PyObject *string; + string = &_Py_ID(CANCELLED); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(FINISHED); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(False); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(JSONDecodeError); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(PENDING); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(Py_Repr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(TextIOWrapper); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(True); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(WarningMessage); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__IOBase_closed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abc_tpflags__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__abstractmethods__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__add__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aenter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aexit__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__aiter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__all__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__and__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__anext__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__annotations__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__args__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__asyncio_running_event_loop__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__await__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bases__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bool__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__build_class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__builtins__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__bytes__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__call__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__cantrace__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__class_getitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__classcell__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__complex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__contains__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__copy__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ctypes_from_outparam__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__del__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delete__); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(__delitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dict__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dictoffset__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__dir__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__divmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__doc__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__enter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__eq__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__exit__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__file__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__float__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__floordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__format__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__fspath__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ge__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__get__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getattribute__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getinitargs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getnewargs__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getnewargs_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__getstate__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__gt__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__hash__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iadd__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iand__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ifloordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ilshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imatmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__import__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__imul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__index__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__init__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__init_subclass__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__instancecheck__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__int__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__invert__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ior__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ipow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__irshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__isabstractmethod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__isub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__iter__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__itruediv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ixor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__le__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__len__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__length_hint__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lltrace__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__loader__); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__lt__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__main__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__matmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__missing__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__module__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mro_entries__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__mul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__name__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ne__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__neg__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__new__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__newobj__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__newobj_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__next__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__notes__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__or__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__orig_class__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__origin__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__package__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__parameters__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__path__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__pos__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__pow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__prepare__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__qualname__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__radd__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rand__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rdivmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reduce__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reduce_ex__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__repr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__reversed__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rfloordiv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rlshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmatmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmod__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rmul__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__ror__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__round__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rpow__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rrshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rshift__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rsub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rtruediv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__rxor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__set__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__set_name__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setattr__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setitem__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__setstate__); + PyUnicode_InternInPlace(&string); + 
string = &_Py_ID(__sizeof__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__slotnames__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__slots__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__spec__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__str__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__sub__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__subclasscheck__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__subclasshook__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__truediv__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__trunc__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_is_unpacked_typevartuple__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_prepare_subst__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_subst__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__typing_unpacked_tuple_args__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__warningregistry__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__weaklistoffset__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__weakref__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(__xor__); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_abc_impl); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_abstract_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_annotation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_anonymous_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_argtypes_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_as_parameter_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_asyncio_future_blocking); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_blksize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_bootstrap); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_check_retval_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_dealloc_warn); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_feature_version); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_fields_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_finalizing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_find_and_load); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_fix_up_module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_flags_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_get_sourcefile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_handle_fromlist); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_initializing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_is_text_encoding); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_length_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_lock_unlock_module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_needs_com_addref_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_pack_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_restype_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_showwarnmsg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_shutdown); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_slotnames); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_strptime_datetime); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_swappedbytes_); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(_type_); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_uninitialized_submodules); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_warn_unawaited_coroutine); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(_xoptions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(a); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(abs_tol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(access); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(add); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(add_done_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(after_in_child); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(after_in_parent); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(aggregate_class); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(append); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(argdefs); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(arguments); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(argv); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(as_integer_ratio); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ast); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(attribute); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(authorizer_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(autocommit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(b); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(backtick); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(base); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(before); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(big); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(binary_form); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(block); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffer_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffering); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(buffers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bufsize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(builtins); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(byteorder); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bytes); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(bytes_per_sep); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_call); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_exception); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(c_return); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cached_statements); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cadata); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cafile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call_exception_handler); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(call_soon); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cancel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(capath); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(category); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cb_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(certfile); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(check_same_thread); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(clear); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(close); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closefd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(closure); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_argcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_cellvars); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_code); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_consts); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_exceptiontable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_filename); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_firstlineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_flags); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_freevars); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_kwonlyargcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_linetable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_names); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_nlocals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_posonlyargcount); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_qualname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_stacksize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(co_varnames); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(code); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(command); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(comment_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(consts); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(context); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cookie); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(copy); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(copyreg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(coro); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(count); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(cwd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(d); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(data); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(database); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(decode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(decoder); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(default); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(defaultaction); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(delete); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(depth); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(detect_types); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(deterministic); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(device); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dict); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dictcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(difference_update); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digest); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digest_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(digestmod); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(dir_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(discard); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dispatch_table); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(displayhook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dklen); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(doc); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dont_inherit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dst); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(dst_dir_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(duration); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(e); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(effective_ids); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(element_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(encode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(encoding); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end_lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(end_offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(endpos); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(env); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(errors); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(event); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(eventmask); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exc_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exc_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(excepthook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exception); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(exp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(extend); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(facility); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(false); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(family); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fanout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fd2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fdel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fget); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(file); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(file_actions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filename); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fileno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filepath); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fillvalue); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(filters); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(final); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(find_class); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fix_imports); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(flags); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(flush); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(follow_symlinks); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(format); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(frequency); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(from_param); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromlist); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromtimestamp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fromutc); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(fset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(func); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(future); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(generation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(genexpr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_debug); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_event_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(get_source); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(getattr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(getstate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(gid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(globals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(groupindex); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(groups); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(handle); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hash_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(header); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(headers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hi); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(hook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(id); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ident); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ignore); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(imag); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(importlib); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(in_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(incoming); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(indexgroup); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inf); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inheritable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial_bytes); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initial_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(initval); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(inner_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(input); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(insert_comments); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(insert_pis); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(instructions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(intern); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(intersection); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isatty); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isinstance); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isoformat); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(isolation_level); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(istext); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(item); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(items); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(iter); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(iterable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(iterations); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(join); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(jump); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keepends); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(key); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keyfile); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(keys); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kind); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(kw2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lambda); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_node); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(last_value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(latin1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(leaf_size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(len); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(length); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(level); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(limit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(line); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(line_buffering); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(listcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(little); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(lo); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(locale); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(locals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(logoption); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(loop); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mapping); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(match); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(max_length); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxdigits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxevents); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxmem); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxsplit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(maxvalue); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(memLevel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(memlimit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(message); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(metaclass); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(method); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mod); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(module); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(module_globals); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(modules); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mro); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(msg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(mycmp); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(n); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_arg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_sequence_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(n_unnamed_fields); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(name_from); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(namespace_separator); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(namespaces); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(narg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ndigits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(new_limit); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(newline); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(newlines); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(next); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(node_depth); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(node_offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ns); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(nstype); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(null); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(number); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(obj); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(object); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset_dst); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(offset_src); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(on_type_read); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(onceregistry); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(only_keys); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(oparg); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(opcode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(open); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(opener); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(operation); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(optimize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(options); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(order); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(out_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(outgoing); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(overlapped); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(owner); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(p); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pages); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(parent); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(password); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(path); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pattern); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(peek); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(persistent_id); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(persistent_load); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(person); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pi_factory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(policy); + 
PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(pos2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(print_file_and_line); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(priority); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(progress); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(progress_handler); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(proto); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(protocol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ps1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(ps2); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(query); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(quotetabs); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(r); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(raw); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(read); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(read1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readall); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readinto); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readinto1); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readline); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(readonly); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(real); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reducer_override); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(registry); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(rel_tol); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reload); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(repl); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(replace); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reserved); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reset); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(resetids); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(return); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reverse); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(reversed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(s); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(salt); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sched_priority); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(scheduler); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(seek); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(seekable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(selectors); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(self); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(send); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sep); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sequence); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(server_hostname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(server_side); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(session); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setcomp); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setpgroup); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setsid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setsigdef); + PyUnicode_InternInPlace(&string); + 
string = &_Py_ID(setsigmask); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(setstate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(shape); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(show_cmd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(signed); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(size); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sizehint); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sleep); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sock); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sort); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sound); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(source); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(source_traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(src); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(src_dir_fd); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stacklevel); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(start); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(statement); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(status); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stderr); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stdin); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(stdout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(step); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(store_name); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strategy); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strftime); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strict); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(strict_mode); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(string); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(sub_key); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(symmetric_difference_update); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tabsize); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tag); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(target); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(target_is_directory); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(task); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_frame); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_lasti); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_lineno); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tb_next); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tell); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(template); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(term); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(text); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(threading); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(throw); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(timeout); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(times); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(timetuple); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(top); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(trace_callback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(traceback); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(trailers); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(translate); + PyUnicode_InternInPlace(&string); + string = 
&_Py_ID(true); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(truncate); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(twice); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(txt); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(type); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tz); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(tzname); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(uid); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(unlink); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(unraisablehook); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(uri); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(usedforsecurity); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(value); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(values); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(version); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(warnings); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(warnoptions); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(wbits); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(week); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(weekday); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(which); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(who); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(withdata); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(writable); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(write); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(write_through); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(x); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(year); + PyUnicode_InternInPlace(&string); + string = &_Py_ID(zdict); + PyUnicode_InternInPlace(&string); +} +/* End auto-generated code */ +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_UNICODEOBJECT_GENERATED_H */ diff --git a/Include/memoryobject.h b/Include/memoryobject.h index 19aec679a5fad1..2c9146aa2b5b06 100644 --- a/Include/memoryobject.h +++ b/Include/memoryobject.h @@ -6,20 +6,10 @@ extern "C" { #endif -#ifndef Py_LIMITED_API -PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type; -#endif PyAPI_DATA(PyTypeObject) PyMemoryView_Type; #define PyMemoryView_Check(op) Py_IS_TYPE((op), &PyMemoryView_Type) -#ifndef Py_LIMITED_API -/* Get a pointer to the memoryview's private copy of the exporter's buffer. */ -#define PyMemoryView_GET_BUFFER(op) (&((PyMemoryViewObject *)(op))->view) -/* Get a pointer to the exporting object (this may be NULL!). */ -#define PyMemoryView_GET_BASE(op) (((PyMemoryViewObject *)(op))->view.obj) -#endif - PyAPI_FUNC(PyObject *) PyMemoryView_FromObject(PyObject *base); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 PyAPI_FUNC(PyObject *) PyMemoryView_FromMemory(char *mem, Py_ssize_t size, @@ -32,38 +22,10 @@ PyAPI_FUNC(PyObject *) PyMemoryView_GetContiguous(PyObject *base, int buffertype, char order); - -/* The structs are declared here so that macros can work, but they shouldn't - be considered public. Don't access their fields directly, use the macros - and functions instead! 
*/ #ifndef Py_LIMITED_API -#define _Py_MANAGED_BUFFER_RELEASED 0x001 /* access to exporter blocked */ -#define _Py_MANAGED_BUFFER_FREE_FORMAT 0x002 /* free format */ -typedef struct { - PyObject_HEAD - int flags; /* state flags */ - Py_ssize_t exports; /* number of direct memoryview exports */ - Py_buffer master; /* snapshot buffer obtained from the original exporter */ -} _PyManagedBufferObject; - - -/* memoryview state flags */ -#define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */ -#define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */ -#define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */ -#define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */ -#define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */ - -typedef struct { - PyObject_VAR_HEAD - _PyManagedBufferObject *mbuf; /* managed buffer */ - Py_hash_t hash; /* hash value for read-only views */ - int flags; /* state flags */ - Py_ssize_t exports; /* number of buffer re-exports */ - Py_buffer view; /* private copy of the exporter's view */ - PyObject *weakreflist; - Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */ -} PyMemoryViewObject; +# define Py_CPYTHON_MEMORYOBJECT_H +# include "cpython/memoryobject.h" +# undef Py_CPYTHON_MEMORYOBJECT_H #endif #ifdef __cplusplus diff --git a/Include/moduleobject.h b/Include/moduleobject.h index fbb2c5ae79444b..555564ec73b4a2 100644 --- a/Include/moduleobject.h +++ b/Include/moduleobject.h @@ -43,8 +43,22 @@ PyAPI_DATA(PyTypeObject) PyModuleDef_Type; typedef struct PyModuleDef_Base { PyObject_HEAD + /* The function used to re-initialize the module. + This is only set for legacy (single-phase init) extension modules + and only used for those that support multiple initializations + (m_size >= 0). + It is set by _PyImport_LoadDynamicModuleWithSpec() + and _imp.create_builtin(). */ PyObject* (*m_init)(void); + /* The module's index into its interpreter's modules_by_index cache. + This is set for all extension modules but only used for legacy ones. + (See PyInterpreterState.modules_by_index for more info.) + It is set by PyModuleDef_Init(). */ Py_ssize_t m_index; + /* A copy of the module's __dict__ after the first time it was loaded. + This is only set/used for legacy modules that do not support + multiple initializations. + It is set by _PyImport_FixupExtensionObject(). 
*/ PyObject* m_copy; } PyModuleDef_Base; diff --git a/Include/opcode.h b/Include/opcode.h index 42825df6217b46..f284313d2ed756 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -1,4 +1,4 @@ -// Auto-generated by Tools/scripts/generate_opcode_h.py from Lib/opcode.py +// Auto-generated by Tools/build/generate_opcode_h.py from Lib/opcode.py #ifndef Py_OPCODE_H #define Py_OPCODE_H @@ -11,6 +11,8 @@ extern "C" { #define CACHE 0 #define POP_TOP 1 #define PUSH_NULL 2 +#define INTERPRETER_EXIT 3 +#define END_FOR 4 #define NOP 9 #define UNARY_POSITIVE 10 #define UNARY_NEGATIVE 11 @@ -35,6 +37,7 @@ extern "C" { #define CLEANUP_THROW 55 #define STORE_SUBSCR 60 #define DELETE_SUBSCR 61 +#define STOPITERATION_ERROR 63 #define GET_ITER 68 #define GET_YIELD_FROM_ITER 69 #define PRINT_EXPR 70 @@ -126,79 +129,67 @@ extern "C" { #define JUMP_NO_INTERRUPT 261 #define LOAD_METHOD 262 #define MAX_PSEUDO_OPCODE 262 -#define BINARY_OP_ADAPTIVE 3 -#define BINARY_OP_ADD_FLOAT 4 -#define BINARY_OP_ADD_INT 5 -#define BINARY_OP_ADD_UNICODE 6 -#define BINARY_OP_INPLACE_ADD_UNICODE 7 -#define BINARY_OP_MULTIPLY_FLOAT 8 -#define BINARY_OP_MULTIPLY_INT 13 -#define BINARY_OP_SUBTRACT_FLOAT 14 -#define BINARY_OP_SUBTRACT_INT 16 -#define BINARY_SUBSCR_ADAPTIVE 17 +#define BINARY_OP_ADD_FLOAT 5 +#define BINARY_OP_ADD_INT 6 +#define BINARY_OP_ADD_UNICODE 7 +#define BINARY_OP_INPLACE_ADD_UNICODE 8 +#define BINARY_OP_MULTIPLY_FLOAT 13 +#define BINARY_OP_MULTIPLY_INT 14 +#define BINARY_OP_SUBTRACT_FLOAT 16 +#define BINARY_OP_SUBTRACT_INT 17 #define BINARY_SUBSCR_DICT 18 #define BINARY_SUBSCR_GETITEM 19 #define BINARY_SUBSCR_LIST_INT 20 #define BINARY_SUBSCR_TUPLE_INT 21 -#define CALL_ADAPTIVE 22 -#define CALL_PY_EXACT_ARGS 23 -#define CALL_PY_WITH_DEFAULTS 24 -#define CALL_BOUND_METHOD_EXACT_ARGS 28 -#define CALL_BUILTIN_CLASS 29 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 34 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 38 -#define CALL_NO_KW_BUILTIN_FAST 39 -#define CALL_NO_KW_BUILTIN_O 40 -#define CALL_NO_KW_ISINSTANCE 41 -#define CALL_NO_KW_LEN 42 -#define CALL_NO_KW_LIST_APPEND 43 -#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 44 -#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 45 -#define CALL_NO_KW_METHOD_DESCRIPTOR_O 46 -#define CALL_NO_KW_STR_1 47 -#define CALL_NO_KW_TUPLE_1 48 -#define CALL_NO_KW_TYPE_1 56 -#define COMPARE_OP_ADAPTIVE 57 -#define COMPARE_OP_FLOAT_JUMP 58 -#define COMPARE_OP_INT_JUMP 59 -#define COMPARE_OP_STR_JUMP 62 -#define EXTENDED_ARG_QUICK 63 -#define FOR_ITER_ADAPTIVE 64 -#define FOR_ITER_LIST 65 -#define FOR_ITER_RANGE 66 -#define JUMP_BACKWARD_QUICK 67 -#define LOAD_ATTR_ADAPTIVE 72 -#define LOAD_ATTR_CLASS 73 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 76 -#define LOAD_ATTR_INSTANCE_VALUE 77 -#define LOAD_ATTR_MODULE 78 -#define LOAD_ATTR_PROPERTY 79 -#define LOAD_ATTR_SLOT 80 -#define LOAD_ATTR_WITH_HINT 81 -#define LOAD_ATTR_METHOD_LAZY_DICT 86 -#define LOAD_ATTR_METHOD_NO_DICT 113 -#define LOAD_ATTR_METHOD_WITH_DICT 121 -#define LOAD_ATTR_METHOD_WITH_VALUES 141 -#define LOAD_CONST__LOAD_FAST 143 -#define LOAD_FAST__LOAD_CONST 153 -#define LOAD_FAST__LOAD_FAST 154 -#define LOAD_GLOBAL_ADAPTIVE 158 -#define LOAD_GLOBAL_BUILTIN 159 -#define LOAD_GLOBAL_MODULE 160 -#define RESUME_QUICK 161 -#define STORE_ATTR_ADAPTIVE 166 -#define STORE_ATTR_INSTANCE_VALUE 167 -#define STORE_ATTR_SLOT 168 -#define STORE_ATTR_WITH_HINT 169 -#define STORE_FAST__LOAD_FAST 170 -#define STORE_FAST__STORE_FAST 173 -#define STORE_SUBSCR_ADAPTIVE 174 -#define STORE_SUBSCR_DICT 175 -#define STORE_SUBSCR_LIST_INT 176 
-#define UNPACK_SEQUENCE_ADAPTIVE 177 -#define UNPACK_SEQUENCE_LIST 178 -#define UNPACK_SEQUENCE_TUPLE 179 -#define UNPACK_SEQUENCE_TWO_TUPLE 180 +#define CALL_PY_EXACT_ARGS 22 +#define CALL_PY_WITH_DEFAULTS 23 +#define CALL_BOUND_METHOD_EXACT_ARGS 24 +#define CALL_BUILTIN_CLASS 28 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 29 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 34 +#define CALL_NO_KW_BUILTIN_FAST 38 +#define CALL_NO_KW_BUILTIN_O 39 +#define CALL_NO_KW_ISINSTANCE 40 +#define CALL_NO_KW_LEN 41 +#define CALL_NO_KW_LIST_APPEND 42 +#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 43 +#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 44 +#define CALL_NO_KW_METHOD_DESCRIPTOR_O 45 +#define CALL_NO_KW_STR_1 46 +#define CALL_NO_KW_TUPLE_1 47 +#define CALL_NO_KW_TYPE_1 48 +#define COMPARE_OP_FLOAT_JUMP 56 +#define COMPARE_OP_INT_JUMP 57 +#define COMPARE_OP_STR_JUMP 58 +#define FOR_ITER_LIST 59 +#define FOR_ITER_RANGE 62 +#define FOR_ITER_GEN 64 +#define LOAD_ATTR_CLASS 65 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 66 +#define LOAD_ATTR_INSTANCE_VALUE 67 +#define LOAD_ATTR_MODULE 72 +#define LOAD_ATTR_PROPERTY 73 +#define LOAD_ATTR_SLOT 76 +#define LOAD_ATTR_WITH_HINT 77 +#define LOAD_ATTR_METHOD_LAZY_DICT 78 +#define LOAD_ATTR_METHOD_NO_DICT 79 +#define LOAD_ATTR_METHOD_WITH_DICT 80 +#define LOAD_ATTR_METHOD_WITH_VALUES 81 +#define LOAD_CONST__LOAD_FAST 86 +#define LOAD_FAST__LOAD_CONST 113 +#define LOAD_FAST__LOAD_FAST 121 +#define LOAD_GLOBAL_BUILTIN 141 +#define LOAD_GLOBAL_MODULE 143 +#define STORE_ATTR_INSTANCE_VALUE 153 +#define STORE_ATTR_SLOT 154 +#define STORE_ATTR_WITH_HINT 158 +#define STORE_FAST__LOAD_FAST 159 +#define STORE_FAST__STORE_FAST 160 +#define STORE_SUBSCR_DICT 161 +#define STORE_SUBSCR_LIST_INT 166 +#define UNPACK_SEQUENCE_LIST 167 +#define UNPACK_SEQUENCE_TUPLE 168 +#define UNPACK_SEQUENCE_TWO_TUPLE 169 #define DO_TRACING 255 #define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\ diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 37a984494d713c..a16b8d7104e3b6 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 12 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 0 +#define PY_RELEASE_SERIAL 2 /* Version as a string */ -#define PY_VERSION "3.12.0a0" +#define PY_VERSION "3.12.0a2+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Include/pybuffer.h b/Include/pybuffer.h index 6893505e66e3e8..bbac60972f5127 100644 --- a/Include/pybuffer.h +++ b/Include/pybuffer.h @@ -32,6 +32,9 @@ typedef struct { void *internal; } Py_buffer; +typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); +typedef void (*releasebufferproc)(PyObject *, Py_buffer *); + /* Return 1 if the getbuffer function is available, otherwise return 0. */ PyAPI_FUNC(int) PyObject_CheckBuffer(PyObject *obj); diff --git a/Include/structmember.h b/Include/structmember.h index 65a777d5f52117..f6e8fd829892f4 100644 --- a/Include/structmember.h +++ b/Include/structmember.h @@ -5,69 +5,50 @@ extern "C" { #endif -/* Interface to map C struct members to Python object attributes */ - -#include /* For offsetof */ - -/* An array of PyMemberDef structures defines the name, type and offset - of selected members of a C structure. These can be read by - PyMember_GetOne() and set by PyMember_SetOne() (except if their READONLY - flag is set). The array must be terminated with an entry whose name - pointer is NULL. 
*/ - -struct PyMemberDef { - const char *name; - int type; - Py_ssize_t offset; - int flags; - const char *doc; -}; +/* Interface to map C struct members to Python object attributes + * + * This header is deprecated: new code should not use stuff from here. + * New definitions are in descrobject.h. + * + * However, there's nothing wrong with old code continuing to use it, + * and there's not much maintenance overhead in maintaining a few aliases. + * So, don't be too eager to convert old code. + * + * It uses names not prefixed with Py_. + * It is also *not* included from Python.h and must be included individually. + */ + +#include <stddef.h> /* For offsetof (not always provided by Python.h) */ /* Types */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -/* XXX the ordering here is weird for binary compatibility */ -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -/* unsigned variants: */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 - -/* Added by Jack: strings contained in the structure */ -#define T_STRING_INPLACE 13 - -/* Added by Lillo: bools contained in the structure (assumed char) */ -#define T_BOOL 14 - -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 - -#define T_PYSSIZET 19 /* Py_ssize_t */ -#define T_NONE 20 /* Value is always None */ - +#define T_SHORT Py_T_SHORT +#define T_INT Py_T_INT +#define T_LONG Py_T_LONG +#define T_FLOAT Py_T_FLOAT +#define T_DOUBLE Py_T_DOUBLE +#define T_STRING Py_T_STRING +#define T_OBJECT _Py_T_OBJECT +#define T_CHAR Py_T_CHAR +#define T_BYTE Py_T_BYTE +#define T_UBYTE Py_T_UBYTE +#define T_USHORT Py_T_USHORT +#define T_UINT Py_T_UINT +#define T_ULONG Py_T_ULONG +#define T_STRING_INPLACE Py_T_STRING_INPLACE +#define T_BOOL Py_T_BOOL +#define T_OBJECT_EX Py_T_OBJECT_EX +#define T_LONGLONG Py_T_LONGLONG +#define T_ULONGLONG Py_T_ULONGLONG +#define T_PYSSIZET Py_T_PYSSIZET +#define T_NONE _Py_T_NONE /* Flags */ -#define READONLY 1 -#define READ_RESTRICTED 2 -#define PY_WRITE_RESTRICTED 4 +#define READONLY Py_READONLY +#define PY_AUDIT_READ Py_AUDIT_READ +#define READ_RESTRICTED Py_AUDIT_READ +#define PY_WRITE_RESTRICTED _Py_WRITE_RESTRICTED #define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) -#define PY_AUDIT_READ READ_RESTRICTED - -/* Current API, use this */ -PyAPI_FUNC(PyObject *) PyMember_GetOne(const char *, PyMemberDef *); -PyAPI_FUNC(int) PyMember_SetOne(char *, PyMemberDef *, PyObject *); - #ifdef __cplusplus } diff --git a/Lib/_aix_support.py b/Lib/_aix_support.py index 1d8482ff382507..18533e769b7514 100644 --- a/Lib/_aix_support.py +++ b/Lib/_aix_support.py @@ -1,15 +1,9 @@ """Shared AIX support functions.""" +import subprocess import sys import sysconfig -try: - import subprocess -except ImportError: # pragma: no cover - # _aix_support is used in distutils by setup.py to build C extensions, - # before subprocess dependencies like _posixsubprocess are available. - import _bootsubprocess as subprocess - def _aix_tag(vrtl, bd): # type: (List[int], int) -> str diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 12510784c8b926..163cf9de279ff0 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -1129,6 +1129,7 @@ def peek(self, size=0): do at most one raw read to satisfy it. We never return more than self.buffer_size.
""" + self._checkClosed("peek of closed file") + with self._read_lock: + return self._peek_unlocked(size) @@ -1147,6 +1148,7 @@ def read1(self, size=-1): """Reads up to size bytes, with at most one read() system call.""" # Returns up to size bytes. If at least one byte is buffered, we # only return buffered bytes. Otherwise, we do one raw read. + self._checkClosed("read of closed file") if size < 0: size = self.buffer_size if size == 0: @@ -1164,6 +1166,8 @@ def read1(self, size=-1): def _readinto(self, buf, read1): """Read data into *buf* with at most one system call.""" + self._checkClosed("readinto of closed file") + # Need to create a memoryview object of type 'b', otherwise # we may not be able to assign bytes to it, and slicing it # would create a new object. @@ -1213,6 +1217,7 @@ def tell(self): def seek(self, pos, whence=0): if whence not in valid_seek_flags: raise ValueError("invalid whence value") + self._checkClosed("seek of closed file") with self._read_lock: if whence == 1: pos -= len(self._read_buf) - self._read_pos diff --git a/Lib/_pylong.py b/Lib/_pylong.py new file mode 100644 index 00000000000000..d14c1d93836327 --- /dev/null +++ b/Lib/_pylong.py @@ -0,0 +1,286 @@ +"""Python implementations of some algorithms for use by longobject.c. +The goal is to provide asymptotically faster algorithms that can be +used for operations on integers with many digits. In those cases, the +performance overhead of the Python implementation is not significant +since the asymptotic behavior is what dominates runtime. Functions +provided by this module should be considered private and not part of any +public API. + +Note: for ease of maintainability, please prefer clear code and avoid +"micro-optimizations". This module will only be imported and used for +integers with a huge number of digits. Saving a few microseconds with +tricky or non-obvious code is not worth it. People looking for +maximum performance should use something like gmpy2.""" + +import sys +import re +import decimal + + +def int_to_decimal(n): + """Asymptotically fast conversion of an 'int' to Decimal.""" + + # Function due to Tim Peters. See GH issue #90716 for details. + # https://github.com/python/cpython/issues/90716 + # + # The implementation in longobject.c of base conversion algorithms + # between power-of-2 and non-power-of-2 bases is quadratic time. + # This function implements a divide-and-conquer algorithm that is + # faster for large numbers. Builds an equal decimal.Decimal in a + # "clever" recursive way. If we want a string representation, we + # apply str to _that_. + + D = decimal.Decimal + D2 = D(2) + + BITLIM = 128 + + mem = {} + + def w2pow(w): + """Return D(2)**w and store the result. Also possibly save some + intermediate results. In context, these are likely to be reused + across various levels of the conversion to Decimal.""" + if (result := mem.get(w)) is None: + if w <= BITLIM: + result = D2**w + elif w - 1 in mem: + result = (t := mem[w - 1]) + t + else: + w2 = w >> 1 + # If w happens to be odd, w-w2 is one larger than w2 + # now. Recurse on the smaller first (w2), so that it's + # in the cache and the larger (w-w2) can be handled by + # the cheaper `w-1 in mem` branch instead.
+ result = w2pow(w2) * w2pow(w - w2) + mem[w] = result + return result + + def inner(n, w): + if w <= BITLIM: + return D(n) + w2 = w >> 1 + hi = n >> w2 + lo = n - (hi << w2) + return inner(lo, w2) + inner(hi, w - w2) * w2pow(w2) + + with decimal.localcontext() as ctx: + ctx.prec = decimal.MAX_PREC + ctx.Emax = decimal.MAX_EMAX + ctx.Emin = decimal.MIN_EMIN + ctx.traps[decimal.Inexact] = 1 + + if n < 0: + negate = True + n = -n + else: + negate = False + result = inner(n, n.bit_length()) + if negate: + result = -result + return result + + +def int_to_decimal_string(n): + """Asymptotically fast conversion of an 'int' to a decimal string.""" + return str(int_to_decimal(n)) + + +def _str_to_int_inner(s): + """Asymptotically fast conversion of a 'str' to an 'int'.""" + + # Function due to Bjorn Martinsson. See GH issue #90716 for details. + # https://github.com/python/cpython/issues/90716 + # + # The implementation in longobject.c of base conversion algorithms + # between power-of-2 and non-power-of-2 bases is quadratic time. + # This function implements a divide-and-conquer algorithm making use + # of Python's built-in big int multiplication. Since Python uses the + # Karatsuba algorithm for multiplication, the time complexity + # of this function is O(len(s)**1.58). + + DIGLIM = 2048 + + mem = {} + + def w5pow(w): + """Return 5**w and store the result. + Also possibly save some intermediate results. In context, these + are likely to be reused across various levels of the conversion + to 'int'. + """ + if (result := mem.get(w)) is None: + if w <= DIGLIM: + result = 5**w + elif w - 1 in mem: + result = mem[w - 1] * 5 + else: + w2 = w >> 1 + # If w happens to be odd, w-w2 is one larger than w2 + # now. Recurse on the smaller first (w2), so that it's + # in the cache and the larger (w-w2) can be handled by + # the cheaper `w-1 in mem` branch instead. + result = w5pow(w2) * w5pow(w - w2) + mem[w] = result + return result + + def inner(a, b): + if b - a <= DIGLIM: + return int(s[a:b]) + mid = (a + b + 1) >> 1 + return inner(mid, b) + ((inner(a, mid) * w5pow(b - mid)) << (b - mid)) + + return inner(0, len(s)) + + +def int_from_string(s): + """Asymptotically fast version of PyLong_FromString(), conversion + of a string of decimal digits into an 'int'.""" + # PyLong_FromString() has already removed leading +/-, checked for invalid + # use of underscore characters, checked that string consists of only digits + # and underscores, and stripped leading whitespace. The input can still + # contain underscores and have trailing whitespace. + s = s.rstrip().replace('_', '') + return _str_to_int_inner(s) + + +def str_to_int(s): + """Asymptotically fast version of decimal string to 'int' conversion.""" + # FIXME: this doesn't support the full syntax that int() supports. + m = re.match(r'\s*([+-]?)([0-9_]+)\s*', s) + if not m: + raise ValueError('invalid literal for int() with base 10') + v = int_from_string(m.group(2)) + if m.group(1) == '-': + v = -v + return v + + +# Fast integer division, based on code from Mark Dickinson, fast_div.py +# GH-47701. Additional refinements and optimizations by Bjorn Martinsson. The +# algorithm is due to Burnikel and Ziegler, in their paper "Fast Recursive +# Division". + +_DIV_LIMIT = 4000 + + +def _div2n1n(a, b, n): + """Divide a 2n-bit nonnegative integer a by an n-bit positive integer + b, using a recursive divide-and-conquer algorithm.
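The conversion helpers defined above (int_to_decimal_string and str_to_int) are meant to agree exactly with the built-in conversions, just with better asymptotic behaviour. A minimal sanity-check sketch, assuming an interpreter that actually ships this Lib/_pylong.py; the exponent is arbitrary and kept small enough that str() stays under the default integer-to-string digit limit:

    import _pylong

    n = 3 ** 5000  # roughly 2400 decimal digits, below the default 4300-digit str() limit
    assert _pylong.int_to_decimal_string(n) == str(n)
    assert _pylong.str_to_int(str(n)) == n
    assert _pylong.str_to_int(" +1_000 ") == 1000  # underscores and surrounding whitespace are tolerated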
+ + Inputs: + n is a positive integer + b is a positive integer with exactly n bits + a is a nonnegative integer such that a < 2**n * b + + Output: + (q, r) such that a = b*q+r and 0 <= r < b. + + """ + if a.bit_length() - n <= _DIV_LIMIT: + return divmod(a, b) + pad = n & 1 + if pad: + a <<= 1 + b <<= 1 + n += 1 + half_n = n >> 1 + mask = (1 << half_n) - 1 + b1, b2 = b >> half_n, b & mask + q1, r = _div3n2n(a >> n, (a >> half_n) & mask, b, b1, b2, half_n) + q2, r = _div3n2n(r, a & mask, b, b1, b2, half_n) + if pad: + r >>= 1 + return q1 << half_n | q2, r + + +def _div3n2n(a12, a3, b, b1, b2, n): + """Helper function for _div2n1n; not intended to be called directly.""" + if a12 >> n == b1: + q, r = (1 << n) - 1, a12 - (b1 << n) + b1 + else: + q, r = _div2n1n(a12, b1, n) + r = (r << n | a3) - q * b2 + while r < 0: + q -= 1 + r += b + return q, r + + +def _int2digits(a, n): + """Decompose non-negative int a into base 2**n + + Input: + a is a non-negative integer + + Output: + List of the digits of a in base 2**n in little-endian order, + meaning the most significant digit is last. The most + significant digit is guaranteed to be non-zero. + If a is 0 then the output is an empty list. + + """ + a_digits = [0] * ((a.bit_length() + n - 1) // n) + + def inner(x, L, R): + if L + 1 == R: + a_digits[L] = x + return + mid = (L + R) >> 1 + shift = (mid - L) * n + upper = x >> shift + lower = x ^ (upper << shift) + inner(lower, L, mid) + inner(upper, mid, R) + + if a: + inner(a, 0, len(a_digits)) + return a_digits + + +def _digits2int(digits, n): + """Combine base-2**n digits into an int. This function is the + inverse of `_int2digits`. For more details, see _int2digits. + """ + + def inner(L, R): + if L + 1 == R: + return digits[L] + mid = (L + R) >> 1 + shift = (mid - L) * n + return (inner(mid, R) << shift) + inner(L, mid) + + return inner(0, len(digits)) if digits else 0 + + +def _divmod_pos(a, b): + """Divide a non-negative integer a by a positive integer b, giving + quotient and remainder.""" + # Use grade-school algorithm in base 2**n, n = nbits(b) + n = b.bit_length() + a_digits = _int2digits(a, n) + + r = 0 + q_digits = [] + for a_digit in reversed(a_digits): + q_digit, r = _div2n1n((r << n) + a_digit, b, n) + q_digits.append(q_digit) + q_digits.reverse() + q = _digits2int(q_digits, n) + return q, r + + +def int_divmod(a, b): + """Asymptotically fast replacement for divmod, for 'int'. + Its time complexity is O(n**1.58), where n = #bits(a) + #bits(b). + """ + if b == 0: + raise ZeroDivisionError + elif b < 0: + q, r = int_divmod(-a, -b) + return q, -r + elif a < 0: + q, r = int_divmod(~a, b) + return ~q, b + ~r + else: + return _divmod_pos(a, b) diff --git a/Lib/argparse.py b/Lib/argparse.py index d2dcfdf5682f7c..240625ff01084e 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -1997,7 +1997,11 @@ def consume_optional(start_index): # arguments, try to parse more single-dash options out # of the tail of the option string chars = self.prefix_chars - if arg_count == 0 and option_string[1] not in chars: + if ( + arg_count == 0 + and option_string[1] not in chars + and explicit_arg != '' + ): action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] diff --git a/Lib/ast.py b/Lib/ast.py index 1a94e9368c161a..2cbc80a9835aa5 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -237,6 +237,12 @@ def increment_lineno(node, n=1): location in a file. 
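int_divmod above is intended as a drop-in replacement for divmod() on huge operands, including divmod's floor-division sign conventions; _divmod_pos does the real work and the wrapper only normalises signs. A small illustrative check, again assuming the module is importable as _pylong (the operand sizes are arbitrary):

    import _pylong

    a, b = 7 ** 4000, 3 ** 2500
    assert _pylong.int_divmod(a, b) == divmod(a, b)
    assert _pylong.int_divmod(-a, b) == divmod(-a, b)  # remainder keeps the divisor's sign
    assert _pylong.int_divmod(a, -b) == divmod(a, -b)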
""" for child in walk(node): + # TypeIgnore is a special case where lineno is not an attribute + # but rather a field of the node itself. + if isinstance(child, TypeIgnore): + child.lineno = getattr(child, 'lineno', 0) + n + continue + if 'lineno' in child._attributes: child.lineno = getattr(child, 'lineno', 0) + n if ( diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index c8a2f9f25634ef..f2f93758c3a817 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -377,7 +377,7 @@ async def serve_forever(self): self._serving_forever_fut = None async def wait_closed(self): - if self._sockets is None or self._waiters is None: + if self._waiters is None or self._active_count == 0: return waiter = self._loop.create_future() self._waiters.append(waiter) @@ -986,6 +986,8 @@ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None): if sock is not None: sock.close() raise + finally: + exceptions = my_exceptions = None async def create_connection( self, protocol_factory, host=None, port=None, @@ -1084,19 +1086,22 @@ async def create_connection( if sock is None: exceptions = [exc for sub in exceptions for exc in sub] - if all_errors: - raise ExceptionGroup("create_connection failed", exceptions) - if len(exceptions) == 1: - raise exceptions[0] - else: - # If they all have the same str(), raise one. - model = str(exceptions[0]) - if all(str(exc) == model for exc in exceptions): + try: + if all_errors: + raise ExceptionGroup("create_connection failed", exceptions) + if len(exceptions) == 1: raise exceptions[0] - # Raise a combined exception so the user can see all - # the various error messages. - raise OSError('Multiple exceptions: {}'.format( - ', '.join(str(exc) for exc in exceptions))) + else: + # If they all have the same str(), raise one. + model = str(exceptions[0]) + if all(str(exc) == model for exc in exceptions): + raise exceptions[0] + # Raise a combined exception so the user can see all + # the various error messages. + raise OSError('Multiple exceptions: {}'.format( + ', '.join(str(exc) for exc in exceptions))) + finally: + exceptions = None else: if sock is None: @@ -1904,6 +1909,8 @@ def _run_once(self): event_list = self._selector.select(timeout) self._process_events(event_list) + # Needed to break cycles when an exception occurs. + event_list = None # Handle 'later' callbacks that are ready. end_time = self.time() + self._clock_resolution diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index c2ca4a2792f666..e15bb4141fc02a 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -216,7 +216,9 @@ def _process_exited(self, returncode): self._proc.returncode = returncode self._call(self._protocol.process_exited) for p in self._pipes.values(): - p.pipe.close() + if p is not None: + p.pipe.close() + self._try_finish() async def _wait(self): diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index 39776e3c2cce48..3a6b44a0910869 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -8,7 +8,6 @@ import contextvars import logging import sys -import warnings from types import GenericAlias from . import base_futures @@ -151,11 +150,6 @@ def cancel(self, msg=None): change the future's state to cancelled, schedule the callbacks and return True. 
""" - if msg is not None: - warnings.warn("Passing 'msg' argument to Future.cancel() " - "is deprecated since Python 3.11, and " - "scheduled for removal in Python 3.14.", - DeprecationWarning, stacklevel=2) self.__log_traceback = False if self._state != _PENDING: return False diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index ddb9daca026936..c6aab408fc7410 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -60,6 +60,7 @@ def __init__(self, loop, sock, protocol, waiter=None, self._pending_write = 0 self._conn_lost = 0 self._closing = False # Set when close() called. + self._called_connection_lost = False self._eof_written = False if self._server is not None: self._server._attach() @@ -136,7 +137,7 @@ def _force_close(self, exc): self._empty_waiter.set_result(None) else: self._empty_waiter.set_exception(exc) - if self._closing: + if self._closing and self._called_connection_lost: return self._closing = True self._conn_lost += 1 @@ -151,6 +152,8 @@ def _force_close(self, exc): self._loop.call_soon(self._call_connection_lost, exc) def _call_connection_lost(self, exc): + if self._called_connection_lost: + return try: self._protocol.connection_lost(exc) finally: @@ -166,6 +169,7 @@ def _call_connection_lost(self, exc): if server is not None: server._detach() self._server = None + self._called_connection_lost = True def get_write_buffer_size(self): size = self._pending_write diff --git a/Lib/asyncio/runners.py b/Lib/asyncio/runners.py index b1c4dbd7619721..1b89236599aad7 100644 --- a/Lib/asyncio/runners.py +++ b/Lib/asyncio/runners.py @@ -157,7 +157,7 @@ def _on_sigint(self, signum, frame, main_task): raise KeyboardInterrupt() -def run(main, *, debug=None): +def run(main, *, debug=None, loop_factory=None): """Execute the coroutine and return the result. This function runs the passed coroutine, taking care of @@ -190,7 +190,7 @@ async def main(): raise RuntimeError( "asyncio.run() cannot be called from a running event loop") - with Runner(debug=debug) as runner: + with Runner(debug=debug, loop_factory=loop_factory) as runner: return runner.run(main) diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index c9bbe2ac014351..3d30006198f671 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -58,6 +58,7 @@ def __init__(self, selector=None): def _make_socket_transport(self, sock, protocol, waiter=None, *, extra=None, server=None): + self._ensure_fd_no_transport(sock) return _SelectorSocketTransport(self, sock, protocol, waiter, extra, server) @@ -68,6 +69,7 @@ def _make_ssl_transport( ssl_handshake_timeout=constants.SSL_HANDSHAKE_TIMEOUT, ssl_shutdown_timeout=constants.SSL_SHUTDOWN_TIMEOUT, ): + self._ensure_fd_no_transport(rawsock) ssl_protocol = sslproto.SSLProtocol( self, protocol, sslcontext, waiter, server_side, server_hostname, @@ -80,6 +82,7 @@ def _make_ssl_transport( def _make_datagram_transport(self, sock, protocol, address=None, waiter=None, extra=None): + self._ensure_fd_no_transport(sock) return _SelectorDatagramTransport(self, sock, protocol, address, waiter, extra) @@ -630,7 +633,11 @@ async def sock_connect(self, sock, address): fut = self.create_future() self._sock_connect(fut, sock, address) - return await fut + try: + return await fut + finally: + # Needed to break cycles when an exception occurs. 
+ fut = None def _sock_connect(self, fut, sock, address): fd = sock.fileno() @@ -652,6 +659,8 @@ def _sock_connect(self, fut, sock, address): fut.set_exception(exc) else: fut.set_result(None) + finally: + fut = None def _sock_write_done(self, fd, fut, handle=None): if handle is None or not handle.cancelled(): @@ -675,6 +684,8 @@ def _sock_connect_cb(self, fut, sock, address): fut.set_exception(exc) else: fut.set_result(None) + finally: + fut = None async def sock_accept(self, sock): """Accept a connection. @@ -1123,7 +1134,7 @@ def _reset_empty_waiter(self): self._empty_waiter = None -class _SelectorDatagramTransport(_SelectorTransport): +class _SelectorDatagramTransport(_SelectorTransport, transports.DatagramTransport): _buffer_factory = collections.deque diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index de00953cc1d0f7..bbf9cad6bc7f86 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -107,8 +107,11 @@ def close(self): protocol's connection_lost() method will (eventually) called with None as its argument. """ - self._closed = True - self._ssl_protocol._start_shutdown() + if not self._closed: + self._closed = True + self._ssl_protocol._start_shutdown() + else: + self._ssl_protocol = None def __del__(self, _warnings=warnings): if not self._closed: @@ -196,12 +199,6 @@ def get_read_buffer_size(self): """Return the current size of the read buffer.""" return self._ssl_protocol._get_read_buffer_size() - def get_write_buffer_limits(self): - """Get the high and low watermarks for write flow control. - Return a tuple (low, high) where low and high are - positive number of bytes.""" - return self._ssl_protocol._transport.get_write_buffer_limits() - @property def _protocol_paused(self): # Required for sendfile fallback pause_writing/resume_writing logic diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py index 5d5e2a8a85dd48..911419e1769c17 100644 --- a/Lib/asyncio/taskgroups.py +++ b/Lib/asyncio/taskgroups.py @@ -128,11 +128,11 @@ async def __aexit__(self, et, exc, tb): # Exceptions are heavy objects that can have object # cycles (bad for GC); let's not keep a reference to # a bunch of them. - errors = self._errors - self._errors = None - - me = BaseExceptionGroup('unhandled errors in a TaskGroup', errors) - raise me from None + try: + me = BaseExceptionGroup('unhandled errors in a TaskGroup', self._errors) + raise me from None + finally: + self._errors = None def create_task(self, coro, *, name=None, context=None): if not self._entered: diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 8d6dfcd81b7377..571013745aa03a 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -210,11 +210,6 @@ def cancel(self, msg=None): This also increases the task's count of cancellation requests. 
""" - if msg is not None: - warnings.warn("Passing 'msg' argument to Task.cancel() " - "is deprecated since Python 3.11, and " - "scheduled for removal in Python 3.14.", - DeprecationWarning, stacklevel=2) self._log_traceback = False if self.done(): return False diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 96e6d73a759794..b21e0394141bf4 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -195,22 +195,25 @@ def _make_write_pipe_transport(self, pipe, protocol, waiter=None, async def _make_subprocess_transport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, extra=None, **kwargs): - with events.get_child_watcher() as watcher: + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = events.get_child_watcher() + + with watcher: if not watcher.is_active(): # Check early. # Raising exception before process creation # prevents subprocess execution if the watcher # is not ready to handle it. raise RuntimeError("asyncio.get_child_watcher() is not activated, " - "subprocess support is not installed.") + "subprocess support is not installed.") waiter = self.create_future() transp = _UnixSubprocessTransport(self, protocol, args, shell, - stdin, stdout, stderr, bufsize, - waiter=waiter, extra=extra, - **kwargs) - + stdin, stdout, stderr, bufsize, + waiter=waiter, extra=extra, + **kwargs) watcher.add_child_handler(transp.get_pid(), - self._child_watcher_callback, transp) + self._child_watcher_callback, transp) try: await waiter except (SystemExit, KeyboardInterrupt): @@ -800,12 +803,11 @@ class _UnixSubprocessTransport(base_subprocess.BaseSubprocessTransport): def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs): stdin_w = None - if stdin == subprocess.PIPE: - # Use a socket pair for stdin, since not all platforms + if stdin == subprocess.PIPE and sys.platform.startswith('aix'): + # Use a socket pair for stdin on AIX, since it does not # support selecting read events on the write end of a # socket (which we use in order to detect closing of the - # other end). Notably this is needed on AIX, and works - # just fine on other platforms. + # other end). stdin, stdin_w = socket.socketpair() try: self._proc = subprocess.Popen( @@ -844,6 +846,13 @@ class AbstractChildWatcher: waitpid(-1), there should be only one active object per process. """ + def __init_subclass__(cls) -> None: + if cls.__module__ != __name__: + warnings._deprecated("AbstractChildWatcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", + remove=(3, 14)) + def add_child_handler(self, pid, callback, *args): """Register a new child handler. @@ -912,10 +921,6 @@ class PidfdChildWatcher(AbstractChildWatcher): recent (5.3+) kernels. 
""" - def __init__(self): - self._loop = None - self._callbacks = {} - def __enter__(self): return self @@ -923,35 +928,22 @@ def __exit__(self, exc_type, exc_value, exc_traceback): pass def is_active(self): - return self._loop is not None and self._loop.is_running() + return True def close(self): - self.attach_loop(None) + pass def attach_loop(self, loop): - if self._loop is not None and loop is None and self._callbacks: - warnings.warn( - 'A loop is being detached ' - 'from a child watcher with pending handlers', - RuntimeWarning) - for pidfd, _, _ in self._callbacks.values(): - self._loop._remove_reader(pidfd) - os.close(pidfd) - self._callbacks.clear() - self._loop = loop + pass def add_child_handler(self, pid, callback, *args): - existing = self._callbacks.get(pid) - if existing is not None: - self._callbacks[pid] = existing[0], callback, args - else: - pidfd = os.pidfd_open(pid) - self._loop._add_reader(pidfd, self._do_wait, pid) - self._callbacks[pid] = pidfd, callback, args + loop = events.get_running_loop() + pidfd = os.pidfd_open(pid) + loop._add_reader(pidfd, self._do_wait, pid, pidfd, callback, args) - def _do_wait(self, pid): - pidfd, callback, args = self._callbacks.pop(pid) - self._loop._remove_reader(pidfd) + def _do_wait(self, pid, pidfd, callback, args): + loop = events.get_running_loop() + loop._remove_reader(pidfd) try: _, status = os.waitpid(pid, 0) except ChildProcessError: @@ -969,12 +961,9 @@ def _do_wait(self, pid): callback(pid, returncode, *args) def remove_child_handler(self, pid): - try: - pidfd, _, _ = self._callbacks.pop(pid) - except KeyError: - return False - self._loop._remove_reader(pidfd) - os.close(pidfd) + # asyncio never calls remove_child_handler() !!! + # The method is no-op but is implemented because + # abstract base classes require it. 
return True @@ -1042,6 +1031,13 @@ class SafeChildWatcher(BaseChildWatcher): big number of children (O(n) each time SIGCHLD is raised) """ + def __init__(self): + super().__init__() + warnings._deprecated("SafeChildWatcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", + remove=(3, 14)) + def close(self): self._callbacks.clear() super().close() @@ -1120,6 +1116,10 @@ def __init__(self): self._lock = threading.Lock() self._zombies = {} self._forks = 0 + warnings._deprecated("FastChildWatcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", + remove=(3, 14)) def close(self): self._callbacks.clear() @@ -1232,6 +1232,10 @@ class MultiLoopChildWatcher(AbstractChildWatcher): def __init__(self): self._callbacks = {} self._saved_sighandler = None + warnings._deprecated("MultiLoopChildWatcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", + remove=(3, 14)) def is_active(self): return self._saved_sighandler is not None @@ -1423,6 +1427,17 @@ def _do_waitpid(self, loop, expected_pid, callback, args): self._threads.pop(expected_pid) +def can_use_pidfd(): + if not hasattr(os, 'pidfd_open'): + return False + try: + pid = os.getpid() + os.close(os.pidfd_open(pid, 0)) + except OSError: + # blocked by security policy like SECCOMP + return False + return True + class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): """UNIX event loop policy with a watcher for child processes.""" @@ -1435,7 +1450,10 @@ def __init__(self): def _init_watcher(self): with events._lock: if self._watcher is None: # pragma: no branch - self._watcher = ThreadedChildWatcher() + if can_use_pidfd(): + self._watcher = PidfdChildWatcher() + else: + self._watcher = ThreadedChildWatcher() if threading.current_thread() is threading.main_thread(): self._watcher.attach_loop(self._local._loop) @@ -1461,6 +1479,9 @@ def get_child_watcher(self): if self._watcher is None: self._init_watcher() + warnings._deprecated("get_child_watcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", remove=(3, 14)) return self._watcher def set_child_watcher(self, watcher): @@ -1472,6 +1493,9 @@ def set_child_watcher(self, watcher): self._watcher.close() self._watcher = watcher + warnings._deprecated("set_child_watcher", + "{name!r} is deprecated as of Python 3.12 and will be " + "removed in Python {remove}.", remove=(3, 14)) SelectorEventLoop = _UnixSelectorEventLoop diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index 90b259cbafead2..4dad436fb4187f 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -439,13 +439,28 @@ def select(self, timeout=None): self._poll(timeout) tmp = self._results self._results = [] - return tmp + try: + return tmp + finally: + # Needed to break cycles when an exception occurs. 
+ tmp = None def _result(self, value): fut = self._loop.create_future() fut.set_result(value) return fut + @staticmethod + def finish_socket_func(trans, key, ov): + try: + return ov.getresult() + except OSError as exc: + if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, + _overlapped.ERROR_OPERATION_ABORTED): + raise ConnectionResetError(*exc.args) + else: + raise + def recv(self, conn, nbytes, flags=0): self._register_with_iocp(conn) ov = _overlapped.Overlapped(NULL) @@ -457,17 +472,7 @@ def recv(self, conn, nbytes, flags=0): except BrokenPipeError: return self._result(b'') - def finish_recv(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_recv) + return self._register(ov, conn, self.finish_socket_func) def recv_into(self, conn, buf, flags=0): self._register_with_iocp(conn) @@ -480,17 +485,7 @@ def recv_into(self, conn, buf, flags=0): except BrokenPipeError: return self._result(0) - def finish_recv(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_recv) + return self._register(ov, conn, self.finish_socket_func) def recvfrom(self, conn, nbytes, flags=0): self._register_with_iocp(conn) @@ -500,17 +495,7 @@ def recvfrom(self, conn, nbytes, flags=0): except BrokenPipeError: return self._result((b'', None)) - def finish_recv(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_recv) + return self._register(ov, conn, self.finish_socket_func) def recvfrom_into(self, conn, buf, flags=0): self._register_with_iocp(conn) @@ -538,17 +523,7 @@ def sendto(self, conn, buf, flags=0, addr=None): ov.WSASendTo(conn.fileno(), buf, flags, addr) - def finish_send(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_send) + return self._register(ov, conn, self.finish_socket_func) def send(self, conn, buf, flags=0): self._register_with_iocp(conn) @@ -558,17 +533,7 @@ def send(self, conn, buf, flags=0): else: ov.WriteFile(conn.fileno(), buf) - def finish_send(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_send) + return self._register(ov, conn, self.finish_socket_func) def accept(self, listener): self._register_with_iocp(listener) @@ -639,16 +604,7 @@ def sendfile(self, sock, file, offset, count): offset_low, offset_high, count, 0, 0) - def finish_sendfile(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - return self._register(ov, sock, finish_sendfile) + return 
self._register(ov, sock, self.finish_socket_func) def accept_pipe(self, pipe): self._register_with_iocp(pipe) @@ -841,6 +797,8 @@ def _poll(self, timeout=None): else: f.set_result(value) self._results.append(f) + finally: + f = None # Remove unregistered futures for ov in self._unregistered: diff --git a/Lib/cProfile.py b/Lib/cProfile.py index 9fc97883020840..f7000a8bfa0ddb 100755 --- a/Lib/cProfile.py +++ b/Lib/cProfile.py @@ -7,6 +7,7 @@ __all__ = ["run", "runctx", "Profile"] import _lsprof +import importlib.machinery import profile as _pyprofile # ____________________________________________________________ @@ -169,9 +170,12 @@ def main(): sys.path.insert(0, os.path.dirname(progname)) with open(progname, 'rb') as fp: code = compile(fp.read(), progname, 'exec') + spec = importlib.machinery.ModuleSpec(name='__main__', loader=None, + origin=progname) globs = { - '__file__': progname, - '__name__': '__main__', + '__spec__': spec, + '__file__': spec.origin, + '__name__': spec.name, '__package__': None, '__cached__': None, } diff --git a/Lib/codecs.py b/Lib/codecs.py index 5a1e7ec804009c..c1c55d8afef389 100644 --- a/Lib/codecs.py +++ b/Lib/codecs.py @@ -878,7 +878,8 @@ def open(filename, mode='r', encoding=None, errors='strict', buffering=-1): codecs. Output is also codec dependent and will usually be Unicode as well. - Underlying encoded files are always opened in binary mode. + If encoding is not None, then the + underlying encoded files are always opened in binary mode. The default file mode is 'r', meaning to open the file in read mode. encoding specifies the encoding which is to be used for the diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 58607874be93d6..f07ee143a5aff1 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -1011,8 +1011,8 @@ def __len__(self): def __iter__(self): d = {} - for mapping in reversed(self.maps): - d.update(dict.fromkeys(mapping)) # reuses stored hash values if possible + for mapping in map(dict.fromkeys, reversed(self.maps)): + d |= mapping # reuses stored hash values if possible return iter(d) def __contains__(self, key): diff --git a/Lib/copyreg.py b/Lib/copyreg.py index c8a52a2dc63a27..578392409b403c 100644 --- a/Lib/copyreg.py +++ b/Lib/copyreg.py @@ -25,16 +25,10 @@ def constructor(object): # Example: provide pickling support for complex numbers. -try: - complex -except NameError: - pass -else: +def pickle_complex(c): + return complex, (c.real, c.imag) - def pickle_complex(c): - return complex, (c.real, c.imag) - - pickle(complex, pickle_complex, complex) +pickle(complex, pickle_complex, complex) def pickle_union(obj): import functools, operator diff --git a/Lib/csv.py b/Lib/csv.py index 0de5656a4eed7b..309a8f3f486365 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -165,11 +165,6 @@ def writerows(self, rowdicts): __class_getitem__ = classmethod(types.GenericAlias) -# Guard Sniffer's type checking against builds that exclude complex() -try: - complex -except NameError: - complex = float class Sniffer: ''' diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 65fb8f251861f3..b54e16984cb15c 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -426,13 +426,11 @@ def wrapper(self): def _create_fn(name, args, body, *, globals=None, locals=None, return_type=MISSING): - # Note that we mutate locals when exec() is called. Caller - # beware! The only callers are internal to this module, so no + # Note that we may mutate locals. Callers beware! 
+ # The only callers are internal to this module, so no # worries about external callers. if locals is None: locals = {} - if 'BUILTINS' not in locals: - locals['BUILTINS'] = builtins return_annotation = '' if return_type is not MISSING: locals['_return_type'] = return_type @@ -462,7 +460,7 @@ def _field_assign(frozen, name, value, self_name): # self_name is what "self" is called in this function: don't # hard-code "self", since that might be a field name. if frozen: - return f'BUILTINS.object.__setattr__({self_name},{name!r},{value})' + return f'__dataclass_builtins_object__.__setattr__({self_name},{name!r},{value})' return f'{self_name}.{name}={value}' @@ -569,6 +567,7 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, locals.update({ 'MISSING': MISSING, '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY, + '__dataclass_builtins_object__': object, }) body_lines = [] @@ -1325,6 +1324,14 @@ def _asdict_inner(obj, dict_factory): # generator (which is not true for namedtuples, handled # above). return type(obj)(_asdict_inner(v, dict_factory) for v in obj) + elif isinstance(obj, dict) and hasattr(type(obj), 'default_factory'): + # obj is a defaultdict, which has a different constructor from + # dict as it requires the default_factory as its first arg. + # https://bugs.python.org/issue35540 + result = type(obj)(getattr(obj, 'default_factory')) + for k, v in obj.items(): + result[_asdict_inner(k, dict_factory)] = _asdict_inner(v, dict_factory) + return result elif isinstance(obj, dict): return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) diff --git a/Lib/datetime.py b/Lib/datetime.py index 007114ae622031..1b0c5cb2d1c6ff 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1032,9 +1032,13 @@ def ctime(self): _MONTHNAMES[self._month], self._day, self._year) - def strftime(self, fmt): - "Format using strftime()." - return _wrap_strftime(self, fmt, self.timetuple()) + def strftime(self, format): + """ + Format using strftime(). + + Example: "%d/%m/%Y, %H:%M:%S" + """ + return _wrap_strftime(self, format, self.timetuple()) def __format__(self, fmt): if not isinstance(fmt, str): diff --git a/Lib/dis.py b/Lib/dis.py index a045d18241b465..523bd01d929565 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -512,9 +512,8 @@ def _get_instructions_bytes(code, varname_from_oparg=None, for i in range(size): offset += 2 # Only show the fancy argrepr for a CACHE instruction when it's - # the first entry for a particular cache value and the - # instruction using it is actually quickened: - if i == 0 and op != deop: + # the first entry for a particular cache value: + if i == 0: data = code[offset: offset + 2 * size] argrepr = f"{name}: {int.from_bytes(data, sys.byteorder)}" else: diff --git a/Lib/distutils/README b/Lib/distutils/README deleted file mode 100644 index 73bd25187c0270..00000000000000 --- a/Lib/distutils/README +++ /dev/null @@ -1,11 +0,0 @@ -This directory contains the Distutils package. - -There's a full documentation available at: - - https://docs.python.org/distutils/ - -The Distutils-SIG web page is also a good starting point: - - https://www.python.org/sigs/distutils-sig/ - -$Id$ diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py deleted file mode 100644 index fdad6f65a78562..00000000000000 --- a/Lib/distutils/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -"""distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) 
-""" - -import sys -import warnings - -__version__ = sys.version[:sys.version.index(' ')] - -_DEPRECATION_MESSAGE = ("The distutils package is deprecated and slated for " - "removal in Python 3.12. Use setuptools or check " - "PEP 632 for potential alternatives") -warnings.warn(_DEPRECATION_MESSAGE, - DeprecationWarning, 2) diff --git a/Lib/distutils/_msvccompiler.py b/Lib/distutils/_msvccompiler.py deleted file mode 100644 index af8099a4078192..00000000000000 --- a/Lib/distutils/_msvccompiler.py +++ /dev/null @@ -1,539 +0,0 @@ -"""distutils._msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for Microsoft Visual Studio 2015. - -The module is compatible with VS 2015 and later. You can find legacy support -for older versions in distutils.msvc9compiler and distutils.msvccompiler. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS 2005 and VS 2008 by Christian Heimes -# ported to VS 2015 by Steve Dower - -import os -import subprocess -import winreg - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -from itertools import count - -def _find_vc2015(): - try: - key = winreg.OpenKeyEx( - winreg.HKEY_LOCAL_MACHINE, - r"Software\Microsoft\VisualStudio\SxS\VC7", - access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY - ) - except OSError: - log.debug("Visual C++ is not registered") - return None, None - - best_version = 0 - best_dir = None - with key: - for i in count(): - try: - v, vc_dir, vt = winreg.EnumValue(key, i) - except OSError: - break - if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): - try: - version = int(float(v)) - except (ValueError, TypeError): - continue - if version >= 14 and version > best_version: - best_version, best_dir = version, vc_dir - return best_version, best_dir - -def _find_vc2017(): - """Returns "15, path" based on the result of invoking vswhere.exe - If no install is found, returns "None, None" - - The version is returned to avoid unnecessarily changing the function - result. It may be ignored when the path is not None. - - If vswhere.exe is not available, by definition, VS 2017 is not - installed. 
- """ - root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") - if not root: - return None, None - - try: - path = subprocess.check_output([ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", "installationPath", - "-products", "*", - ], encoding="mbcs", errors="strict").strip() - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - return None, None - - path = os.path.join(path, "VC", "Auxiliary", "Build") - if os.path.isdir(path): - return 15, path - - return None, None - -PLAT_SPEC_TO_RUNTIME = { - 'x86' : 'x86', - 'x86_amd64' : 'x64', - 'x86_arm' : 'arm', - 'x86_arm64' : 'arm64' -} - -def _find_vcvarsall(plat_spec): - # bpo-38597: Removed vcruntime return value - _, best_dir = _find_vc2017() - - if not best_dir: - best_version, best_dir = _find_vc2015() - - if not best_dir: - log.debug("No suitable Visual C++ version found") - return None, None - - vcvarsall = os.path.join(best_dir, "vcvarsall.bat") - if not os.path.isfile(vcvarsall): - log.debug("%s cannot be found", vcvarsall) - return None, None - - return vcvarsall, None - -def _get_vc_env(plat_spec): - if os.getenv("DISTUTILS_USE_SDK"): - return { - key.lower(): value - for key, value in os.environ.items() - } - - vcvarsall, _ = _find_vcvarsall(plat_spec) - if not vcvarsall: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - - try: - out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), - stderr=subprocess.STDOUT, - ).decode('utf-16le', errors='replace') - except subprocess.CalledProcessError as exc: - log.error(exc.output) - raise DistutilsPlatformError("Error executing {}" - .format(exc.cmd)) - - env = { - key.lower(): value - for key, _, value in - (line.partition('=') for line in out.splitlines()) - if key and value - } - - return env - -def _find_exe(exe, paths=None): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - if not paths: - paths = os.getenv('path').split(os.pathsep) - for p in paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - return exe - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Always cross-compile from x86 to work with the -# lighter-weight MSVC installs that do not include native 64-bit tools. -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'x86_amd64', - 'win-arm32' : 'x86_arm', - 'win-arm64' : 'x86_arm64' -} - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. 
- executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - if plat_name not in PLAT_TO_VCVARS: - raise DistutilsPlatformError("--plat-name must be one of {}" - .format(tuple(PLAT_TO_VCVARS))) - - # Get the vcvarsall.bat spec for the requested platform. - plat_spec = PLAT_TO_VCVARS[plat_name] - - vc_env = _get_vc_env(plat_spec) - if not vc_env: - raise DistutilsPlatformError("Unable to find a compatible " - "Visual Studio installation.") - - self._paths = vc_env.get('path', '') - paths = self._paths.split(os.pathsep) - self.cc = _find_exe("cl.exe", paths) - self.linker = _find_exe("link.exe", paths) - self.lib = _find_exe("lib.exe", paths) - self.rc = _find_exe("rc.exe", paths) # resource compiler - self.mc = _find_exe("mc.exe", paths) # message compiler - self.mt = _find_exe("mt.exe", paths) # message compiler - - for dir in vc_env.get('include', '').split(os.pathsep): - if dir: - self.add_include_dir(dir.rstrip(os.sep)) - - for dir in vc_env.get('lib', '').split(os.pathsep): - if dir: - self.add_library_dir(dir.rstrip(os.sep)) - - self.preprocess_options = None - # bpo-38597: Always compile with dynamic linking - # Future releases of Python 3.x will include all past - # versions of vcruntime*.dll for compatibility. 
- self.compile_options = [ - '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD' - ] - - self.compile_options_debug = [ - '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' - ] - - ldflags = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG' - ] - - ldflags_debug = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' - ] - - self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] - self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] - self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_static = [*ldflags] - self.ldflags_static_debug = [*ldflags_debug] - - self._ldflags = { - (CCompiler.EXECUTABLE, None): self.ldflags_exe, - (CCompiler.EXECUTABLE, False): self.ldflags_exe, - (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, - (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, - (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, - } - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - ext_map = { - **{ext: self.obj_extension for ext in self.src_extensions}, - **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, - } - - output_dir = output_dir or '' - - def make_out_path(p): - base, ext = os.path.splitext(p) - if strip_dir: - base = os.path.basename(base) - else: - _, base = os.path.splitdrive(base) - if base.startswith((os.path.sep, os.path.altsep)): - base = base[1:] - try: - # XXX: This may produce absurdly long paths. We should check - # the length of the result and trim base until we fit within - # 260 characters. 
- return os.path.join(output_dir, base + ext_map[ext]) - except LookupError: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile {}".format(p)) - - return list(map(make_out_path, source_filenames)) - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - - add_cpp_opts = False - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - add_cpp_opts = True - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) - base, _ = os.path.splitext(os.path.basename (src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc, "/fo" + obj, rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile {} to {}" - .format(src, obj)) - - args = [self.cc] + compile_opts + pp_opts - if add_cpp_opts: - args.append('/EHsc') - args.append(input_opt) - args.append("/Fo" + obj) - args.extend(extra_postargs) - - try: - self.spawn(args) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - if runtime_library_dirs: - self.warn("I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ldflags = self._ldflags[target_desc, debug] - - export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - output_dir = os.path.dirname(os.path.abspath(output_filename)) - self.mkpath(output_dir) - try: - log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def spawn(self, cmd): - old_path = os.getenv('path') - try: - os.environ['path'] = self._paths - return super().spawn(cmd) - finally: - os.environ['path'] = old_path - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC") - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. 
- if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.isfile(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/Lib/distutils/archive_util.py b/Lib/distutils/archive_util.py deleted file mode 100644 index 565a3117b4b5e1..00000000000000 --- a/Lib/distutils/archive_util.py +++ /dev/null @@ -1,256 +0,0 @@ -"""distutils.archive_util - -Utility functions for creating archive files (tarballs, zip files, -that sort of thing).""" - -import os -from warnings import warn -import sys - -try: - import zipfile -except ImportError: - zipfile = None - - -from distutils.errors import DistutilsExecError -from distutils.spawn import spawn -from distutils.dir_util import mkpath -from distutils import log - -try: - from pwd import getpwnam -except ImportError: - getpwnam = None - -try: - from grp import getgrnam -except ImportError: - getgrnam = None - -def _get_gid(name): - """Returns a gid, given a group name.""" - if getgrnam is None or name is None: - return None - try: - result = getgrnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _get_uid(name): - """Returns an uid, given a user name.""" - if getpwnam is None or name is None: - return None - try: - result = getpwnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None): - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or - None. ("compress" will be deprecated in Python 3.2) - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). - - Returns the output filename. 
- """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', - 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', - 'compress': '.Z'} - - # flags for compression program, each element of list will be an argument - if compress is not None and compress not in compress_ext.keys(): - raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2', " - "'xz' or 'compress'") - - archive_name = base_name + '.tar' - if compress != 'compress': - archive_name += compress_ext.get(compress, '') - - mkpath(os.path.dirname(archive_name), dry_run=dry_run) - - # creating the tarball - import tarfile # late import so Python build itself doesn't break - - log.info('Creating tar archive') - - uid = _get_uid(owner) - gid = _get_gid(group) - - def _set_uid_gid(tarinfo): - if gid is not None: - tarinfo.gid = gid - tarinfo.gname = group - if uid is not None: - tarinfo.uid = uid - tarinfo.uname = owner - return tarinfo - - if not dry_run: - tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) - try: - tar.add(base_dir, filter=_set_uid_gid) - finally: - tar.close() - - # compression using `compress` - if compress == 'compress': - warn("'compress' will be deprecated.", PendingDeprecationWarning) - # the option varies depending on the platform - compressed_name = archive_name + compress_ext[compress] - if sys.platform == 'win32': - cmd = [compress, archive_name, compressed_name] - else: - cmd = [compress, '-f', archive_name] - spawn(cmd, dry_run=dry_run) - return compressed_name - - return archive_name - -def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises DistutilsExecError. Returns the name of the output zip - file. - """ - zip_filename = base_name + ".zip" - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - - # If zipfile module is not available, try spawning an external - # 'zip' command. - if zipfile is None: - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - - try: - spawn(["zip", zipoptions, zip_filename, base_dir], - dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". 
- raise DistutilsExecError(("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename) - - else: - log.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - try: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) - except RuntimeError: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_STORED) - - with zip: - if base_dir != os.curdir: - path = os.path.normpath(os.path.join(base_dir, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in dirnames: - path = os.path.normpath(os.path.join(dirpath, name, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zip.write(path, path) - log.info("adding '%s'", path) - - return zip_filename - -ARCHIVE_FORMATS = { - 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (make_zipfile, [],"ZIP file") - } - -def check_archive_formats(formats): - """Returns the first format from the 'format' list that is unknown. - - If all formats are known, returns None - """ - for format in formats: - if format not in ARCHIVE_FORMATS: - return format - return None - -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None): - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. 
- """ - save_cwd = os.getcwd() - if root_dir is not None: - log.debug("changing into '%s'", root_dir) - base_name = os.path.abspath(base_name) - if not dry_run: - os.chdir(root_dir) - - if base_dir is None: - base_dir = os.curdir - - kwargs = {'dry_run': dry_run} - - try: - format_info = ARCHIVE_FORMATS[format] - except KeyError: - raise ValueError("unknown archive format '%s'" % format) - - func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val - - if format != 'zip': - kwargs['owner'] = owner - kwargs['group'] = group - - try: - filename = func(base_name, base_dir, **kwargs) - finally: - if root_dir is not None: - log.debug("changing back to '%s'", save_cwd) - os.chdir(save_cwd) - - return filename diff --git a/Lib/distutils/bcppcompiler.py b/Lib/distutils/bcppcompiler.py deleted file mode 100644 index 071fea5d038cb0..00000000000000 --- a/Lib/distutils/bcppcompiler.py +++ /dev/null @@ -1,393 +0,0 @@ -"""distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" - -# This implementation by Lyle Johnson, based on the original msvccompiler.py -# module and using the directions originally published by Gordon Williams. - -# XXX looks like there's a LOT of overlap between these two classes: -# someone should sit down and factor out the common code as -# WindowsCCompiler! --GPW - - -import os -from distutils.errors import \ - DistutilsExecError, \ - CompileError, LibError, LinkError, UnknownFileError -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options -from distutils.file_util import write_file -from distutils.dep_util import newer -from distutils import log - -class BCPPCompiler(CCompiler) : - """Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ - - compiler_type = 'bcpp' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = _c_extensions + _cpp_extensions - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - - CCompiler.__init__ (self, verbose, dry_run, force) - - # These executables are assumed to all be in the path. - # Borland doesn't seem to use any special registry settings to - # indicate their installation locations. 
- - self.cc = "bcc32.exe" - self.linker = "ilink32.exe" - self.lib = "tlib.exe" - - self.preprocess_options = None - self.compile_options = ['/tWM', '/O2', '/q', '/g0'] - self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] - - self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_static = [] - self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - - - # -- Worker methods ------------------------------------------------ - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - compile_opts = extra_preargs or [] - compile_opts.append ('-c') - if debug: - compile_opts.extend (self.compile_options_debug) - else: - compile_opts.extend (self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - # XXX why do the normpath here? - src = os.path.normpath(src) - obj = os.path.normpath(obj) - # XXX _setup_compile() did a mkpath() too but before the normpath. - # Is it possible to skip the normpath? - self.mkpath(os.path.dirname(obj)) - - if ext == '.res': - # This is already a binary file -- skip it. - continue # the 'for' loop - if ext == '.rc': - # This needs to be compiled to a .res file -- do it now. - try: - self.spawn (["brcc32", "-fo", obj, src]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue # the 'for' loop - - # The next two are both for the real compiler. - if ext in self._c_extensions: - input_opt = "" - elif ext in self._cpp_extensions: - input_opt = "-P" - else: - # Unknown file type -- no extra options. The compiler - # will probably fail, but let it just in case this is a - # file the compiler recognizes even if we don't. - input_opt = "" - - output_opt = "-o" + obj - - # Compiler command line syntax is: "bcc32 [options] file(s)". - # Note that the source file names must appear at the end of - # the command line. - try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - # compile () - - - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): - lib_args = [output_filename, '/u'] + objects - if debug: - pass # XXX what goes here? - try: - self.spawn ([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # create_static_lib () - - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - # XXX this ignores 'build_temp'! 
should follow the lead of - # msvccompiler.py - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) - - if runtime_library_dirs: - log.warn("I don't know what to do with 'runtime_library_dirs': %s", - str(runtime_library_dirs)) - - if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) - - if self._need_link (objects, output_filename): - - # Figure out linker args based on type of target. - if target_desc == CCompiler.EXECUTABLE: - startup_obj = 'c0w32' - if debug: - ld_args = self.ldflags_exe_debug[:] - else: - ld_args = self.ldflags_exe[:] - else: - startup_obj = 'c0d32' - if debug: - ld_args = self.ldflags_shared_debug[:] - else: - ld_args = self.ldflags_shared[:] - - - # Create a temporary exports file for use by the linker - if export_symbols is None: - def_file = '' - else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) - contents = ['EXPORTS'] - for sym in (export_symbols or []): - contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # Borland C++ has problems with '/' in paths - objects2 = map(os.path.normpath, objects) - # split objects in .obj and .res files - # Borland C++ needs them at different positions in the command line - objects = [startup_obj] - resources = [] - for file in objects2: - (base, ext) = os.path.splitext(os.path.normcase(file)) - if ext == '.res': - resources.append(file) - else: - objects.append(file) - - - for l in library_dirs: - ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths - - # list of object files - ld_args.extend(objects) - - # XXX the command-line syntax for Borland C++ is a bit wonky; - # certain filenames are jammed together in one big string, but - # comma-delimited. This doesn't mesh too well with the - # Unix-centric attitude (with a DOS/Windows quoting hack) of - # 'spawn()', so constructing the argument list is a bit - # awkward. Note that doing the obvious thing and jamming all - # the filenames and commas into one argument would be wrong, - # because 'spawn()' would quote any filenames with spaces in - # them. Arghghh!. Apparently it works fine as coded... 
- - # name of dll/exe file - ld_args.extend([',',output_filename]) - # no map file and start libraries - ld_args.append(',,') - - for lib in libraries: - # see if we find it and if there is a bcpp specific lib - # (xxx_bcpp.lib) - libfile = self.find_library_file(library_dirs, lib, debug) - if libfile is None: - ld_args.append(lib) - # probably a BCPP internal library -- don't warn - else: - # full name which prefers bcpp_xxx.lib over xxx.lib - ld_args.append(libfile) - - # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') - - # def file for export symbols - ld_args.extend([',',def_file]) - # add resource files - ld_args.append(',') - ld_args.extend(resources) - - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath (os.path.dirname (output_filename)) - try: - self.spawn ([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # link () - - # -- Miscellaneous methods ----------------------------------------- - - - def find_library_file (self, dirs, lib, debug=0): - # List of effective library names to try, in order of preference: - # xxx_bcpp.lib is better than xxx.lib - # and xxx_d.lib is better than xxx.lib if debug is set - # - # The "_bcpp" suffix is to handle a Python installation for people - # with multiple compilers (primarily Distutils hackers, I suspect - # ;-). The idea is they'd have one static library for each - # compiler they care about, since (almost?) every Windows compiler - # seems to have a different format for static libraries. - if debug: - dlib = (lib + "_d") - try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) - else: - try_names = (lib + "_bcpp", lib) - - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext == '.res': - # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) - elif ext == '.rc': - # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - # object_filenames () - - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ - self._fix_compile_args(None, macros, include_dirs) - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = ['cpp32.exe'] + pp_opts - if output_file is not None: - pp_args.append('-o' + output_file) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or the - # source file is newer 
than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - print(msg) - raise CompileError(msg) - - # preprocess() diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py deleted file mode 100644 index 4c47f2ed245d4f..00000000000000 --- a/Lib/distutils/ccompiler.py +++ /dev/null @@ -1,1116 +0,0 @@ -"""distutils.ccompiler - -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model.""" - -import sys, os, re -from distutils.errors import * -from distutils.spawn import spawn -from distutils.file_util import move_file -from distutils.dir_util import mkpath -from distutils.dep_util import newer_group -from distutils.util import split_quoted, execute -from distutils import log - -class CCompiler: - """Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. - """ - - # 'compiler_type' is a class attribute that identifies this class. It - # keeps code that wants to know what kind of compiler it's dealing with - # from having to import all possible compiler classes just to do an - # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type' - # should really, really be one of the keys of the 'compiler_class' - # dictionary (see below -- used by the 'new_compiler()' factory - # function) -- authors of new compiler interface classes are - # responsible for updating 'compiler_class'! - compiler_type = None - - # XXX things not handled by this compiler abstraction model: - # * client can't provide additional options for a compiler, - # e.g. warning, optimization, debugging flags. Perhaps this - # should be the domain of concrete compiler abstraction classes - # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base - # class should have methods for the common ones. - # * can't completely override the include or library searchg - # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". - # I'm not sure how widely supported this is even by Unix - # compilers, much less on other platforms. And I'm even less - # sure how useful it is; maybe for cross-compiling, but - # support for that is a ways off. (And anyways, cross - # compilers probably have a dedicated binary with the - # right paths compiled in. I hope.) - # * can't do really freaky things with the library list/library - # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against - # different versions of libfoo.a in different locations. I - # think this is useless without the ability to null out the - # library search path anyways. - - - # Subclasses that rely on the standard filename generation methods - # implemented below should override these; see the comment near - # those methods ('object_filenames()' et. al.) 
for details: - src_extensions = None # list of strings - obj_extension = None # string - static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string - - # Default language settings. language_map is used to detect a source - # file or Extension target language, checking source filenames. - # language_order is used to detect the language precedence, when deciding - # what language to use when mixing source types. For example, if some - # extension has two files with ".c" extension, and one with ".cpp", it - # is still linked as c++. - language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", - } - language_order = ["c++", "objc", "c"] - - def __init__(self, verbose=0, dry_run=0, force=0): - self.dry_run = dry_run - self.force = force - self.verbose = verbose - - # 'output_dir': a common output directory for object, library, - # shared object, and shared library files - self.output_dir = None - - # 'macros': a list of macro definitions (or undefinitions). A - # macro definition is a 2-tuple (name, value), where the value is - # either a string or None (no explicit value). A macro - # undefinition is a 1-tuple (name,). - self.macros = [] - - # 'include_dirs': a list of directories to search for include files - self.include_dirs = [] - - # 'libraries': a list of libraries to include in any link - # (library names, not filenames: eg. "foo" not "libfoo.a") - self.libraries = [] - - # 'library_dirs': a list of directories to search for libraries - self.library_dirs = [] - - # 'runtime_library_dirs': a list of directories to search for - # shared libraries/objects at runtime - self.runtime_library_dirs = [] - - # 'objects': a list of object files (or similar, such as explicitly - # named library files) to include on any link - self.objects = [] - - for key in self.executables.keys(): - self.set_executable(key, self.executables[key]) - - def set_executables(self, **kwargs): - """Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command-line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - - # Note that some CCompiler implementation classes will define class - # attributes 'cpp', 'cc', etc. with hard-coded executable names; - # this is appropriate when a compiler class is for exactly one - # compiler/OS combination (eg. MSVCCompiler). Other compiler - # classes (UnixCCompiler, in particular) are driven by information - # discovered at run-time, since there are many different ways to do - # basically the same things with Unix C compilers. 
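The set_executables() contract described above (each executable is supplied as a single command string, split with shell-style quoting) can be seen with the standard library's shlex module, which splits comparably to the split_quoted() helper this code relies on. A small sketch with a made-up compiler command line:

    import shlex

    # Illustrative only; the command line is invented.
    cmd = 'gcc -pthread -DNDEBUG -I"/opt/my include" -c'
    print(shlex.split(cmd))
    # -> ['gcc', '-pthread', '-DNDEBUG', '-I/opt/my include', '-c']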
- - for key in kwargs: - if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % - (key, self.__class__.__name__)) - self.set_executable(key, kwargs[key]) - - def set_executable(self, key, value): - if isinstance(value, str): - setattr(self, key, split_quoted(value)) - else: - setattr(self, key, value) - - def _find_macro(self, name): - i = 0 - for defn in self.macros: - if defn[0] == name: - return i - i += 1 - return None - - def _check_macro_definitions(self, definitions): - """Ensures that every element of 'definitions' is a valid macro - definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do - nothing if all definitions are OK, raise TypeError otherwise. - """ - for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) in (1, 2) and - (isinstance (defn[1], str) or defn[1] is None)) and - isinstance (defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - - - # -- Bookkeeping methods ------------------------------------------- - - def define_macro(self, name, value=None): - """Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - self.macros.append((name, value)) - - def undefine_macro(self, name): - """Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - undefn = (name,) - self.macros.append(undefn) - - def add_include_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. - """ - self.include_dirs.append(dir) - - def set_include_dirs(self, dirs): - """Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - self.include_dirs = dirs[:] - - def add_library(self, libname): - """Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). 
- - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - self.libraries.append(libname) - - def set_libraries(self, libnames): - """Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. - """ - self.libraries = libnames[:] - - def add_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - self.library_dirs.append(dir) - - def set_library_dirs(self, dirs): - """Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - self.library_dirs = dirs[:] - - def add_runtime_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - self.runtime_library_dirs.append(dir) - - def set_runtime_library_dirs(self, dirs): - """Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - self.runtime_library_dirs = dirs[:] - - def add_link_object(self, object): - """Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. - """ - self.objects.append(object) - - def set_link_objects(self, objects): - """Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). 
- """ - self.objects = objects[:] - - - # -- Private utility methods -------------------------------------- - # (here for the convenience of subclasses) - - # Helper method to prep compiler in subclass compile() methods - - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): - """Process arguments and decide which source files to compile.""" - if outdir is None: - outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if incdirs is None: - incdirs = self.include_dirs - elif isinstance(incdirs, (list, tuple)): - incdirs = list(incdirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - if extra is None: - extra = [] - - # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=0, - output_dir=outdir) - assert len(objects) == len(sources) - - pp_opts = gen_preprocess_options(macros, incdirs) - - build = {} - for i in range(len(sources)): - src = sources[i] - obj = objects[i] - ext = os.path.splitext(src)[1] - self.mkpath(os.path.dirname(obj)) - build[obj] = (src, ext) - - return macros, objects, extra, pp_opts, build - - def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler, cygwinccompiler - cc_args = pp_opts + ['-c'] - if debug: - cc_args[:0] = ['-g'] - if before: - cc_args[:0] = before - return cc_args - - def _fix_compile_args(self, output_dir, macros, include_dirs): - """Typecheck and fix-up some of the arguments to the 'compile()' - method, and return fixed-up values. Specifically: if 'output_dir' - is None, replaces it with 'self.output_dir'; ensures that 'macros' - is a list, and augments it with 'self.macros'; ensures that - 'include_dirs' is a list, and augments it with 'self.include_dirs'. - Guarantees that the returned values are of the correct type, - i.e. for 'output_dir' either string or None, and for 'macros' and - 'include_dirs' either list or None. - """ - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if include_dirs is None: - include_dirs = self.include_dirs - elif isinstance(include_dirs, (list, tuple)): - include_dirs = list(include_dirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - return output_dir, macros, include_dirs - - def _prep_compile(self, sources, output_dir, depends=None): - """Decide which source files must be recompiled. - - Determine the list of object files corresponding to 'sources', - and figure out which ones really need to be recompiled. - Return a list of all object files and a dictionary telling - which source files can be skipped. - """ - # Get the list of expected output (object) files - objects = self.object_filenames(sources, output_dir=output_dir) - assert len(objects) == len(sources) - - # Return an empty dict for the "which source files can be skipped" - # return value to preserve API compatibility. 
- return objects, {} - - def _fix_object_args(self, objects, output_dir): - """Typecheck and fix up some arguments supplied to various methods. - Specifically: ensure that 'objects' is a list; if output_dir is - None, replace with self.output_dir. Return fixed versions of - 'objects' and 'output_dir'. - """ - if not isinstance(objects, (list, tuple)): - raise TypeError("'objects' must be a list or tuple of strings") - objects = list(objects) - - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - return (objects, output_dir) - - def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): - """Typecheck and fix up some of the arguments supplied to the - 'link_*' methods. Specifically: ensure that all arguments are - lists, and augment them with their permanent versions - (eg. 'self.libraries' augments 'libraries'). Return a tuple with - fixed versions of all arguments. - """ - if libraries is None: - libraries = self.libraries - elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) - else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") - - if library_dirs is None: - library_dirs = self.library_dirs - elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) - else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") - - if runtime_library_dirs is None: - runtime_library_dirs = self.runtime_library_dirs - elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) - else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") - - return (libraries, library_dirs, runtime_library_dirs) - - def _need_link(self, objects, output_file): - """Return true if we need to relink the files listed in 'objects' - to recreate 'output_file'. - """ - if self.force: - return True - else: - if self.dry_run: - newer = newer_group (objects, output_file, missing='newer') - else: - newer = newer_group (objects, output_file) - return newer - - def detect_language(self, sources): - """Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. - """ - if not isinstance(sources, list): - sources = [sources] - lang = None - index = len(self.language_order) - for source in sources: - base, ext = os.path.splitext(source) - extlang = self.language_map.get(ext) - try: - extindex = self.language_order.index(extlang) - if extindex < index: - lang = extlang - index = extindex - except ValueError: - pass - return lang - - - # -- Worker methods ------------------------------------------------ - # (must be implemented by subclasses) - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - """Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. 
- """ - pass - - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): - """Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command-line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepend/append to the compiler command - line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - # A concrete compiler class can either override this method - # entirely or implement _compile(). - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) - - # Return *all* object filenames, not just the ones we just built. - return objects - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compile 'src' to product 'obj'.""" - # A concrete compiler class that does not override compile() - # should implement _compile(). - pass - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): - """Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). 
- - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. - - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - pass - - - # values for target_desc parameter in link() - SHARED_OBJECT = "shared_object" - SHARED_LIBRARY = "shared_library" - EXECUTABLE = "executable" - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - """Link a bunch of stuff together to create an executable or - shared library file. - - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) - - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - raise NotImplementedError - - - # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
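The link() contract spelled out above (a target_desc constant, a list of object files, bare library names, and separate search and runtime directories) is easiest to see from the caller's side. A minimal sketch of how the removed API was typically driven; the file and library names are invented, and the import only works where a copy of distutils (for example the one vendored by setuptools) is still available:

    from distutils.ccompiler import new_compiler, CCompiler

    # Illustrative only; "hello.c", "hello.so" and the directories are made up.
    cc = new_compiler()                        # platform default compiler class
    objs = cc.compile(["hello.c"],
                      output_dir="build",
                      macros=[("NDEBUG", None)],   # (name, None) defines without a value
                      include_dirs=["include"])
    cc.link(CCompiler.SHARED_OBJECT, objs, "hello.so",
            libraries=["m"],                   # bare names, not filenames
            library_dirs=["/usr/local/lib"],
            build_temp="build")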
- - def link_shared_lib(self, - objects, - output_libname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_shared_object(self, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_executable(self, - objects, - output_progname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function; there is - # no appropriate default implementation so subclasses should - # implement all of these. - - def library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - raise NotImplementedError - - def runtime_library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - raise NotImplementedError - - def library_option(self, lib): - """Return the compiler option to add 'lib' to the list of libraries - linked into the shared library or executable. - """ - raise NotImplementedError - - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - # this can't be included at module scope because it tries to - # import math which might not be available at that point - maybe - # the necessary logic should just be inlined? 
- import tempfile - if includes is None: - includes = [] - if include_dirs is None: - include_dirs = [] - if libraries is None: - libraries = [] - if library_dirs is None: - library_dirs = [] - fd, fname = tempfile.mkstemp(".c", funcname, text=True) - f = os.fdopen(fd, "w") - try: - for incl in includes: - f.write("""#include "%s"\n""" % incl) - f.write("""\ -int main (int argc, char **argv) { - %s(); - return 0; -} -""" % funcname) - finally: - f.close() - try: - objects = self.compile([fname], include_dirs=include_dirs) - except CompileError: - return False - - try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) - except (LinkError, TypeError): - return False - return True - - def find_library_file (self, dirs, lib, debug=0): - """Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - raise NotImplementedError - - # -- Filename generation methods ----------------------------------- - - # The default implementation of the filename generating methods are - # prejudiced towards the Unix/DOS/Windows view of the world: - # * object files are named by replacing the source file extension - # (eg. .c/.cpp -> .o/.obj) - # * library files (shared or static) are named by plugging the - # library name and extension into a format string, eg. - # "lib%s.%s" % (lib_name, ".a") for Unix static libraries - # * executables are named by appending an extension (possibly - # empty) to the program name: eg. progname + ".exe" for - # Windows - # - # To reduce redundant code, these methods expect to find - # several attributes in the current object (presumably defined - # as class attributes): - # * src_extensions - - # list of C/C++ source file extensions, eg. ['.c', '.cpp'] - # * obj_extension - - # object file extension, eg. '.o' or '.obj' - # * static_lib_extension - - # extension for static library files, eg. '.a' or '.lib' - # * shared_lib_extension - - # extension for shared library/object files, eg. '.so', '.dll' - # * static_lib_format - - # format string for generating static library filenames, - # eg. 'lib%s.%s' or '%s.%s' - # * shared_lib_format - # format string for generating shared library filenames - # (probably same as static_lib_format, since the extension - # is one of the intended parameters to the format string) - # * exe_extension - - # extension for executable files, eg. 
'' or '.exe' - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - raise UnknownFileError( - "unknown file type '%s' (from '%s')" % (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - def shared_object_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + self.shared_lib_extension) - - def executable_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + (self.exe_extension or '')) - - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): - assert output_dir is not None - if lib_type not in ("static", "shared", "dylib", "xcode_stub"): - raise ValueError( - "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") - fmt = getattr(self, lib_type + "_lib_format") - ext = getattr(self, lib_type + "_lib_extension") - - dir, base = os.path.split(libname) - filename = fmt % (base, ext) - if strip_dir: - dir = '' - - return os.path.join(output_dir, dir, filename) - - - # -- Utility methods ----------------------------------------------- - - def announce(self, msg, level=1): - log.debug(msg) - - def debug_print(self, msg): - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - def warn(self, msg): - sys.stderr.write("warning: %s\n" % msg) - - def execute(self, func, args, msg=None, level=1): - execute(func, args, msg, self.dry_run) - - def spawn(self, cmd): - spawn(cmd, dry_run=self.dry_run) - - def move_file(self, src, dst): - return move_file(src, dst, dry_run=self.dry_run) - - def mkpath (self, name, mode=0o777): - mkpath(name, mode, dry_run=self.dry_run) - - -# Map a sys.platform/os.name ('posix', 'nt') to the default compiler -# type for that platform. Keys are interpreted as re match -# patterns. Order is important; platform mappings are preferred over -# OS names. -_default_compilers = ( - - # Platform string mappings - - # on a cygwin built python we can use gcc like an ordinary UNIXish - # compiler - ('cygwin.*', 'unix'), - - # OS name mappings - ('posix', 'unix'), - ('nt', 'msvc'), - - ) - -def get_default_compiler(osname=None, platform=None): - """Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. - - The default values are os.name and sys.platform in case the - parameters are not given. - """ - if osname is None: - osname = os.name - if platform is None: - platform = sys.platform - for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: - return compiler - # Default to Unix compiler - return 'unix' - -# Map compiler types to (module_name, class_name) pairs -- ie. where to -# find the code that implements an interface to this compiler. 
(The module -# is assumed to be in the 'distutils' package.) -compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', - "standard UNIX-style compiler"), - 'msvc': ('_msvccompiler', 'MSVCCompiler', - "Microsoft Visual C++"), - 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', - "Cygwin port of GNU C Compiler for Win32"), - 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', - "Mingw32 port of GNU C Compiler for Win32"), - 'bcpp': ('bcppcompiler', 'BCPPCompiler', - "Borland C++ Compiler"), - } - -def show_compilers(): - """Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). - """ - # XXX this "knows" that the compiler option it's describing is - # "--compiler", which just happens to be the case for the three - # commands that use it. - from distutils.fancy_getopt import FancyGetopt - compilers = [] - for compiler in compiler_class.keys(): - compilers.append(("compiler="+compiler, None, - compiler_class[compiler][2])) - compilers.sort() - pretty_printer = FancyGetopt(compilers) - pretty_printer.print_help("List of available compilers:") - - -def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): - """Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - if plat is None: - plat = os.name - - try: - if compiler is None: - compiler = get_default_compiler(plat) - - (module_name, class_name, long_description) = compiler_class[compiler] - except KeyError: - msg = "don't know how to compile C/C++ code on platform '%s'" % plat - if compiler is not None: - msg = msg + " with '%s' compiler" % compiler - raise DistutilsPlatformError(msg) - - try: - module_name = "distutils." + module_name - __import__ (module_name) - module = sys.modules[module_name] - klass = vars(module)[class_name] - except ImportError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to load module '%s'" % \ - module_name) - except KeyError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to find class '%s' " - "in module '%s'" % (class_name, module_name)) - - # XXX The None is necessary to preserve backwards compatibility - # with classes that expect verbose to be the first positional - # argument. - return klass(None, dry_run, force) - - -def gen_preprocess_options(macros, include_dirs): - """Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - # XXX it would be nice (mainly aesthetic, and so we don't generate - # stupid-looking command lines) to go over 'macros' and eliminate - # redundant definitions/undefinitions (ie. 
ensure that only the - # latest mention of a particular macro winds up on the command - # line). I don't think it's essential, though, since most (all?) - # Unix C compilers only pay attention to the latest -D or -U - # mention of a macro on their command line. Similar situation for - # 'include_dirs'. I'm punting on both for now. Anyways, weeding out - # redundancies like this should probably be the province of - # CCompiler, since the data structures used are inherited from it - # and therefore common to all CCompiler classes. - pp_opts = [] - for macro in macros: - if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): - raise TypeError( - "bad macro definition '%s': " - "each element of 'macros' list must be a 1- or 2-tuple" - % macro) - - if len(macro) == 1: # undefine this macro - pp_opts.append("-U%s" % macro[0]) - elif len(macro) == 2: - if macro[1] is None: # define with no explicit value - pp_opts.append("-D%s" % macro[0]) - else: - # XXX *don't* need to be clever about quoting the - # macro value here, because we're going to avoid the - # shell at all costs when we spawn the command! - pp_opts.append("-D%s=%s" % macro) - - for dir in include_dirs: - pp_opts.append("-I%s" % dir) - return pp_opts - - -def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): - """Generate linker options for searching library directories and - linking with specific libraries. 'libraries' and 'library_dirs' are, - respectively, lists of library names (not filenames!) and search - directories. Returns a list of command-line options suitable for use - with some compiler (depending on the two format strings passed in). - """ - lib_opts = [] - - for dir in library_dirs: - lib_opts.append(compiler.library_dir_option(dir)) - - for dir in runtime_library_dirs: - opt = compiler.runtime_library_dir_option(dir) - if isinstance(opt, list): - lib_opts = lib_opts + opt - else: - lib_opts.append(opt) - - # XXX it's important that we *not* remove redundant library mentions! - # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to - # resolve all symbols. I just hope we never have to say "-lfoo obj.o - # -lbar" to get things to work -- that's certainly a possibility, but a - # pretty nasty way to arrange your C code. - - for lib in libraries: - (lib_dir, lib_name) = os.path.split(lib) - if lib_dir: - lib_file = compiler.find_library_file([lib_dir], lib_name) - if lib_file: - lib_opts.append(lib_file) - else: - compiler.warn("no library file corresponding to " - "'%s' found (skipping)" % lib) - else: - lib_opts.append(compiler.library_option (lib)) - return lib_opts diff --git a/Lib/distutils/cmd.py b/Lib/distutils/cmd.py deleted file mode 100644 index dba3191e58474c..00000000000000 --- a/Lib/distutils/cmd.py +++ /dev/null @@ -1,403 +0,0 @@ -"""distutils.cmd - -Provides the Command class, the base class for the command classes -in the distutils.command package. -""" - -import sys, os, re -from distutils.errors import DistutilsOptionError -from distutils import util, dir_util, file_util, archive_util, dep_util -from distutils import log - -class Command: - """Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. 
The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ - - # 'sub_commands' formalizes the notion of a "family" of commands, - # eg. "install" as the parent with sub-commands "install_lib", - # "install_headers", etc. The parent of a family of commands - # defines 'sub_commands' as a class attribute; it's a list of - # (command_name : string, predicate : unbound_method | string | None) - # tuples, where 'predicate' is a method of the parent command that - # determines whether the corresponding command is applicable in the - # current situation. (Eg. we "install_headers" is only applicable if - # we have any C header files to install.) If 'predicate' is None, - # that command is always applicable. - # - # 'sub_commands' is usually defined at the *end* of a class, because - # predicates can be unbound methods, so they must already have been - # defined. The canonical example is the "install" command. - sub_commands = [] - - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, dist): - """Create and initialize a new Command object. Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being - instantiated. - """ - # late import because of mutual dependence between these classes - from distutils.dist import Distribution - - if not isinstance(dist, Distribution): - raise TypeError("dist must be a Distribution instance") - if self.__class__ is Command: - raise RuntimeError("Command is an abstract class") - - self.distribution = dist - self.initialize_options() - - # Per-command versions of the global flags, so that the user can - # customize Distutils' behaviour command-by-command and let some - # commands fall back on the Distribution's behaviour. None means - # "not defined, check self.distribution's copy", while 0 or 1 mean - # false and true (duh). Note that this means figuring out the real - # value of each flag is a touch complicated -- hence "self._dry_run" - # will be handled by __getattr__, below. - # XXX This needs to be fixed. - self._dry_run = None - - # verbose is largely ignored, but needs to be set for - # backwards compatibility (I think)? - self.verbose = dist.verbose - - # Some commands define a 'self.force' option to ignore file - # timestamps, but methods defined *here* assume that - # 'self.force' exists for all commands. So define it here - # just to be safe. - self.force = None - - # The 'help' flag is just used for command-line parsing, so - # none of that complicated bureaucracy is needed. - self.help = 0 - - # 'finalized' records whether or not 'finalize_options()' has been - # called. 'finalize_options()' itself should not pay attention to - # this flag: it is the business of 'ensure_finalized()', which - # always calls 'finalize_options()', to respect/update it. - self.finalized = 0 - - # XXX A more explicit way to customize dry_run would be better. 
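The initialize_options() / finalize_options() / run() split described in the docstring above is clearest from a skeleton subclass. A minimal sketch against the removed API, with an invented command name and option:

    from distutils.cmd import Command

    class frobnicate(Command):
        # Illustrative only; the command, option and messages are made up.
        description = "frobnicate the build tree"
        user_options = [("level=", "l", "how hard to frobnicate")]

        def initialize_options(self):
            # "declare" every option with a placeholder default
            self.level = None

        def finalize_options(self):
            # compute final values once the command line and config
            # files have been processed
            self.level = 1 if self.level is None else int(self.level)

        def run(self):
            # all real work and terminal/filesystem interaction goes here
            self.announce("frobnicating at level %d" % self.level)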
- def __getattr__(self, attr): - if attr == 'dry_run': - myval = getattr(self, "_" + attr) - if myval is None: - return getattr(self.distribution, attr) - else: - return myval - else: - raise AttributeError(attr) - - def ensure_finalized(self): - if not self.finalized: - self.finalize_options() - self.finalized = 1 - - # Subclasses must define: - # initialize_options() - # provide default values for all options; may be customized by - # setup script, by options from config file(s), or by command-line - # options - # finalize_options() - # decide on the final values for all options; this is called - # after all possible intervention from the outside world - # (command-line, option file, etc.) has been processed - # run() - # run the command: do whatever it is we're here to do, - # controlled by the command's various option values - - def initialize_options(self): - """Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command-line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def finalize_options(self): - """Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command-line or from other commands have been - done. Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - - def dump_options(self, header=None, indent=""): - from distutils.fancy_getopt import longopt_xlate - if header is None: - header = "command options for '%s':" % self.get_command_name() - self.announce(indent + header, level=log.INFO) - indent = indent + " " - for (option, _, _) in self.user_options: - option = option.translate(longopt_xlate) - if option[-1] == "=": - option = option[:-1] - value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=log.INFO) - - def run(self): - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def announce(self, msg, level=1): - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - log.log(level, msg) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. 
- """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - sys.stdout.flush() - - - # -- Option validation methods ------------------------------------- - # (these are very handy in writing the 'finalize_options()' method) - # - # NB. the general philosophy here is to ensure that a particular option - # value meets certain type and value constraints. If not, we try to - # force it into conformance (eg. if we expect a list but have a string, - # split the string on comma and/or whitespace). If we can't force the - # option into conformance, raise DistutilsOptionError. Thus, command - # classes need do nothing more than (eg.) - # self.ensure_string_list('foo') - # and they can be guaranteed that thereafter, self.foo will be - # a list of strings. - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) - return val - - def ensure_string(self, option, default=None): - """Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - self._ensure_stringlike(option, "string", default) - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, str) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) - - def _ensure_tested_string(self, option, tester, what, error_fmt, - default=None): - val = self._ensure_stringlike(option, what, default) - if val is not None and not tester(val): - raise DistutilsOptionError(("error in '%s' option: " + error_fmt) - % (option, val)) - - def ensure_filename(self, option): - """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") - - def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - - - # -- Convenience methods for commands ------------------------------ - - def get_command_name(self): - if hasattr(self, 'command_name'): - return self.command_name - else: - return self.__class__.__name__ - - def set_undefined_options(self, src_cmd, *option_pairs): - """Set the values of any "undefined" options from corresponding - option values in some other command object. "Undefined" here means - "is None", which is the convention used to indicate that an option - has not been changed between 'initialize_options()' and - 'finalize_options()'. Usually called from 'finalize_options()' for - options that depend on some other command rather than another - option of the same command. 'src_cmd' is the other command from - which option values will be taken (a command object will be created - for it if necessary); the remaining arguments are - '(src_option,dst_option)' tuples which mean "take the value of - 'src_option' in the 'src_cmd' command object, and copy it to - 'dst_option' in the current command object". 
- """ - # Option_pairs: list of (src_option, dst_option) tuples - src_cmd_obj = self.distribution.get_command_obj(src_cmd) - src_cmd_obj.ensure_finalized() - for (src_option, dst_option) in option_pairs: - if getattr(self, dst_option) is None: - setattr(self, dst_option, getattr(src_cmd_obj, src_option)) - - def get_finalized_command(self, command, create=1): - """Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - cmd_obj = self.distribution.get_command_obj(command, create) - cmd_obj.ensure_finalized() - return cmd_obj - - # XXX rename to 'get_reinitialized_command()'? (should do the - # same in dist.py, if so) - def reinitialize_command(self, command, reinit_subcommands=0): - return self.distribution.reinitialize_command(command, - reinit_subcommands) - - def run_command(self, command): - """Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. - """ - self.distribution.run_command(command) - - def get_sub_commands(self): - """Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - commands = [] - for (cmd_name, method) in self.sub_commands: - if method is None or method(self): - commands.append(cmd_name) - return commands - - - # -- External world manipulation ----------------------------------- - - def warn(self, msg): - log.warn("warning: %s: %s\n", self.get_command_name(), msg) - - def execute(self, func, args, msg=None, level=1): - util.execute(func, args, msg, dry_run=self.dry_run) - - def mkpath(self, name, mode=0o777): - dir_util.mkpath(name, mode, dry_run=self.dry_run) - - def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, - link=None, level=1): - """Copy a file respecting verbose, dry-run and force flags. (The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.)""" - return file_util.copy_file(infile, outfile, preserve_mode, - preserve_times, not self.force, link, - dry_run=self.dry_run) - - def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, level=1): - """Copy an entire directory tree respecting verbose, dry-run, - and force flags. 
- """ - return dir_util.copy_tree(infile, outfile, preserve_mode, - preserve_times, preserve_symlinks, - not self.force, dry_run=self.dry_run) - - def move_file (self, src, dst, level=1): - """Move a file respecting dry-run flag.""" - return file_util.move_file(src, dst, dry_run=self.dry_run) - - def spawn(self, cmd, search_path=1, level=1): - """Spawn an external command respecting dry-run flag.""" - from distutils.spawn import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return archive_util.make_archive(base_name, format, root_dir, base_dir, - dry_run=self.dry_run, - owner=owner, group=group) - - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): - """Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. - """ - if skip_msg is None: - skip_msg = "skipping %s (inputs unchanged)" % outfile - - # Allow 'infiles' to be a single string - if isinstance(infiles, str): - infiles = (infiles,) - elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") - - if exec_msg is None: - exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) - - # If 'outfile' must be regenerated (either because it doesn't - # exist, is out-of-date, or the 'force' flag is true) then - # perform the action that presumably regenerates it - if self.force or dep_util.newer_group(infiles, outfile): - self.execute(func, args, exec_msg, level) - # Otherwise, print the "skip" message - else: - log.debug(skip_msg) diff --git a/Lib/distutils/command/__init__.py b/Lib/distutils/command/__init__.py deleted file mode 100644 index fd0bfae7ade6ff..00000000000000 --- a/Lib/distutils/command/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -"""distutils.command - -Package containing implementation of all the standard Distutils -commands.""" - -__all__ = ['build', - 'build_py', - 'build_ext', - 'build_clib', - 'build_scripts', - 'clean', - 'install', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - 'sdist', - 'register', - 'bdist', - 'bdist_dumb', - 'bdist_rpm', - 'check', - 'upload', - # These two are reserved for future use: - #'bdist_sdux', - #'bdist_pkgtool', - # Note: - # bdist_packager is not included because it only provides - # an abstract base class - ] diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py deleted file mode 100644 index 60309e1ff2fce3..00000000000000 --- a/Lib/distutils/command/bdist.py +++ /dev/null @@ -1,138 +0,0 @@ -"""distutils.command.bdist - -Implements the Distutils 'bdist' command (create a built [binary] -distribution).""" - -import os -from distutils.core import Command -from distutils.errors import * -from distutils.util import get_platform - - -def show_formats(): - """Print list of available formats (arguments to "--format" option). 
- """ - from distutils.fancy_getopt import FancyGetopt - formats = [] - for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) - pretty_printer = FancyGetopt(formats) - pretty_printer.print_help("List of available distribution formats:") - - -class bdist(Command): - - description = "create a built (binary) distribution" - - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['skip-build'] - - help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] - - # The following commands do not take a format option from bdist - no_format_option = ('bdist_rpm',) - - # This won't do in reality: will need to distinguish RPM-ish Linux, - # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. - default_format = {'posix': 'gztar', - 'nt': 'zip'} - - # Establish the preferred order (for the --help-formats option). - format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'] - - # And the real information. - format_command = {'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'zip': ('bdist_dumb', "ZIP file"), - } - - def initialize_options(self): - self.bdist_base = None - self.plat_name = None - self.formats = None - self.dist_dir = None - self.skip_build = 0 - self.group = None - self.owner = None - - def finalize_options(self): - # have to finalize 'plat_name' before 'bdist_base' - if self.plat_name is None: - if self.skip_build: - self.plat_name = get_platform() - else: - self.plat_name = self.get_finalized_command('build').plat_name - - # 'bdist_base' -- parent of per-built-distribution-format - # temporary directories (eg. we'll probably have - # "build/bdist./dumb", "build/bdist./rpm", etc.) - if self.bdist_base is None: - build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) - - self.ensure_string_list('formats') - if self.formats is None: - try: - self.formats = [self.default_format[os.name]] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # Figure out which sub-commands we need to run. - commands = [] - for format in self.formats: - try: - commands.append(self.format_command[format][0]) - except KeyError: - raise DistutilsOptionError("invalid format '%s'" % format) - - # Reinitialize and run each command. 
- for i in range(len(self.formats)): - cmd_name = commands[i] - sub_cmd = self.reinitialize_command(cmd_name) - if cmd_name not in self.no_format_option: - sub_cmd.format = self.formats[i] - - # passing the owner and group names for tar archiving - if cmd_name == 'bdist_dumb': - sub_cmd.owner = self.owner - sub_cmd.group = self.group - - # If we're going to need to run this command again, tell it to - # keep its temporary files around so subsequent runs go faster. - if cmd_name in commands[i+1:]: - sub_cmd.keep_temp = 1 - self.run_command(cmd_name) diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py deleted file mode 100644 index f0d6b5b8cd8ab3..00000000000000 --- a/Lib/distutils/command/bdist_dumb.py +++ /dev/null @@ -1,123 +0,0 @@ -"""distutils.command.bdist_dumb - -Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix).""" - -import os -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree, ensure_relative -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_dumb(Command): - - description = "create a \"dumb\" built distribution" - - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, xztar, " - "ztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative'] - - default_format = { 'posix': 'gztar', - 'nt': 'zip' } - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.format = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = None - self.relative = 0 - self.owner = None - self.group = None - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'dumb') - - if self.format is None: - try: - self.format = self.default_format[os.name] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ('skip_build', 'skip_build')) - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - - log.info("installing to %s", self.bdist_dir) - self.run_command('install') - - # And make an archive relative to the root of the - # pseudo-installation tree. 
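# A hypothetical sketch (not taken from the diff) of the archive-naming step
# that the comment above introduces: bdist_dumb built
# "<name>-<version>.<platform>" under the dist directory and handed it to
# make_archive. The project name, version and platform strings are made up.
import os

def dumb_archive_path(fullname, plat_name, dist_dir="dist"):
    # Mirrors the "%s.%s" % (fullname, plat_name) pattern used by the
    # removed command.
    archive_basename = "%s.%s" % (fullname, plat_name)
    return os.path.join(dist_dir, archive_basename)

print(dumb_archive_path("demo-1.0", "linux-x86_64"))
# -> dist/demo-1.0.linux-x86_64  (make_archive then appends .tar.gz, .zip, ...)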
- archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) - - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) - if not self.relative: - archive_root = self.bdist_dir - else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): - raise DistutilsPlatformError( - "can't make a dumb built distribution where " - "base and platbase are different (%s, %s)" - % (repr(install.install_base), - repr(install.install_platbase))) - else: - archive_root = os.path.join(self.bdist_dir, - ensure_relative(install.install_base)) - - # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py deleted file mode 100644 index 550cbfa1e28a23..00000000000000 --- a/Lib/distutils/command/bdist_rpm.py +++ /dev/null @@ -1,579 +0,0 @@ -"""distutils.command.bdist_rpm - -Implements the Distutils 'bdist_rpm' command (create RPM source and binary -distributions).""" - -import subprocess, sys, os -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.file_util import write_file -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_rpm(Command): - - description = "create an RPM distribution" - - user_options = [ - ('bdist-base=', None, - "base directory for creating built distributions"), - ('rpm-base=', None, - "base directory for creating RPMs (defaults to \"rpm\" under " - "--bdist-base; must be specified for RPM 2)"), - ('dist-dir=', 'd', - "directory to put final RPM files in " - "(and .spec files if --spec-only)"), - ('python=', None, - "path to Python interpreter to hard-code in the .spec file " - "(default: \"python\")"), - ('fix-python', None, - "hard-code the exact path to the current Python interpreter in " - "the .spec file"), - ('spec-only', None, - "only regenerate spec file"), - ('source-only', None, - "only generate source RPM"), - ('binary-only', None, - "only generate binary RPM"), - ('use-bzip2', None, - "use bzip2 instead of gzip to create source distribution"), - - # More meta-data: too RPM-specific to put in the setup script, - # but needs to go in the .spec file -- so we make these options - # to "bdist_rpm". The idea is that packagers would put this - # info in setup.cfg, although they are of course free to - # supply it on the command line. - ('distribution-name=', None, - "name of the (Linux) distribution to which this " - "RPM applies (*not* the name of the module distribution!)"), - ('group=', None, - "package classification [default: \"Development/Libraries\"]"), - ('release=', None, - "RPM release number"), - ('serial=', None, - "RPM serial number"), - ('vendor=', None, - "RPM \"vendor\" (eg. \"Joe Blow \") " - "[default: maintainer or author from setup script]"), - ('packager=', None, - "RPM packager (eg. 
\"Jane Doe \") " - "[default: vendor]"), - ('doc-files=', None, - "list of documentation files (space or comma-separated)"), - ('changelog=', None, - "RPM changelog"), - ('icon=', None, - "name of icon file"), - ('provides=', None, - "capabilities provided by this package"), - ('requires=', None, - "capabilities required by this package"), - ('conflicts=', None, - "capabilities which conflict with this package"), - ('build-requires=', None, - "capabilities required to build this package"), - ('obsoletes=', None, - "capabilities made obsolete by this package"), - ('no-autoreq', None, - "do not automatically calculate dependencies"), - - # Actions to take when building RPM - ('keep-temp', 'k', - "don't clean up RPM build directory"), - ('no-keep-temp', None, - "clean up RPM build directory [default]"), - ('use-rpm-opt-flags', None, - "compile with RPM_OPT_FLAGS when building from source RPM"), - ('no-rpm-opt-flags', None, - "do not pass any RPM CFLAGS to compiler"), - ('rpm3-mode', None, - "RPM 3 compatibility mode (default)"), - ('rpm2-mode', None, - "RPM 2 compatibility mode"), - - # Add the hooks necessary for specifying custom scripts - ('prep-script=', None, - "Specify a script for the PREP phase of RPM building"), - ('build-script=', None, - "Specify a script for the BUILD phase of RPM building"), - - ('pre-install=', None, - "Specify a script for the pre-INSTALL phase of RPM building"), - ('install-script=', None, - "Specify a script for the INSTALL phase of RPM building"), - ('post-install=', None, - "Specify a script for the post-INSTALL phase of RPM building"), - - ('pre-uninstall=', None, - "Specify a script for the pre-UNINSTALL phase of RPM building"), - ('post-uninstall=', None, - "Specify a script for the post-UNINSTALL phase of RPM building"), - - ('clean-script=', None, - "Specify a script for the CLEAN phase of RPM building"), - - ('verify-script=', None, - "Specify a script for the VERIFY phase of the RPM build"), - - # Allow a packager to explicitly force an architecture - ('force-arch=', None, - "Force an architecture onto the RPM build process"), - - ('quiet', 'q', - "Run the INSTALL phase of RPM building in quiet mode"), - ] - - boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', - 'no-autoreq', 'quiet'] - - negative_opt = {'no-keep-temp': 'keep-temp', - 'no-rpm-opt-flags': 'use-rpm-opt-flags', - 'rpm2-mode': 'rpm3-mode'} - - - def initialize_options(self): - self.bdist_base = None - self.rpm_base = None - self.dist_dir = None - self.python = None - self.fix_python = None - self.spec_only = None - self.binary_only = None - self.source_only = None - self.use_bzip2 = None - - self.distribution_name = None - self.group = None - self.release = None - self.serial = None - self.vendor = None - self.packager = None - self.doc_files = None - self.changelog = None - self.icon = None - - self.prep_script = None - self.build_script = None - self.install_script = None - self.clean_script = None - self.verify_script = None - self.pre_install = None - self.post_install = None - self.pre_uninstall = None - self.post_uninstall = None - self.prep = None - self.provides = None - self.requires = None - self.conflicts = None - self.build_requires = None - self.obsoletes = None - - self.keep_temp = 0 - self.use_rpm_opt_flags = 1 - self.rpm3_mode = 1 - self.no_autoreq = 0 - - self.force_arch = None - self.quiet = 0 - - def finalize_options(self): - self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) - if self.rpm_base is None: - if not self.rpm3_mode: - raise 
DistutilsOptionError( - "you must specify --rpm-base in RPM 2 mode") - self.rpm_base = os.path.join(self.bdist_base, "rpm") - - if self.python is None: - if self.fix_python: - self.python = sys.executable - else: - self.python = "python3" - elif self.fix_python: - raise DistutilsOptionError( - "--python and --fix-python are mutually exclusive options") - - if os.name != 'posix': - raise DistutilsPlatformError("don't know how to create RPM " - "distributions on platform %s" % os.name) - if self.binary_only and self.source_only: - raise DistutilsOptionError( - "cannot supply both '--source-only' and '--binary-only'") - - # don't pass CFLAGS to pure python distributions - if not self.distribution.has_ext_modules(): - self.use_rpm_opt_flags = 0 - - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - self.finalize_package_data() - - def finalize_package_data(self): - self.ensure_string('group', "Development/Libraries") - self.ensure_string('vendor', - "%s <%s>" % (self.distribution.get_contact(), - self.distribution.get_contact_email())) - self.ensure_string('packager') - self.ensure_string_list('doc_files') - if isinstance(self.doc_files, list): - for readme in ('README', 'README.txt'): - if os.path.exists(readme) and readme not in self.doc_files: - self.doc_files.append(readme) - - self.ensure_string('release', "1") - self.ensure_string('serial') # should it be an int? - - self.ensure_string('distribution_name') - - self.ensure_string('changelog') - # Format changelog correctly - self.changelog = self._format_changelog(self.changelog) - - self.ensure_filename('icon') - - self.ensure_filename('prep_script') - self.ensure_filename('build_script') - self.ensure_filename('install_script') - self.ensure_filename('clean_script') - self.ensure_filename('verify_script') - self.ensure_filename('pre_install') - self.ensure_filename('post_install') - self.ensure_filename('pre_uninstall') - self.ensure_filename('post_uninstall') - - # XXX don't forget we punted on summaries and descriptions -- they - # should be handled here eventually! - - # Now *this* is some meta-data that belongs in the setup script... - self.ensure_string_list('provides') - self.ensure_string_list('requires') - self.ensure_string_list('conflicts') - self.ensure_string_list('build_requires') - self.ensure_string_list('obsoletes') - - self.ensure_string('force_arch') - - def run(self): - if DEBUG: - print("before _get_package_data():") - print("vendor =", self.vendor) - print("packager =", self.packager) - print("doc_files =", self.doc_files) - print("changelog =", self.changelog) - - # make directories - if self.spec_only: - spec_dir = self.dist_dir - self.mkpath(spec_dir) - else: - rpm_dir = {} - for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): - rpm_dir[d] = os.path.join(self.rpm_base, d) - self.mkpath(rpm_dir[d]) - spec_dir = rpm_dir['SPECS'] - - # Spec file goes into 'dist_dir' if '--spec-only specified', - # build/rpm. otherwise. - spec_path = os.path.join(spec_dir, - "%s.spec" % self.distribution.get_name()) - self.execute(write_file, - (spec_path, - self._make_spec_file()), - "writing '%s'" % spec_path) - - if self.spec_only: # stop if requested - return - - # Make a source distribution and copy to SOURCES directory with - # optional icon. 
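# A small standalone sketch (example paths invented, not part of the removed
# module) of the RPM working tree and spec-file path that bdist_rpm.run()
# laid out above before invoking rpmbuild.
import os

def rpm_layout(rpm_base, dist_name):
    # One subdirectory per rpmbuild stage, plus the generated .spec file.
    rpm_dir = {d: os.path.join(rpm_base, d)
               for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS')}
    spec_path = os.path.join(rpm_dir['SPECS'], "%s.spec" % dist_name)
    return rpm_dir, spec_path

dirs, spec = rpm_layout("build/bdist.linux-x86_64/rpm", "demo")
print(spec)  # build/bdist.linux-x86_64/rpm/SPECS/demo.spec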
- saved_dist_files = self.distribution.dist_files[:] - sdist = self.reinitialize_command('sdist') - if self.use_bzip2: - sdist.formats = ['bztar'] - else: - sdist.formats = ['gztar'] - self.run_command('sdist') - self.distribution.dist_files = saved_dist_files - - source = sdist.get_archive_files()[0] - source_dir = rpm_dir['SOURCES'] - self.copy_file(source, source_dir) - - if self.icon: - if os.path.exists(self.icon): - self.copy_file(self.icon, source_dir) - else: - raise DistutilsFileError( - "icon file '%s' does not exist" % self.icon) - - # build package - log.info("building RPMs") - rpm_cmd = ['rpmbuild'] - - if self.source_only: # what kind of RPMs? - rpm_cmd.append('-bs') - elif self.binary_only: - rpm_cmd.append('-bb') - else: - rpm_cmd.append('-ba') - rpm_cmd.extend(['--define', '__python %s' % self.python]) - if self.rpm3_mode: - rpm_cmd.extend(['--define', - '_topdir %s' % os.path.abspath(self.rpm_base)]) - if not self.keep_temp: - rpm_cmd.append('--clean') - - if self.quiet: - rpm_cmd.append('--quiet') - - rpm_cmd.append(spec_path) - # Determine the binary rpm names that should be built out of this spec - # file - # Note that some of these may not be really built (if the file - # list is empty) - nvr_string = "%{name}-%{version}-%{release}" - src_rpm = nvr_string + ".src.rpm" - non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" - q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( - src_rpm, non_src_rpm, spec_path) - - out = os.popen(q_cmd) - try: - binary_rpms = [] - source_rpm = None - while True: - line = out.readline() - if not line: - break - l = line.strip().split() - assert(len(l) == 2) - binary_rpms.append(l[1]) - # The source rpm is named after the first entry in the spec file - if source_rpm is None: - source_rpm = l[0] - - status = out.close() - if status: - raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) - - finally: - out.close() - - self.spawn(rpm_cmd) - - if not self.dry_run: - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - - if not self.binary_only: - srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) - assert(os.path.exists(srpm)) - self.move_file(srpm, self.dist_dir) - filename = os.path.join(self.dist_dir, source_rpm) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - if not self.source_only: - for rpm in binary_rpms: - rpm = os.path.join(rpm_dir['RPMS'], rpm) - if os.path.exists(rpm): - self.move_file(rpm, self.dist_dir) - filename = os.path.join(self.dist_dir, - os.path.basename(rpm)) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - def _dist_path(self, path): - return os.path.join(self.dist_dir, os.path.basename(path)) - - def _make_spec_file(self): - """Generate the text of an RPM spec file and return it as a - list of strings (one per line). 
- """ - # definitions and headers - spec_file = [ - '%define name ' + self.distribution.get_name(), - '%define version ' + self.distribution.get_version().replace('-','_'), - '%define unmangled_version ' + self.distribution.get_version(), - '%define release ' + self.release.replace('-','_'), - '', - 'Summary: ' + self.distribution.get_description(), - ] - - # Workaround for #14443 which affects some RPM based systems such as - # RHEL6 (and probably derivatives) - vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') - # Generate a potential replacement value for __os_install_post (whilst - # normalizing the whitespace to simplify the test for whether the - # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join([' %s \\' % line.strip() - for line in vendor_hook.splitlines()]) - problem = "brp-python-bytecompile \\\n" - fixed = "brp-python-bytecompile %{__python} \\\n" - fixed_hook = vendor_hook.replace(problem, fixed) - if fixed_hook != vendor_hook: - spec_file.append('# Workaround for http://bugs.python.org/issue14443') - spec_file.append('%define __os_install_post ' + fixed_hook + '\n') - - # put locale summaries into spec file - # XXX not supported for now (hard to put a dictionary - # in a config file -- arg!) - #for locale in self.summaries.keys(): - # spec_file.append('Summary(%s): %s' % (locale, - # self.summaries[locale])) - - spec_file.extend([ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}',]) - - # XXX yuck! this filename is available from the "sdist" command, - # but only after it has run: and we create the spec file before - # running "sdist", in case of --spec-only. - if self.use_bzip2: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') - else: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - - spec_file.extend([ - 'License: ' + self.distribution.get_license(), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', ]) - - if not self.force_arch: - # noarch if no extension modules - if not self.distribution.has_ext_modules(): - spec_file.append('BuildArch: noarch') - else: - spec_file.append( 'BuildArch: %s' % self.force_arch ) - - for field in ('Vendor', - 'Packager', - 'Provides', - 'Requires', - 'Conflicts', - 'Obsoletes', - ): - val = getattr(self, field.lower()) - if isinstance(val, list): - spec_file.append('%s: %s' % (field, ' '.join(val))) - elif val is not None: - spec_file.append('%s: %s' % (field, val)) - - - if self.distribution.get_url() != 'UNKNOWN': - spec_file.append('Url: ' + self.distribution.get_url()) - - if self.distribution_name: - spec_file.append('Distribution: ' + self.distribution_name) - - if self.build_requires: - spec_file.append('BuildRequires: ' + - ' '.join(self.build_requires)) - - if self.icon: - spec_file.append('Icon: ' + os.path.basename(self.icon)) - - if self.no_autoreq: - spec_file.append('AutoReq: 0') - - spec_file.extend([ - '', - '%description', - self.distribution.get_long_description() - ]) - - # put locale descriptions into spec file - # XXX again, suppressed because config file syntax doesn't - # easily support this ;-( - #for locale in self.descriptions.keys(): - # spec_file.extend([ - # '', - # '%description -l ' + locale, - # self.descriptions[locale], - # ]) - - # rpm scripts - # figure out default build script - def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) - def_build = "%s build" % def_setup_call - if self.use_rpm_opt_flags: - def_build 
= 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build - - # insert contents of files - - # XXX this is kind of misleading: user-supplied options are files - # that we open and interpolate into the spec file, but the defaults - # are just text that we drop in as-is. Hmmm. - - install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' - '--record=INSTALLED_FILES') % def_setup_call - - script_options = [ - ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), - ('build', 'build_script', def_build), - ('install', 'install_script', install_cmd), - ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), - ('verifyscript', 'verify_script', None), - ('pre', 'pre_install', None), - ('post', 'post_install', None), - ('preun', 'pre_uninstall', None), - ('postun', 'post_uninstall', None), - ] - - for (rpm_opt, attr, default) in script_options: - # Insert contents of file referred to, if no file is referred to - # use 'default' as contents of script - val = getattr(self, attr) - if val or default: - spec_file.extend([ - '', - '%' + rpm_opt,]) - if val: - with open(val) as f: - spec_file.extend(f.read().split('\n')) - else: - spec_file.append(default) - - - # files section - spec_file.extend([ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ]) - - if self.doc_files: - spec_file.append('%doc ' + ' '.join(self.doc_files)) - - if self.changelog: - spec_file.extend([ - '', - '%changelog',]) - spec_file.extend(self.changelog) - - return spec_file - - def _format_changelog(self, changelog): - """Format the changelog correctly and convert it to a list of strings - """ - if not changelog: - return changelog - new_changelog = [] - for line in changelog.strip().split('\n'): - line = line.strip() - if line[0] == '*': - new_changelog.extend(['', line]) - elif line[0] == '-': - new_changelog.append(line) - else: - new_changelog.append(' ' + line) - - # strip trailing newline inserted by first changelog entry - if not new_changelog[0]: - del new_changelog[0] - - return new_changelog diff --git a/Lib/distutils/command/build.py b/Lib/distutils/command/build.py deleted file mode 100644 index a86df0bc7f9218..00000000000000 --- a/Lib/distutils/command/build.py +++ /dev/null @@ -1,157 +0,0 @@ -"""distutils.command.build - -Implements the Distutils 'build' command.""" - -import sys, os -from distutils.core import Command -from distutils.errors import DistutilsOptionError -from distutils.util import get_platform - - -def show_compilers(): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build(Command): - - description = "build everything needed to install" - - user_options = [ - ('build-base=', 'b', - "base directory for build library"), - ('build-purelib=', None, - "build directory for platform-neutral distributions"), - ('build-platlib=', None, - "build directory for platform-specific distributions"), - ('build-lib=', None, - "build directory for all distribution (defaults to either " + - "build-purelib or build-platlib"), - ('build-scripts=', None, - "build directory for scripts"), - ('build-temp=', 't', - "temporary build directory"), - ('plat-name=', 'p', - "platform name to build for, if supported " - "(default: %s)" % get_platform()), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('debug', 'g', - "compile extensions and libraries with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('executable=', 'e', - "specify final destination interpreter path 
(build.py)"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_base = 'build' - # these are decided only after 'build_base' has its final value - # (unless overridden by the user or client) - self.build_purelib = None - self.build_platlib = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.compiler = None - self.plat_name = None - self.debug = None - self.force = 0 - self.executable = None - self.parallel = None - - def finalize_options(self): - if self.plat_name is None: - self.plat_name = get_platform() - else: - # plat-name only supported for windows (other platforms are - # supported via ./configure flags, if at all). Avoid misleading - # other platforms. - if os.name != 'nt': - raise DistutilsOptionError( - "--plat-name only supported on Windows (try " - "using './configure --help' on your platform)") - - plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2]) - - # Make it so Python 2.x and Python 2.x with --with-pydebug don't - # share the same build directories. Doing so confuses the build - # process for C modules - if hasattr(sys, 'gettotalrefcount'): - plat_specifier += '-pydebug' - - # 'build_purelib' and 'build_platlib' just default to 'lib' and - # 'lib.' under the base build directory. We only use one of - # them for a given distribution, though -- - if self.build_purelib is None: - self.build_purelib = os.path.join(self.build_base, 'lib') - if self.build_platlib is None: - self.build_platlib = os.path.join(self.build_base, - 'lib' + plat_specifier) - - # 'build_lib' is the actual directory that we will use for this - # particular module distribution -- if user didn't supply it, pick - # one of 'build_purelib' or 'build_platlib'. - if self.build_lib is None: - if self.distribution.ext_modules: - self.build_lib = self.build_platlib - else: - self.build_lib = self.build_purelib - - # 'build_temp' -- temporary directory for compiler turds, - # "build/temp." - if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) - if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) - - if self.executable is None and sys.executable: - self.executable = os.path.normpath(sys.executable) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - # Run all relevant sub-commands. 
This will be some subset of: - # - build_py - pure Python modules - # - build_clib - standalone C libraries - # - build_ext - Python extensions - # - build_scripts - (Python) scripts - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - - # -- Predicates for the sub-command list --------------------------- - - def has_pure_modules(self): - return self.distribution.has_pure_modules() - - def has_c_libraries(self): - return self.distribution.has_c_libraries() - - def has_ext_modules(self): - return self.distribution.has_ext_modules() - - def has_scripts(self): - return self.distribution.has_scripts() - - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] diff --git a/Lib/distutils/command/build_clib.py b/Lib/distutils/command/build_clib.py deleted file mode 100644 index 3e20ef23cd81e0..00000000000000 --- a/Lib/distutils/command/build_clib.py +++ /dev/null @@ -1,209 +0,0 @@ -"""distutils.command.build_clib - -Implements the Distutils 'build_clib' command, to build a C/C++ library -that is included in the module distribution and needed by an extension -module.""" - - -# XXX this module has *lots* of code ripped-off quite transparently from -# build_ext.py -- not surprisingly really, as the work required to build -# a static library from a collection of C source files is not really all -# that different from what's required to build a shared object file from -# a collection of C source files. Nevertheless, I haven't done the -# necessary refactoring to account for the overlap in code between the -# two modules, mainly because a number of subtle details changed in the -# cut 'n paste. Sigh. - -import os -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler -from distutils import log - -def show_compilers(): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_clib(Command): - - description = "build C/C++ libraries used by Python extensions" - - user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_clib = None - self.build_temp = None - - # List of libraries to build - self.libraries = None - - # Compilation options for all libraries - self.include_dirs = None - self.define = None - self.undef = None - self.debug = None - self.force = 0 - self.compiler = None - - - def finalize_options(self): - # This might be confusing: both build-clib and build-temp default - # to build-temp as defined by the "build" command. This is because - # I think that C libraries are really just temporary build - # by-products, at least from the point of view of building Python - # extensions -- but I want to keep my options open. 
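# An illustrative-only sketch of the "undefined means None" convention that
# set_undefined_options() relied on, as described in the Command docstrings
# earlier in this diff: a value is copied from another command only when the
# local attribute is still None. The two toy classes are hypothetical
# stand-ins for real command objects.
class _Build:
    build_temp = "build/temp.linux-x86_64-3.12"
    compiler = "unix"

class _BuildClib:
    build_temp = None          # never set -> inherit from 'build'
    compiler = "gcc"           # explicitly set -> keep as-is

def set_undefined_options(dst, src, *option_pairs):
    for src_opt, dst_opt in option_pairs:
        if getattr(dst, dst_opt) is None:
            setattr(dst, dst_opt, getattr(src, src_opt))

clib = _BuildClib()
set_undefined_options(clib, _Build(),
                      ('build_temp', 'build_temp'),
                      ('compiler', 'compiler'))
print(clib.build_temp)  # inherited: build/temp.linux-x86_64-3.12
print(clib.compiler)    # stays "gcc"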
- self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force')) - - self.libraries = self.distribution.libraries - if self.libraries: - self.check_library_list(self.libraries) - - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # XXX same as for build_ext -- what about 'self.define' and - # 'self.undef' ? - - - def run(self): - if not self.libraries: - return - - # Yech -- this is cut 'n pasted from build_ext.py! - from distutils.ccompiler import new_compiler - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - - if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - - self.build_libraries(self.libraries) - - - def check_library_list(self, libraries): - """Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - if not isinstance(libraries, list): - raise DistutilsSetupError( - "'libraries' option must be a list of tuples") - - for lib in libraries: - if not isinstance(lib, tuple) and len(lib) != 2: - raise DistutilsSetupError( - "each element of 'libraries' must a 2-tuple") - - name, build_info = lib - - if not isinstance(name, str): - raise DistutilsSetupError( - "first element of each tuple in 'libraries' " - "must be a string (the library name)") - - if '/' in name or (os.sep != '/' and os.sep in name): - raise DistutilsSetupError("bad library name '%s': " - "may not contain directory separators" % lib[0]) - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'libraries' " - "must be a dictionary (build info)") - - - def get_library_names(self): - # Assume the library list is valid -- 'check_library_list()' is - # called from 'finalize_options()', so it should be! 
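# An invented example (not from the diff) of the (name, build_info) 2-tuples
# that check_library_list() above accepted for the "libraries" option; the
# file names and macro are placeholders.
libraries = [
    ("spam", {                             # library name: no path separators
        "sources": ["spam.c", "eggs.c"],   # required list of source filenames
        "macros": [("NDEBUG", "1")],       # optional (name, value) pairs
        "include_dirs": ["include"],       # optional extra -I directories
    }),
]

def validate(libs):
    # Same shape checks as the removed check_library_list(), in miniature.
    for name, build_info in libs:
        assert isinstance(name, str) and "/" not in name
        assert isinstance(build_info, dict) and "sources" in build_info

validate(libraries)
print("libraries list is well-formed")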
- if not self.libraries: - return None - - lib_names = [] - for (lib_name, build_info) in self.libraries: - lib_names.append(lib_name) - return lib_names - - - def get_source_files(self): - self.check_library_list(self.libraries) - filenames = [] - for (lib_name, build_info) in self.libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - - filenames.extend(sources) - return filenames - - - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - sources = list(sources) - - log.info("building '%s' library", lib_name) - - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py deleted file mode 100644 index f287b349984ff0..00000000000000 --- a/Lib/distutils/command/build_ext.py +++ /dev/null @@ -1,754 +0,0 @@ -"""distutils.command.build_ext - -Implements the Distutils 'build_ext' command, for building extension -modules (currently limited to C extensions, should accommodate C++ -extensions ASAP).""" - -import contextlib -import os -import re -import sys -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler, get_python_version -from distutils.sysconfig import get_config_h_filename -from distutils.dep_util import newer_group -from distutils.extension import Extension -from distutils.util import get_platform -from distutils import log - -from site import USER_BASE - -# An extension name is just a dot-separated list of Python NAMEs (ie. -# the same as a fully-qualified module name). -extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') - - -def show_compilers (): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_ext(Command): - - description = "build C/C++ extensions (compile/link to build directory)" - - # XXX thoughts on how to deal with complex command-line options like - # these, i.e. how to make it so fancy_getopt can suck them off the - # command line and make it look like setup.py defined the appropriate - # lists of tuples of what-have-you. 
- # - each command needs a callback to process its command-line options - # - Command.__init__() needs access to its share of the whole - # command line (must ultimately come from - # Distribution.parse_command_line()) - # - it then calls the current command class' option-parsing - # callback to deal with weird options like -D, which have to - # parse the option text and churn out some custom data - # structure - # - that data structure (in this case, a list of 2-tuples) - # will then be present in the command object by the time - # we get to finalize_options() (i.e. the constructor - # takes care of both command-line and client options - # in between initialize_options() and finalize_options()) - - sep_by = " (separated by '%s')" % os.pathsep - user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('swig-cpp', None, - "make SWIG create C++ files (default is C)"), - ('swig-opts=', None, - "list of SWIG command line options"), - ('swig=', None, - "path to the SWIG executable"), - ('user', None, - "add user include, library and rpath") - ] - - boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.extensions = None - self.build_lib = None - self.plat_name = None - self.build_temp = None - self.inplace = 0 - self.package = None - - self.include_dirs = None - self.define = None - self.undef = None - self.libraries = None - self.library_dirs = None - self.rpath = None - self.link_objects = None - self.debug = None - self.force = None - self.compiler = None - self.swig = None - self.swig_cpp = None - self.swig_opts = None - self.user = None - self.parallel = None - - def finalize_options(self): - from distutils import sysconfig - - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force'), - ('parallel', 'parallel'), - ('plat_name', 'plat_name'), - ) - - if self.package is None: - self.package = self.distribution.ext_package - - self.extensions = self.distribution.ext_modules - - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. 
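# A small sketch (example paths are made up) of how build_ext normalised
# path-list options such as --include-dirs: a single string from the command
# line was split on os.pathsep, a list from setup.py was kept as-is, and
# Python's own include directory was appended last so local directories took
# precedence. The stdlib sysconfig module stands in here for the removed
# distutils.sysconfig helpers.
import os
import sysconfig

def normalise_include_dirs(value):
    dirs = value.split(os.pathsep) if isinstance(value, str) else list(value or [])
    dirs.append(sysconfig.get_paths()["include"])   # Python.h, pyconfig.h, ...
    return dirs

print(normalise_include_dirs("vendor/include" + os.pathsep + "src"))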
- py_include = sysconfig.get_python_inc() - plat_py_include = sysconfig.get_python_inc(plat_specific=1) - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # If in a virtualenv, add its include directory - # Issue 16116 - if sys.exec_prefix != sys.base_exec_prefix: - self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) - - # Put the Python "system" include dir at the end, so that - # any local include dirs take precedence. - self.include_dirs.extend(py_include.split(os.path.pathsep)) - if plat_py_include != py_include: - self.include_dirs.extend( - plat_py_include.split(os.path.pathsep)) - - self.ensure_string_list('libraries') - self.ensure_string_list('link_objects') - - # Life is easier if we're not forever checking for None, so - # simplify these options to empty lists if unset - if self.libraries is None: - self.libraries = [] - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - if self.rpath is None: - self.rpath = [] - elif isinstance(self.rpath, str): - self.rpath = self.rpath.split(os.pathsep) - - # for extensions under windows use different directories - # for Release and Debug builds. - # also Python's library directory must be appended to library_dirs - if os.name == 'nt': - # the 'libs' directory is for binary installs - we assume that - # must be the *native* platform. But we don't really support - # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) - if sys.base_exec_prefix != sys.prefix: # Issue 16116 - self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) - if self.debug: - self.build_temp = os.path.join(self.build_temp, "Debug") - else: - self.build_temp = os.path.join(self.build_temp, "Release") - - # Append the source distribution include and library directories, - # this allows distutils on windows to work in the source tree - self.include_dirs.append(os.path.dirname(get_config_h_filename())) - _sys_home = getattr(sys, '_home', None) - if _sys_home: - self.library_dirs.append(_sys_home) - - # Use the .lib files for the correct architecture - if self.plat_name == 'win32': - suffix = 'win32' - else: - # win-amd64 - suffix = self.plat_name[4:] - new_lib = os.path.join(sys.exec_prefix, 'PCbuild') - if suffix: - new_lib = os.path.join(new_lib, suffix) - self.library_dirs.append(new_lib) - - # For extensions under Cygwin, Python's library directory must be - # appended to library_dirs - if sys.platform[:6] == 'cygwin': - if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): - # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + get_python_version(), - "config")) - else: - # building python standard extensions - self.library_dirs.append('.') - - # For building extensions with a shared Python library, - # Python's library directory must be appended to library_dirs - # See Issues: #1600860, #4366 - if (sysconfig.get_config_var('Py_ENABLE_SHARED')): - if not sysconfig.python_build: - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) - else: - # building python standard extensions - self.library_dirs.append('.') - - # The argument parsing will result in self.define being a string, but - # it has to be a list of 
2-tuples. All the preprocessor symbols - # specified by the 'define' option will be set to '1'. Multiple - # symbols can be separated with commas. - - if self.define: - defines = self.define.split(',') - self.define = [(symbol, '1') for symbol in defines] - - # The option for macros to undefine is also a string from the - # option parsing, but has to be a list. Multiple symbols can also - # be separated with commas here. - if self.undef: - self.undef = self.undef.split(',') - - if self.swig_opts is None: - self.swig_opts = [] - else: - self.swig_opts = self.swig_opts.split(' ') - - # Finally add the user include and library directories if requested - if self.user: - user_include = os.path.join(USER_BASE, "include") - user_lib = os.path.join(USER_BASE, "lib") - if os.path.isdir(user_include): - self.include_dirs.append(user_include) - if os.path.isdir(user_lib): - self.library_dirs.append(user_lib) - self.rpath.append(user_lib) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - from distutils.ccompiler import new_compiler - - # 'self.extensions', as supplied by setup.py, is a list of - # Extension instances. See the documentation for Extension (in - # distutils.extension) for details. - # - # For backwards compatibility with Distutils 0.8.2 and earlier, we - # also allow the 'extensions' list to be a list of tuples: - # (ext_name, build_info) - # where build_info is a dictionary containing everything that - # Extension instances do except the name, with a few things being - # differently named. We convert these 2-tuples to Extension - # instances as needed. - - if not self.extensions: - return - - # If we were asked to build any C/C++ libraries, make sure that the - # directory where we put them is in the library search path for - # linking extensions. - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.libraries.extend(build_clib.get_library_names() or []) - self.library_dirs.append(build_clib.build_clib) - - # Setup the CCompiler object that we'll use to do all the - # compiling and linking - self.compiler = new_compiler(compiler=self.compiler, - verbose=self.verbose, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - # If we are cross-compiling, init the compiler now (if we are not - # cross-compiling, init would not hurt, but people may rely on - # late initialization of compiler even if they shouldn't...) - if os.name == 'nt' and self.plat_name != get_platform(): - self.compiler.initialize(self.plat_name) - - # And make sure that any compile/link-related options (which might - # come from the command-line or from the setup script) are set in - # that CCompiler object -- that way, they automatically apply to - # all compiling and linking done here. 
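# A standalone illustration (sample symbols invented) of the -D/-U option
# parsing described just above: a comma-separated string from the command
# line becomes a list of (name, '1') tuples for defines and a plain list of
# names for undefines.
def parse_macro_options(define=None, undef=None):
    defines = [(sym, '1') for sym in define.split(',')] if define else []
    undefs = undef.split(',') if undef else []
    return defines, undefs

print(parse_macro_options("WITH_SSL,DEBUG_LOG", "NDEBUG"))
# -> ([('WITH_SSL', '1'), ('DEBUG_LOG', '1')], ['NDEBUG'])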
- if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - if self.libraries is not None: - self.compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - self.compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - self.compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - self.compiler.set_link_objects(self.link_objects) - - # Now actually compile and link everything. - self.build_extensions() - - def check_extensions_list(self, extensions): - """Ensure that the list of extensions (presumably provided as a - command option 'extensions') is valid, i.e. it is a list of - Extension objects. We also support the old-style list of 2-tuples, - where the tuples are (ext_name, build_info), which are converted to - Extension instances here. - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - if not isinstance(extensions, list): - raise DistutilsSetupError( - "'ext_modules' option must be a list of Extension instances") - - for i, ext in enumerate(extensions): - if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) - - if not isinstance(ext, tuple) or len(ext) != 2: - raise DistutilsSetupError( - "each element of 'ext_modules' option must be an " - "Extension instance or 2-tuple") - - ext_name, build_info = ext - - log.warn("old-style (ext_name, build_info) tuple found in " - "ext_modules for extension '%s' " - "-- please convert to Extension instance", ext_name) - - if not (isinstance(ext_name, str) and - extension_name_re.match(ext_name)): - raise DistutilsSetupError( - "first element of each tuple in 'ext_modules' " - "must be the extension name (a string)") - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'ext_modules' " - "must be a dictionary (build info)") - - # OK, the (ext_name, build_info) dict is type-safe: convert it - # to an Extension instance. - ext = Extension(ext_name, build_info['sources']) - - # Easy stuff: one-to-one mapping from dict elements to - # instance attributes. - for key in ('include_dirs', 'library_dirs', 'libraries', - 'extra_objects', 'extra_compile_args', - 'extra_link_args'): - val = build_info.get(key) - if val is not None: - setattr(ext, key, val) - - # Medium-easy stuff: same syntax/semantics, different names. - ext.runtime_library_dirs = build_info.get('rpath') - if 'def_file' in build_info: - log.warn("'def_file' element of build info dict " - "no longer supported") - - # Non-trivial stuff: 'macros' split into 'define_macros' - # and 'undef_macros'. - macros = build_info.get('macros') - if macros: - ext.define_macros = [] - ext.undef_macros = [] - for macro in macros: - if not (isinstance(macro, tuple) and len(macro) in (1, 2)): - raise DistutilsSetupError( - "'macros' element of build info dict " - "must be 1- or 2-tuple") - if len(macro) == 1: - ext.undef_macros.append(macro[0]) - elif len(macro) == 2: - ext.define_macros.append(macro) - - extensions[i] = ext - - def get_source_files(self): - self.check_extensions_list(self.extensions) - filenames = [] - - # Wouldn't it be neat if we knew the names of header files too... 
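# A hypothetical sketch of the legacy (ext_name, build_info) form that
# check_extensions_list() above still converted; the stand-in Extension
# class below only mimics the attributes touched by that conversion and is
# not the real distutils.extension.Extension.
class Extension:
    def __init__(self, name, sources, **kwargs):
        self.name, self.sources = name, sources
        for key, val in kwargs.items():
            setattr(self, key, val)

old_style = ("pkg._speedups", {
    "sources": ["pkg/_speedups.c"],
    "include_dirs": ["include"],
    "macros": [("WITH_FAST_PATH", "1")],   # 2-tuple -> define, 1-tuple -> undef
})

name, info = old_style
ext = Extension(name, info["sources"],
                include_dirs=info["include_dirs"],
                define_macros=[m for m in info["macros"] if len(m) == 2])
print(ext.name, ext.sources, ext.define_macros)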
- for ext in self.extensions: - filenames.extend(ext.sources) - return filenames - - def get_outputs(self): - # Sanity check the 'extensions' list -- can't assume this is being - # done in the same run as a 'build_extensions()' call (in fact, we - # can probably assume that it *isn't*!). - self.check_extensions_list(self.extensions) - - # And build the list of output (built) filenames. Note that this - # ignores the 'inplace' flag, and assumes everything goes in the - # "build" tree. - outputs = [] - for ext in self.extensions: - outputs.append(self.get_ext_fullpath(ext.name)) - return outputs - - def build_extensions(self): - # First, sanity-check the 'extensions' list - self.check_extensions_list(self.extensions) - if self.parallel: - self._build_extensions_parallel() - else: - self._build_extensions_serial() - - def _build_extensions_parallel(self): - workers = self.parallel - if self.parallel is True: - workers = os.cpu_count() # may return None - try: - from concurrent.futures import ThreadPoolExecutor - except ImportError: - workers = None - - if workers is None: - self._build_extensions_serial() - return - - with ThreadPoolExecutor(max_workers=workers) as executor: - futures = [executor.submit(self.build_extension, ext) - for ext in self.extensions] - for ext, fut in zip(self.extensions, futures): - with self._filter_build_errors(ext): - fut.result() - - def _build_extensions_serial(self): - for ext in self.extensions: - with self._filter_build_errors(ext): - self.build_extension(ext) - - @contextlib.contextmanager - def _filter_build_errors(self, ext): - try: - yield - except (CCompilerError, DistutilsError, CompileError) as e: - if not ext.optional: - raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) - - def build_extension(self, ext): - sources = ext.sources - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'ext_modules' option (extension '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % ext.name) - # sort to make the resulting .so file build reproducible - sources = sorted(sources) - - ext_path = self.get_ext_fullpath(ext.name) - depends = sources + ext.depends - if not (self.force or newer_group(depends, ext_path, 'newer')): - log.debug("skipping '%s' extension (up-to-date)", ext.name) - return - else: - log.info("building '%s' extension", ext.name) - - # First, scan the sources for SWIG definition files (.i), run - # SWIG on 'em to create .c files, and modify the sources list - # accordingly. - sources = self.swig_sources(sources, ext) - - # Next, compile the source code to object files. - - # XXX not honouring 'define_macros' or 'undef_macros' -- the - # CCompiler API needs to change to accommodate this, and I - # want to do one thing at a time! - - # Two possible sources for extra compiler arguments: - # - 'extra_compile_args' in Extension object - # - CFLAGS environment variable (not particularly - # elegant, but people seem to expect it and I - # guess it's useful) - # The environment variable should take precedence, and - # any sensible compiler will give precedence to later - # command line args. 
Hence we combine them in order: - extra_args = ext.extra_compile_args or [] - - macros = ext.define_macros[:] - for undef in ext.undef_macros: - macros.append((undef,)) - - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) - - # XXX outdated variable, kept here in case third-part code - # needs it. - self._built_objects = objects[:] - - # Now link the object files together into a "shared object" -- - # of course, first we have to figure out all the other things - # that go into the mix. - if ext.extra_objects: - objects.extend(ext.extra_objects) - extra_args = ext.extra_link_args or [] - - # Detect target language, if not provided - language = ext.language or self.compiler.detect_language(sources) - - self.compiler.link_shared_object( - objects, ext_path, - libraries=self.get_libraries(ext), - library_dirs=ext.library_dirs, - runtime_library_dirs=ext.runtime_library_dirs, - extra_postargs=extra_args, - export_symbols=self.get_export_symbols(ext), - debug=self.debug, - build_temp=self.build_temp, - target_lang=language) - - def swig_sources(self, sources, extension): - """Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - new_sources = [] - swig_sources = [] - swig_targets = {} - - # XXX this drops generated C/C++ files into the source tree, which - # is fine for developers who want to distribute the generated - # source -- but there should be an option to put SWIG output in - # the temp dir. - - if self.swig_cpp: - log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") - - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): - target_ext = '.cpp' - else: - target_ext = '.c' - - for source in sources: - (base, ext) = os.path.splitext(source) - if ext == ".i": # SWIG interface file - new_sources.append(base + '_wrap' + target_ext) - swig_sources.append(source) - swig_targets[source] = new_sources[-1] - else: - new_sources.append(source) - - if not swig_sources: - return new_sources - - swig = self.swig or self.find_swig() - swig_cmd = [swig, "-python"] - swig_cmd.extend(self.swig_opts) - if self.swig_cpp: - swig_cmd.append("-c++") - - # Do not override commandline arguments - if not self.swig_opts: - for o in extension.swig_opts: - swig_cmd.append(o) - - for source in swig_sources: - target = swig_targets[source] - log.info("swigging %s to %s", source, target) - self.spawn(swig_cmd + ["-o", target, source]) - - return new_sources - - def find_swig(self): - """Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. - """ - if os.name == "posix": - return "swig" - elif os.name == "nt": - # Look for SWIG in its standard installation directory on - # Windows (or so I presume!). If we find it there, great; - # if not, act like Unix and assume it's in the PATH. 
- for vers in ("1.3", "1.2", "1.1"): - fn = os.path.join("c:\\swig%s" % vers, "swig.exe") - if os.path.isfile(fn): - return fn - else: - return "swig.exe" - else: - raise DistutilsPlatformError( - "I don't know how to find (much less run) SWIG " - "on platform '%s'" % os.name) - - # -- Name generators ----------------------------------------------- - # (extension names, filenames, whatever) - def get_ext_fullpath(self, ext_name): - """Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - fullname = self.get_ext_fullname(ext_name) - modpath = fullname.split('.') - filename = self.get_ext_filename(modpath[-1]) - - if not self.inplace: - # no further work needed - # returning : - # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) - return os.path.join(self.build_lib, filename) - - # the inplace option requires to find the package directory - # using the build_py command for that - package = '.'.join(modpath[0:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = os.path.abspath(build_py.get_package_dir(package)) - - # returning - # package_dir/filename - return os.path.join(package_dir, filename) - - def get_ext_fullname(self, ext_name): - """Returns the fullname of a given extension name. - - Adds the `package.` prefix""" - if self.package is None: - return ext_name - else: - return self.package + '.' + ext_name - - def get_ext_filename(self, ext_name): - r"""Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\bar.pyd"). - """ - from distutils.sysconfig import get_config_var - ext_path = ext_name.split('.') - ext_suffix = get_config_var('EXT_SUFFIX') - return os.path.join(*ext_path) + ext_suffix - - def get_export_symbols(self, ext): - """Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "PyInit_" + module_name. Only relevant on Windows, where - the .pyd file (DLL) must export the module "PyInit_" function. - """ - suffix = '_' + ext.name.split('.')[-1] - try: - # Unicode module name support as defined in PEP-489 - # https://peps.python.org/pep-0489/#export-hook-name - suffix.encode('ascii') - except UnicodeEncodeError: - suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii') - - initfunc_name = "PyInit" + suffix - if initfunc_name not in ext.export_symbols: - ext.export_symbols.append(initfunc_name) - return ext.export_symbols - - def get_libraries(self, ext): - """Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows, we add the Python library (eg. python20.dll). - """ - # The python library is always needed on Windows. For MSVC, this - # is redundant, since the library is mentioned in a pragma in - # pyconfig.h that MSVC groks. The other Windows compilers all seem - # to need it mentioned explicitly, though, so that's what we do. - # Append '_d' to the python import library on debug builds. 
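(Editor's note, not part of the patch.) Two naming rules sit in the block above: get_ext_filename() appends the interpreter's EXT_SUFFIX to the dotted module path, and get_export_symbols() punycode-encodes a non-ASCII module name so the PyInit_* hook stays a valid C identifier (PEP 489). A small sketch, using sysconfig for the suffix lookup; the helper name below is made up and simply mirrors the deleted logic:

    # Illustrative only; pyinit_symbol() is a hypothetical helper.
    import sysconfig

    # get_ext_filename(): "pkg.fast" -> "pkg/fast" + EXT_SUFFIX, where EXT_SUFFIX is
    # platform dependent (e.g. ".cpython-312-x86_64-linux-gnu.so" on a Linux build).
    ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")

    def pyinit_symbol(module_name):
        tail = "_" + module_name.split(".")[-1]
        try:
            tail.encode("ascii")
        except UnicodeEncodeError:
            # PEP 489: non-ASCII names get a punycode-encoded, ASCII-only hook name
            tail = "U" + tail.encode("punycode").replace(b"-", b"_").decode("ascii")
        return "PyInit" + tail

    assert pyinit_symbol("spam.ham") == "PyInit_ham"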
- if sys.platform == "win32": - from distutils._msvccompiler import MSVCCompiler - if not isinstance(self.compiler, MSVCCompiler): - template = "python%d%d" - if self.debug: - template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - else: - # On Android only the main executable and LD_PRELOADs are considered - # to be RTLD_GLOBAL, all the dependencies of the main executable - # remain RTLD_LOCAL and so the shared libraries must be linked with - # libpython when python is built with a shared python library (issue - # bpo-21536). - # On Cygwin (and if required, other POSIX-like platforms based on - # Windows like MinGW) it is simply necessary that all symbols in - # shared libraries are resolved at link time. - from distutils.sysconfig import get_config_var - link_libpython = False - if get_config_var('Py_ENABLE_SHARED'): - # A native build on an Android device or on Cygwin - if hasattr(sys, 'getandroidapilevel'): - link_libpython = True - elif sys.platform == 'cygwin': - link_libpython = True - elif '_PYTHON_HOST_PLATFORM' in os.environ: - # We are cross-compiling for one of the relevant platforms - if get_config_var('ANDROID_API_LEVEL') != 0: - link_libpython = True - elif get_config_var('MACHDEP') == 'cygwin': - link_libpython = True - - if link_libpython: - ldversion = get_config_var('LDVERSION') - return ext.libraries + ['python' + ldversion] - - return ext.libraries diff --git a/Lib/distutils/command/build_py.py b/Lib/distutils/command/build_py.py deleted file mode 100644 index edc2171cd122dd..00000000000000 --- a/Lib/distutils/command/build_py.py +++ /dev/null @@ -1,416 +0,0 @@ -"""distutils.command.build_py - -Implements the Distutils 'build_py' command.""" - -import os -import importlib.util -import sys -import glob - -from distutils.core import Command -from distutils.errors import * -from distutils.util import convert_path, Mixin2to3 -from distutils import log - -class build_py (Command): - - description = "\"build\" pure Python modules (copy to build directory)" - - user_options = [ - ('build-lib=', 'd', "directory to \"build\" (copy) to"), - ('compile', 'c', "compile .py to .pyc"), - ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ] - - boolean_options = ['compile', 'force'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - self.build_lib = None - self.py_modules = None - self.package = None - self.package_data = None - self.package_dir = None - self.compile = 0 - self.optimize = 0 - self.force = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('force', 'force')) - - # Get the distribution options that are aliases for build_py - # options -- list of packages and list of modules. 
- self.packages = self.distribution.packages - self.py_modules = self.distribution.py_modules - self.package_data = self.distribution.package_data - self.package_dir = {} - if self.distribution.package_dir: - for name, path in self.distribution.package_dir.items(): - self.package_dir[name] = convert_path(path) - self.data_files = self.get_data_files() - - # Ick, copied straight from install_lib.py (fancy_getopt needs a - # type system! Hell, *everything* needs a type system!!!) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - assert 0 <= self.optimize <= 2 - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # XXX copy_file by default preserves atime and mtime. IMHO this is - # the right thing to do, but perhaps it should be an option -- in - # particular, a site administrator might want installed files to - # reflect the time of installation rather than the last - # modification time before the installed release. - - # XXX copy_file by default preserves mode, which appears to be the - # wrong thing to do: if a file is read-only in the working - # directory, we want it to be installed read/write so that the next - # installation of the same module distribution can overwrite it - # without problems. (This might be a Unix-specific issue.) Thus - # we turn off 'preserve_mode' when copying to the build directory, - # since the build directory is supposed to be exactly what the - # installation will look like (ie. we preserve mode when - # installing). - - # Two options control which modules will be installed: 'packages' - # and 'py_modules'. The former lets us work with whole packages, not - # specifying individual modules at all; the latter is for - # specifying modules one-at-a-time. 
- - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - self.byte_compile(self.get_outputs(include_bytecode=0)) - - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - data = [] - if not self.packages: - return data - for package in self.packages: - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = 0 - if src_dir: - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = [] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) - # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files - and os.path.isfile(fn)]) - return files - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target, - preserve_mode=False) - - def get_package_dir(self, package): - """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any).""" - path = package.split('.') - - if not self.package_dir: - if path: - return os.path.join(*path) - else: - return '' - else: - tail = [] - while path: - try: - pdir = self.package_dir['.'.join(path)] - except KeyError: - tail.insert(0, path[-1]) - del path[-1] - else: - tail.insert(0, pdir) - return os.path.join(*tail) - else: - # Oops, got all the way through 'path' without finding a - # match in package_dir. If package_dir defines a directory - # for the root (nameless) package, then fallback on it; - # otherwise, we might as well have not consulted - # package_dir at all, as we just use the directory implied - # by 'tail' (which should be the same as the original value - # of 'path' at this point). - pdir = self.package_dir.get('') - if pdir is not None: - tail.insert(0, pdir) - - if tail: - return os.path.join(*tail) - else: - return '' - - def check_package(self, package, package_dir): - # Empty dir name means current directory, which we can probably - # assume exists. Also, os.path.exists and isdir don't know about - # my "empty string means current dir" convention, so we have to - # circumvent them. 
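(Editor's note, not part of the patch.) The package_dir walk in get_package_dir() above is the subtlest part of this file: it strips trailing components off the dotted package name until it finds a mapping, then falls back to the root ('') entry. As an illustration only, a standalone mirror of that resolution (the helper name is made up) with two worked cases:

    # Illustrative only; resolve_package_dir() mirrors the get_package_dir() walk.
    import os

    def resolve_package_dir(package, package_dir):
        path = package.split(".")
        if not package_dir:
            return os.path.join(*path) if package else ""
        tail = []
        while path:
            try:
                pdir = package_dir[".".join(path)]
            except KeyError:
                tail.insert(0, path[-1])     # no mapping for this prefix; keep walking up
                del path[-1]
            else:
                tail.insert(0, pdir)         # longest matching prefix wins
                return os.path.join(*tail)
        pdir = package_dir.get("")           # fall back to the root ("") entry, if any
        if pdir is not None:
            tail.insert(0, pdir)
        return os.path.join(*tail) if tail else ""

    assert resolve_package_dir("pkg.ext", {"": "src", "pkg.ext": "native"}) == "native"
    assert resolve_package_dir("pkg.core", {"": "src"}) == os.path.join("src", "pkg", "core")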
- if package_dir != "": - if not os.path.exists(package_dir): - raise DistutilsFileError( - "package directory '%s' does not exist" % package_dir) - if not os.path.isdir(package_dir): - raise DistutilsFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) - - # Require __init__.py for all but the "root package" - if package: - init_py = os.path.join(package_dir, "__init__.py") - if os.path.isfile(init_py): - return init_py - else: - log.warn(("package init file '%s' not found " + - "(or not a regular file)"), init_py) - - # Either not in a package at all (__init__.py not expected), or - # __init__.py doesn't exist -- so don't return the filename. - return None - - def check_module(self, module, module_file): - if not os.path.isfile(module_file): - log.warn("file %s (for module %s) not found", module_file, module) - return False - else: - return True - - def find_package_modules(self, package, package_dir): - self.check_package(package, package_dir) - module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) - modules = [] - setup_script = os.path.abspath(self.distribution.script_name) - - for f in module_files: - abs_f = os.path.abspath(f) - if abs_f != setup_script: - module = os.path.splitext(os.path.basename(f))[0] - modules.append((package, module, f)) - else: - self.debug_print("excluding %s" % setup_script) - return modules - - def find_modules(self): - """Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. - """ - # Map package names to tuples of useful info about the package: - # (package_dir, checked) - # package_dir - the directory where we'll find source files for - # this package - # checked - true if we have checked that the package directory - # is valid (exists, contains __init__.py, ... ?) - packages = {} - - # List of (package, module, filename) tuples to return - modules = [] - - # We treat modules-in-packages almost the same as toplevel modules, - # just the "package" for a toplevel is empty (either an empty - # string or empty list, depending on context). Differences: - # - don't check for __init__.py in directory for empty package - for module in self.py_modules: - path = module.split('.') - package = '.'.join(path[0:-1]) - module_base = path[-1] - - try: - (package_dir, checked) = packages[package] - except KeyError: - package_dir = self.get_package_dir(package) - checked = 0 - - if not checked: - init_py = self.check_package(package, package_dir) - packages[package] = (package_dir, 1) - if init_py: - modules.append((package, "__init__", init_py)) - - # XXX perhaps we should also check for just .pyc files - # (so greedy closed-source bastards can distribute Python - # modules too) - module_file = os.path.join(package_dir, module_base + ".py") - if not self.check_module(module, module_file): - continue - - modules.append((package, module_base, module_file)) - - return modules - - def find_all_modules(self): - """Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). 
Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do.""" - modules = [] - if self.py_modules: - modules.extend(self.find_modules()) - if self.packages: - for package in self.packages: - package_dir = self.get_package_dir(package) - m = self.find_package_modules(package, package_dir) - modules.extend(m) - return modules - - def get_source_files(self): - return [module[-1] for module in self.find_all_modules()] - - def get_module_outfile(self, build_dir, package, module): - outfile_path = [build_dir] + list(package) + [module + ".py"] - return os.path.join(*outfile_path) - - def get_outputs(self, include_bytecode=1): - modules = self.find_all_modules() - outputs = [] - for (package, module, module_file) in modules: - package = package.split('.') - filename = self.get_module_outfile(self.build_lib, package, module) - outputs.append(filename) - if include_bytecode: - if self.compile: - outputs.append(importlib.util.cache_from_source( - filename, optimization='')) - if self.optimize > 0: - outputs.append(importlib.util.cache_from_source( - filename, optimization=self.optimize)) - - outputs += [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir, filenames in self.data_files - for filename in filenames - ] - - return outputs - - def build_module(self, module, module_file, package): - if isinstance(package, str): - package = package.split('.') - elif not isinstance(package, (list, tuple)): - raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") - - # Now put the module source file into the "build" area -- this is - # easy, we just copy it somewhere under self.build_lib (the build - # directory for Python source). - outfile = self.get_module_outfile(self.build_lib, package, module) - dir = os.path.dirname(outfile) - self.mkpath(dir) - return self.copy_file(module_file, outfile, preserve_mode=0) - - def build_modules(self): - modules = self.find_modules() - for (package, module, module_file) in modules: - # Now "build" the module -- ie. copy the source file to - # self.build_lib (the build directory for Python source). - # (Actually, it gets copied to the directory for this package - # under self.build_lib.) - self.build_module(module, module_file, package) - - def build_packages(self): - for package in self.packages: - # Get list of (package, module, module_file) tuples based on - # scanning the package directory. 'package' is only included - # in the tuple so that 'find_modules()' and - # 'find_package_tuples()' have a consistent interface; it's - # ignored here (apart from a sanity check). Also, 'module' is - # the *unqualified* module name (ie. no dots, no package -- we - # already know its package!), and 'module_file' is the path to - # the .py file, relative to the current directory - # (ie. including 'package_dir'). - package_dir = self.get_package_dir(package) - modules = self.find_package_modules(package, package_dir) - - # Now loop over the modules we found, "building" each one (just - # copy it to self.build_lib). 
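(Editor's note, not part of the patch.) get_outputs() above predicts the byte-compiled file names with importlib.util.cache_from_source() rather than spelling out __pycache__ paths by hand. A short illustration; the source path is hypothetical and the cache tag in the result depends on the running interpreter:

    # Illustrative only.
    import importlib.util

    src = "build/lib/pkg/mod.py"
    plain = importlib.util.cache_from_source(src, optimization="")
    opt1 = importlib.util.cache_from_source(src, optimization=1)
    # plain -> "build/lib/pkg/__pycache__/mod.cpython-XY.pyc"
    # opt1  -> "build/lib/pkg/__pycache__/mod.cpython-XY.opt-1.pyc"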
- for (package_, module, module_file) in modules: - assert package == package_ - self.build_module(module, module_file, package) - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - prefix = self.build_lib - if prefix[-1] != os.sep: - prefix = prefix + os.sep - - # XXX this code is essentially the same as the 'byte_compile() - # method of the "install_lib" command, except for the determination - # of the 'prefix' string. Hmmm. - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=prefix, dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=prefix, dry_run=self.dry_run) - -class build_py_2to3(build_py, Mixin2to3): - def run(self): - self.updated_files = [] - - # Base class code - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - # 2to3 - self.run_2to3(self.updated_files) - - # Remaining base class code - self.byte_compile(self.get_outputs(include_bytecode=0)) - - def build_module(self, module, module_file, package): - res = build_py.build_module(self, module, module_file, package) - if res[1]: - # file was copied - self.updated_files.append(res[0]) - return res diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py deleted file mode 100644 index ccc70e6465016e..00000000000000 --- a/Lib/distutils/command/build_scripts.py +++ /dev/null @@ -1,160 +0,0 @@ -"""distutils.command.build_scripts - -Implements the Distutils 'build_scripts' command.""" - -import os, re -from stat import ST_MODE -from distutils import sysconfig -from distutils.core import Command -from distutils.dep_util import newer -from distutils.util import convert_path, Mixin2to3 -from distutils import log -import tokenize - -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') - -class build_scripts(Command): - - description = "\"build\" scripts (copy and fixup #! line)" - - user_options = [ - ('build-dir=', 'd', "directory to \"build\" (copy) to"), - ('force', 'f', "forcibly build everything (ignore file timestamps"), - ('executable=', 'e', "specify final destination interpreter path"), - ] - - boolean_options = ['force'] - - - def initialize_options(self): - self.build_dir = None - self.scripts = None - self.force = None - self.executable = None - self.outfiles = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_scripts', 'build_dir'), - ('force', 'force'), - ('executable', 'executable')) - self.scripts = self.distribution.scripts - - def get_source_files(self): - return self.scripts - - def run(self): - if not self.scripts: - return - self.copy_scripts() - - - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. 
- """ - self.mkpath(self.build_dir) - outfiles = [] - updated_files = [] - for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? - return outfiles, updated_files - -class build_scripts_2to3(build_scripts, Mixin2to3): - - def copy_scripts(self): - outfiles, updated_files = build_scripts.copy_scripts(self) - if not self.dry_run: - self.run_2to3(updated_files) - return outfiles, updated_files diff --git a/Lib/distutils/command/check.py b/Lib/distutils/command/check.py deleted file mode 100644 index 73a30f3afd84a3..00000000000000 --- a/Lib/distutils/command/check.py +++ /dev/null @@ -1,148 +0,0 @@ -"""distutils.command.check - -Implements the Distutils 'check' command. 
-""" -from distutils.core import Command -from distutils.errors import DistutilsSetupError - -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): - self.messages = [] - Reporter.__init__(self, source, report_level, halt_level, stream, - debug, encoding, error_handler) - - def system_message(self, level, message, *children, **kwargs): - self.messages.append((level, message, children, kwargs)) - return nodes.system_message(message, level=level, - type=self.levels[level], - *children, **kwargs) - - HAS_DOCUTILS = True -except Exception: - # Catch all exceptions because exceptions besides ImportError probably - # indicate that docutils is not ported to Py3k. - HAS_DOCUTILS = False - -class check(Command): - """This command checks the meta-data of the package. - """ - description = ("perform some checks on the package") - user_options = [('metadata', 'm', 'Verify meta-data'), - ('restructuredtext', 'r', - ('Checks if long string meta-data syntax ' - 'are reStructuredText-compliant')), - ('strict', 's', - 'Will exit with an error if a check fails')] - - boolean_options = ['metadata', 'restructuredtext', 'strict'] - - def initialize_options(self): - """Sets default values for options.""" - self.restructuredtext = 0 - self.metadata = 1 - self.strict = 0 - self._warnings = 0 - - def finalize_options(self): - pass - - def warn(self, msg): - """Counts the number of warnings that occurs.""" - self._warnings += 1 - return Command.warn(self, msg) - - def run(self): - """Runs the command.""" - # perform the various tests - if self.metadata: - self.check_metadata() - if self.restructuredtext: - if HAS_DOCUTILS: - self.check_restructuredtext() - elif self.strict: - raise DistutilsSetupError('The docutils package is needed.') - - # let's raise an error in strict mode, if we have at least - # one warning - if self.strict and self._warnings > 0: - raise DistutilsSetupError('Please correct your package.') - - def check_metadata(self): - """Ensures that all required elements of meta-data are supplied. - - Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email) - - Warns if any are missing. 
- """ - metadata = self.distribution.metadata - - missing = [] - for attr in ('name', 'version', 'url'): - if not (hasattr(metadata, attr) and getattr(metadata, attr)): - missing.append(attr) - - if missing: - self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: - self.warn("missing meta-data: either (author and author_email) " + - "or (maintainer and maintainer_email) " + - "should be supplied") - - def check_restructuredtext(self): - """Checks if the long string fields are reST-compliant.""" - data = self.distribution.get_long_description() - for warning in self._check_rst_data(data): - line = warning[-1].get('line') - if line is None: - warning = warning[1] - else: - warning = '%s (line %s)' % (warning[1], line) - self.warn(warning) - - def _check_rst_data(self, data): - """Returns warnings when the provided data doesn't compile.""" - # the include and csv_table directives need this to be a path - source_path = self.distribution.script_name or 'setup.py' - parser = Parser() - settings = frontend.OptionParser(components=(Parser,)).get_default_values() - settings.tab_width = 4 - settings.pep_references = None - settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - try: - parser.parse(data, document) - except AttributeError as e: - reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) - - return reporter.messages diff --git a/Lib/distutils/command/clean.py b/Lib/distutils/command/clean.py deleted file mode 100644 index 0cb270166211fe..00000000000000 --- a/Lib/distutils/command/clean.py +++ /dev/null @@ -1,76 +0,0 @@ -"""distutils.command.clean - -Implements the Distutils 'clean' command.""" - -# contributed by Bastian Kleineidam , added 2000-03-18 - -import os -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils import log - -class clean(Command): - - description = "clean up temporary files from 'build' command" - user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") - ] - - boolean_options = ['all'] - - def initialize_options(self): - self.build_base = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.bdist_base = None - self.all = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib'), - ('build_scripts', 'build_scripts'), - ('build_temp', 'build_temp')) - self.set_undefined_options('bdist', - ('bdist_base', 'bdist_base')) - - def run(self): - # remove the build/temp. directory (unless it's already - # gone) - if os.path.exists(self.build_temp): - remove_tree(self.build_temp, dry_run=self.dry_run) - else: - log.debug("'%s' does not exist -- can't clean it", - self.build_temp) - - if self.all: - # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): - if os.path.exists(directory): - remove_tree(directory, dry_run=self.dry_run) - else: - log.warn("'%s' does not exist -- can't clean it", - directory) - - # just for the heck of it, try to remove the base build directory: - # we might have emptied it right now, but if not we don't care - if not self.dry_run: - try: - os.rmdir(self.build_base) - log.info("removing '%s'", self.build_base) - except OSError: - pass diff --git a/Lib/distutils/command/command_template b/Lib/distutils/command/command_template deleted file mode 100644 index 6106819db843b5..00000000000000 --- a/Lib/distutils/command/command_template +++ /dev/null @@ -1,33 +0,0 @@ -"""distutils.command.x - -Implements the Distutils 'x' command. -""" - -# created 2000/mm/dd, John Doe - -__revision__ = "$Id$" - -from distutils.core import Command - - -class x(Command): - - # Brief (40-50 characters) description of the command - description = "" - - # List of option tuples: long name, short name (None if no short - # name), and help string. - user_options = [('', '', - ""), - ] - - def initialize_options(self): - self. = None - self. = None - self. 
= None - - def finalize_options(self): - if self.x is None: - self.x = - - def run(self): diff --git a/Lib/distutils/command/config.py b/Lib/distutils/command/config.py deleted file mode 100644 index aeda408e731979..00000000000000 --- a/Lib/distutils/command/config.py +++ /dev/null @@ -1,344 +0,0 @@ -"""distutils.command.config - -Implements the Distutils 'config' command, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". -""" - -import os, re - -from distutils.core import Command -from distutils.errors import DistutilsExecError -from distutils.sysconfig import customize_compiler -from distutils import log - -LANG_EXT = {"c": ".c", "c++": ".cxx"} - -class config(Command): - - description = "prepare to build" - - user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - - - # The three standard command methods: since the "config" command - # does nothing by default, these are empty. - - def initialize_options(self): - self.compiler = None - self.cc = None - self.include_dirs = None - self.libraries = None - self.library_dirs = None - - # maximal output for now - self.noisy = 1 - self.dump_source = 1 - - # list of temporary files generated along-the-way that we have - # to clean at some point - self.temp_files = [] - - def finalize_options(self): - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - elif isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - if self.libraries is None: - self.libraries = [] - elif isinstance(self.libraries, str): - self.libraries = [self.libraries] - - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - def run(self): - pass - - # Utility methods for actual "config" commands. The interfaces are - # loosely based on Autoconf macros of similar names. Sub-classes - # may use these freely. - - def _check_compiler(self): - """Check that 'self.compiler' really is a CCompiler object; - if not, make it one. - """ - # We do this late, and only on-demand, because this is an expensive - # import. 
- from distutils.ccompiler import CCompiler, new_compiler - if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) - customize_compiler(self.compiler) - if self.include_dirs: - self.compiler.set_include_dirs(self.include_dirs) - if self.libraries: - self.compiler.set_libraries(self.libraries) - if self.library_dirs: - self.compiler.set_library_dirs(self.library_dirs) - - def _gen_temp_sourcefile(self, body, headers, lang): - filename = "_configtest" + LANG_EXT[lang] - with open(filename, "w") as file: - if headers: - for header in headers: - file.write("#include <%s>\n" % header) - file.write("\n") - file.write(body) - if body[-1] != "\n": - file.write("\n") - return filename - - def _preprocess(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - out = "_configtest.i" - self.temp_files.extend([src, out]) - self.compiler.preprocess(src, out, include_dirs=include_dirs) - return (src, out) - - def _compile(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - if self.dump_source: - dump_file(src, "compiling '%s':" % src) - (obj,) = self.compiler.object_filenames([src]) - self.temp_files.extend([src, obj]) - self.compiler.compile([src], include_dirs=include_dirs) - return (src, obj) - - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): - (src, obj) = self._compile(body, headers, include_dirs, lang) - prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) - - if self.compiler.exe_extension is not None: - prog = prog + self.compiler.exe_extension - self.temp_files.append(prog) - - return (src, obj, prog) - - def _clean(self, *filenames): - if not filenames: - filenames = self.temp_files - self.temp_files = [] - log.info("removing: %s", ' '.join(filenames)) - for filename in filenames: - try: - os.remove(filename) - except OSError: - pass - - - # XXX these ignore the dry-run flag: what to do, what to do? even if - # you want a dry-run build, you still need some sort of configuration - # info. My inclination is to make it up to the real config command to - # consult 'dry_run', and assume a default (minimal) configuration if - # true. The problem with trying to do it here is that you'd have to - # return either true or false from all the 'try' methods, neither of - # which is correct. - - # XXX need access to the header search path and maybe default macros. - - def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): - """Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - from distutils.ccompiler import CompileError - self._check_compiler() - ok = True - try: - self._preprocess(body, headers, include_dirs, lang) - except CompileError: - ok = False - - self._clean() - return ok - - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): - """Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 'pattern' should either be a compiled regex object or a - string containing a regex. 
If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - self._check_compiler() - src, out = self._preprocess(body, headers, include_dirs, lang) - - if isinstance(pattern, str): - pattern = re.compile(pattern) - - with open(out) as file: - match = False - while True: - line = file.readline() - if line == '': - break - if pattern.search(line): - match = True - break - - self._clean() - return match - - def try_compile(self, body, headers=None, include_dirs=None, lang="c"): - """Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - from distutils.ccompiler import CompileError - self._check_compiler() - try: - self._compile(body, headers, include_dirs, lang) - ok = True - except CompileError: - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - ok = True - except (CompileError, LinkError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - self.spawn([exe]) - ok = True - except (CompileError, LinkError, DistutilsExecError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - - # -- High-level methods -------------------------------------------- - # (these are the ones that are actually likely to be useful - # when implementing a real-world config command!) - - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): - """Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. 
- """ - self._check_compiler() - body = [] - if decl: - body.append("int %s ();" % func) - body.append("int main () {") - if call: - body.append(" %s();" % func) - else: - body.append(" %s;" % func) - body.append("}") - body = "\n".join(body) + "\n" - - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) - - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): - """Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - self._check_compiler() - return self.try_link("int main (void) { }", headers, include_dirs, - [library] + other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): - """Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) - -def dump_file(filename, head=None): - """Dumps a file content into log.info. - - If head is not None, will be dumped before the file content. - """ - if head is None: - log.info('%s', filename) - else: - log.info(head) - file = open(filename) - try: - log.info(file.read()) - finally: - file.close() diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py deleted file mode 100644 index a22a5d094d7678..00000000000000 --- a/Lib/distutils/command/install.py +++ /dev/null @@ -1,679 +0,0 @@ -"""distutils.command.install - -Implements the Distutils 'install' command.""" - -import sys -import sysconfig -import os -import re - -from distutils import log -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.sysconfig import get_config_vars -from distutils.errors import DistutilsPlatformError -from distutils.file_util import write_file -from distutils.util import convert_path, subst_vars, change_root -from distutils.util import get_platform -from distutils.errors import DistutilsOptionError - -from site import USER_BASE -from site import USER_SITE - -HAS_USER_SITE = (USER_SITE is not None) - -# The keys to an installation scheme; if any new types of files are to be -# installed, be sure to add an entry to every scheme in -# sysconfig._INSTALL_SCHEMES, and to SCHEME_KEYS here. -SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') - -# The following code provides backward-compatible INSTALL_SCHEMES -# while making the sysconfig module the single point of truth. -# This makes it easier for OS distributions where they need to -# alter locations for packages installations in a single place. -# Note that this module is deprecated (PEP 632); all consumers -# of this information should switch to using sysconfig directly. 
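(Editor's note, not part of the patch.) The comment above says it outright: this INSTALL_SCHEMES shim only exists for backward compatibility, and consumers should read the scheme data from sysconfig instead. The supported spelling looks like this:

    # Illustrative only: reading the install scheme directly from sysconfig,
    # as the comment above recommends.
    import sysconfig

    paths = sysconfig.get_paths()        # default scheme for the running interpreter
    # paths has the same keys the shim copies: "purelib", "platlib", "include",
    # "scripts", "data" (plus "stdlib"/"platstdlib")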
-INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} - -# Copy from sysconfig._INSTALL_SCHEMES -for key in SCHEME_KEYS: - for distutils_scheme_name, sys_scheme_name in ( - ("unix_prefix", "posix_prefix"), ("unix_home", "posix_home"), - ("nt", "nt")): - sys_key = key - sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name] - if key == "headers" and key not in sys_scheme: - # On POSIX-y platforms, Python will: - # - Build from .h files in 'headers' (only there when - # building CPython) - # - Install .h files to 'include' - # When 'headers' is missing, fall back to 'include' - sys_key = 'include' - INSTALL_SCHEMES[distutils_scheme_name][key] = sys_scheme[sys_key] - -# Transformation to different template format -for main_key in INSTALL_SCHEMES: - for key, value in INSTALL_SCHEMES[main_key].items(): - # Change all ocurences of {variable} to $variable - value = re.sub(r"\{(.+?)\}", r"$\g<1>", value) - value = value.replace("$installed_base", "$base") - value = value.replace("$py_version_nodot_plat", "$py_version_nodot") - if key == "headers": - value += "/$dist_name" - if sys.version_info >= (3, 9) and key == "platlib": - # platlibdir is available since 3.9: bpo-1294959 - value = value.replace("/lib/", "/$platlibdir/") - INSTALL_SCHEMES[main_key][key] = value - -# The following part of INSTALL_SCHEMES has a different definition -# than the one in sysconfig, but because both depend on the site module, -# the outcomes should be the same. -if HAS_USER_SITE: - INSTALL_SCHEMES['nt_user'] = { - 'purelib': '$usersite', - 'platlib': '$usersite', - 'headers': '$userbase/Python$py_version_nodot/Include/$dist_name', - 'scripts': '$userbase/Python$py_version_nodot/Scripts', - 'data' : '$userbase', - } - - INSTALL_SCHEMES['unix_user'] = { - 'purelib': '$usersite', - 'platlib': '$usersite', - 'headers': - '$userbase/include/python$py_version_short$abiflags/$dist_name', - 'scripts': '$userbase/bin', - 'data' : '$userbase', - } - - -class install(Command): - - description = "install everything from build directory" - - user_options = [ - # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('home=', None, - "(Unix only) home directory to install under"), - - # Or, just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - - # Or, explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - - # Byte-compilation options -- see install_lib.py for details, as - # these are duplicated from there (but only install_lib does - # anything with them). 
- ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - - # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - - # Where to install documentation (eventually!) - #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - ('record=', None, - "filename in which to record list of installed files"), - ] - - boolean_options = ['compile', 'force', 'skip-build'] - - if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % USER_SITE)) - boolean_options.append('user') - - negative_opt = {'no-compile' : 'compile'} - - - def initialize_options(self): - """Initializes options.""" - # High-level options: these select both an installation base - # and scheme. - self.prefix = None - self.exec_prefix = None - self.home = None - self.user = 0 - - # These select only the installation base; it's up to the user to - # specify the installation scheme (currently, that means supplying - # the --install-{platlib,purelib,scripts,data} options). - self.install_base = None - self.install_platbase = None - self.root = None - - # These options are the actual installation directories; if not - # supplied by the user, they are filled in using the installation - # scheme implied by prefix/exec-prefix/home and the contents of - # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - if HAS_USER_SITE: - self.install_userbase = USER_BASE - self.install_usersite = USER_SITE - - self.compile = None - self.optimize = None - - # Deprecated - # These two are for putting non-packagized distributions into their - # own directory and creating a .pth file if it makes sense. - # 'extra_path' comes from the setup file; 'install_path_file' can - # be turned off if it makes no sense to install a .pth file. (But - # better to install it uselessly than to guess wrong and not - # install it when it's necessary and would be used!) Currently, - # 'install_path_file' is always true unless some outsider meddles - # with it. - self.extra_path = None - self.install_path_file = 1 - - # 'force' forces installation, even if target files are not - # out-of-date. 'skip_build' skips running the "build" command, - # handy if you know it's not necessary. 'warn_dir' (which is *not* - # a user option, it's just there so the bdist_* commands can turn - # it off) determines whether we warn about installing to a - # directory not in sys.path. - self.force = 0 - self.skip_build = 0 - self.warn_dir = 1 - - # These are only here as a conduit from the 'build' command to the - # 'install_*' commands that do the real work. ('build_base' isn't - # actually used anywhere, but it might be useful in future.) They - # are not user options, because if the user told the install - # command where the build directory is, that wouldn't affect the - # build command. 
- self.build_base = None - self.build_lib = None - - # Not defined yet because we don't know anything about - # documentation yet. - #self.install_man = None - #self.install_html = None - #self.install_info = None - - self.record = None - - - # -- Option finalizing methods ------------------------------------- - # (This is rather more involved than for most commands, - # because this is where the policy for installing third- - # party Python modules on various platforms given a wide - # array of user input is decided. Yes, it's quite complex!) - - def finalize_options(self): - """Finalizes options.""" - # This method (and its helpers, like 'finalize_unix()', - # 'finalize_other()', and 'select_scheme()') is where the default - # installation directories for modules, extension modules, and - # anything else we care to install from a Python module - # distribution. Thus, this code makes a pretty important policy - # statement about how third-party stuff is added to a Python - # installation! Note that the actual work of installation is done - # by the relatively simple 'install_*' commands; they just take - # their orders from the installation directory options determined - # here. - - # Check for errors/inconsistencies in the options; first, stuff - # that's wrong on any platform. - - if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): - raise DistutilsOptionError( - "must supply either prefix/exec-prefix/home or " + - "install-base/install-platbase -- not both") - - if self.home and (self.prefix or self.exec_prefix): - raise DistutilsOptionError( - "must supply either home or prefix/exec-prefix -- not both") - - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise DistutilsOptionError("can't combine user with prefix, " - "exec_prefix/home, or install_(plat)base") - - # Next, stuff that's wrong (or dubious) only on certain platforms. - if os.name != "posix": - if self.exec_prefix: - self.warn("exec-prefix option ignored on this platform") - self.exec_prefix = None - - # Now the interesting logic -- so interesting that we farm it out - # to other methods. The goal of these methods is to set the final - # values for the install_{lib,scripts,data,...} options, using as - # input a heady brew of prefix, exec_prefix, home, install_base, - # install_platbase, user-supplied versions of - # install_{purelib,platlib,lib,scripts,data,...}, and the - # INSTALL_SCHEME dictionary above. Phew! - - self.dump_dirs("pre-finalize_{unix,other}") - - if os.name == 'posix': - self.finalize_unix() - else: - self.finalize_other() - - self.dump_dirs("post-finalize_{unix,other}()") - - # Expand configuration variables, tilde, etc. in self.install_base - # and self.install_platbase -- that way, we can use $base or - # $platbase in the other installation directories and not worry - # about needing recursive variable expansion (shudder). - - py_version = sys.version.split()[0] - (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') - try: - abiflags = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. 
- abiflags = '' - self.config_vars = {'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': '%d.%d' % sys.version_info[:2], - 'py_version_nodot': '%d%d' % sys.version_info[:2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - 'abiflags': abiflags, - 'platlibdir': sys.platlibdir, - } - - if HAS_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - if sysconfig.is_python_build(): - self.config_vars['srcdir'] = sysconfig.get_config_var('srcdir') - - self.expand_basedirs() - - self.dump_dirs("post-expand_basedirs()") - - # Now define config vars for the base directories so we can expand - # everything else. - self.config_vars['base'] = self.install_base - self.config_vars['platbase'] = self.install_platbase - - if DEBUG: - from pprint import pprint - print("config vars:") - pprint(self.config_vars) - - # Expand "~" and configuration variables in the installation - # directories. - self.expand_dirs() - - self.dump_dirs("post-expand_dirs()") - - # Create directories in the home dir: - if self.user: - self.create_home_path() - - # Pick the actual directory to install all modules to: either - # install_purelib or install_platlib, depending on whether this - # module distribution is pure or not. Of course, if the user - # already specified install_lib, use their selection. - if self.install_lib is None: - if self.distribution.ext_modules: # has extensions: non-pure - self.install_lib = self.install_platlib - else: - self.install_lib = self.install_purelib - - - # Convert directories from Unix /-separated syntax to the local - # convention. - self.convert_paths('lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - if HAS_USER_SITE: - self.convert_paths('userbase', 'usersite') - - # Deprecated - # Well, we're not actually fully completely finalized yet: we still - # have to deal with 'extra_path', which is the hack for allowing - # non-packagized module distributions (hello, Numerical Python!) to - # get their own directories. - self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file - self.install_lib = os.path.join(self.install_lib, self.extra_dirs) - - # If a new root directory was supplied, make all the installation - # dirs relative to it. - if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - - self.dump_dirs("after prepending root") - - # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib')) - - # Punt on doc directories for now -- after all, we're punting on - # documentation completely! 
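(Reviewer aside, not part of the patch: the directory resolution above ultimately substitutes the config_vars computed here into scheme templates such as "$base/lib/python$py_version_short/site-packages", then picks install_platlib when the distribution has extension modules and install_purelib otherwise. A minimal, hedged sketch of that expansion -- using string.Template as a stand-in for distutils' own subst_vars(), with hypothetical scheme strings and values -- looks roughly like this:)

    # Illustrative sketch only -- not part of the diff.  It mimics how the
    # install command expands scheme templates with config_vars; the scheme
    # strings and values below are hypothetical, and string.Template stands
    # in for distutils' subst_vars().
    import sys
    from string import Template

    config_vars = {
        "base": "/usr/local",
        "platbase": "/usr/local",
        "py_version_short": "%d.%d" % sys.version_info[:2],
        "dist_name": "example",
    }

    unix_prefix_scheme = {
        "purelib": "$base/lib/python$py_version_short/site-packages",
        "scripts": "$base/bin",
        "headers": "$base/include/python$py_version_short/$dist_name",
    }

    expanded = {key: Template(path).substitute(config_vars)
                for key, path in unix_prefix_scheme.items()}
    for key, path in expanded.items():
        print(key, "->", path)

(The real command additionally expands "~" and honours user-supplied overrides before applying the scheme; the sketch only shows the template substitution step.)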
- - def dump_dirs(self, msg): - """Dumps the list of user options.""" - if not DEBUG: - return - from distutils.fancy_getopt import longopt_xlate - log.debug(msg + ":") - for opt in self.user_options: - opt_name = opt[0] - if opt_name[-1] == "=": - opt_name = opt_name[0:-1] - if opt_name in self.negative_opt: - opt_name = self.negative_opt[opt_name] - opt_name = opt_name.translate(longopt_xlate) - val = not getattr(self, opt_name) - else: - opt_name = opt_name.translate(longopt_xlate) - val = getattr(self, opt_name) - log.debug(" %s: %s", opt_name, val) - - def finalize_unix(self): - """Finalizes options for posix platforms.""" - if self.install_base is not None or self.install_platbase is not None: - if ((self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None): - raise DistutilsOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") - return - - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme("unix_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("unix_home") - else: - if self.prefix is None: - if self.exec_prefix is not None: - raise DistutilsOptionError( - "must not supply exec-prefix without prefix") - - self.prefix = os.path.normpath(sys.prefix) - self.exec_prefix = os.path.normpath(sys.exec_prefix) - - else: - if self.exec_prefix is None: - self.exec_prefix = self.prefix - - self.install_base = self.prefix - self.install_platbase = self.exec_prefix - self.select_scheme("unix_prefix") - - def finalize_other(self): - """Finalizes options for non-posix platforms""" - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme(os.name + "_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("unix_home") - else: - if self.prefix is None: - self.prefix = os.path.normpath(sys.prefix) - - self.install_base = self.install_platbase = self.prefix - try: - self.select_scheme(os.name) - except KeyError: - raise DistutilsPlatformError( - "I don't know how to install stuff on '%s'" % os.name) - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def convert_paths(self, *names): - """Call `convert_path` over `names`.""" - for name in names: - attr = "install_" + name - setattr(self, attr, convert_path(getattr(self, attr))) - - def handle_extra_path(self): - """Set `path_file` and `extra_dirs` using `extra_path`.""" - if self.extra_path is None: - self.extra_path = self.distribution.extra_path - - if self.extra_path is not None: - log.warn( - "Distribution option extra_path is deprecated. " - "See issue27919 for details." - ) - if isinstance(self.extra_path, str): - self.extra_path = self.extra_path.split(',') - - if len(self.extra_path) == 1: - path_file = extra_dirs = self.extra_path[0] - elif len(self.extra_path) == 2: - path_file, extra_dirs = self.extra_path - else: - raise DistutilsOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") - - # convert to local form in case Unix notation used (as it - # should be in setup scripts) - extra_dirs = convert_path(extra_dirs) - else: - path_file = None - extra_dirs = '' - - # XXX should we warn if path_file and not extra_dirs? (in which - # case the path file would be harmless but pointless) - self.path_file = path_file - self.extra_dirs = extra_dirs - - def change_roots(self, *names): - """Change the install directories pointed by name using root.""" - for name in names: - attr = "install_" + name - setattr(self, attr, change_root(self.root, getattr(self, attr))) - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.items(): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - # -- Command execution methods ------------------------------------- - - def run(self): - """Runs the command.""" - # Obviously have to build before we can install - if not self.skip_build: - self.run_command('build') - # If we built for any other platform, we can't install. - build_plat = self.distribution.get_command_obj('build').plat_name - # check warn_dir - it is a clue that the 'install' is happening - # internally, and not to sys.path, so we don't check the platform - # matches what we are running. - if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " - "cross-compiling") - - # Run all sub-commands (at least those that need to be run) - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.path_file: - self.create_path_file() - - # write list of installed files, if requested. 
- if self.record: - outputs = self.get_outputs() - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) - - sys_path = map(os.path.normpath, sys.path) - sys_path = map(os.path.normcase, sys_path) - install_lib = os.path.normcase(os.path.normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - log.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) - - def create_path_file(self): - """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") - if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) - else: - self.warn("path file '%s' not created" % filename) - - - # -- Reporting methods --------------------------------------------- - - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - outputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - # Add the contents of cmd.get_outputs(), ensuring - # that outputs doesn't contain duplicate entries - for filename in cmd.get_outputs(): - if filename not in outputs: - outputs.append(filename) - - if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) - - return outputs - - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - # XXX gee, this looks familiar ;-( - inputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - inputs.extend(cmd.get_inputs()) - - return inputs - - # -- Predicates for sub-command list ------------------------------- - - def has_lib(self): - """Returns true if the current distribution has any Python - modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) - - def has_headers(self): - """Returns true if the current distribution has any headers to - install.""" - return self.distribution.has_headers() - - def has_scripts(self): - """Returns true if the current distribution has any scripts to. - install.""" - return self.distribution.has_scripts() - - def has_data(self): - """Returns true if the current distribution has any data to. - install.""" - return self.distribution.has_data_files() - - # 'sub_commands': a list of commands this command might have to run to - # get its work done. See cmd.py for more info. 
- sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - ('install_egg_info', lambda self:True), - ] diff --git a/Lib/distutils/command/install_data.py b/Lib/distutils/command/install_data.py deleted file mode 100644 index 947cd76a99e5fd..00000000000000 --- a/Lib/distutils/command/install_data.py +++ /dev/null @@ -1,79 +0,0 @@ -"""distutils.command.install_data - -Implements the Distutils 'install_data' command, for installing -platform-independent data files.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils.util import change_root, convert_path - -class install_data(Command): - - description = "install data files" - - user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), - ('force', 'f', "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.outfiles = [] - self.root = None - self.force = 0 - self.data_files = self.distribution.data_files - self.warn_dir = 1 - - def finalize_options(self): - self.set_undefined_options('install', - ('install_data', 'install_dir'), - ('root', 'root'), - ('force', 'force'), - ) - - def run(self): - self.mkpath(self.install_dir) - for f in self.data_files: - if isinstance(f, str): - # it's a simple file, so copy it - f = convert_path(f) - if self.warn_dir: - self.warn("setup script did not provide a directory for " - "'%s' -- installing right in '%s'" % - (f, self.install_dir)) - (out, _) = self.copy_file(f, self.install_dir) - self.outfiles.append(out) - else: - # it's a tuple with path to install to and a list of files - dir = convert_path(f[0]) - if not os.path.isabs(dir): - dir = os.path.join(self.install_dir, dir) - elif self.root: - dir = change_root(self.root, dir) - self.mkpath(dir) - - if f[1] == []: - # If there are no files listed, the user must be - # trying to create an empty directory, so add the - # directory to the list of output files. - self.outfiles.append(dir) - else: - # Copy files, adding them to the list of output files. 
- for data in f[1]: - data = convert_path(data) - (out, _) = self.copy_file(data, dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.data_files or [] - - def get_outputs(self): - return self.outfiles diff --git a/Lib/distutils/command/install_egg_info.py b/Lib/distutils/command/install_egg_info.py deleted file mode 100644 index 0ddc7367cc608d..00000000000000 --- a/Lib/distutils/command/install_egg_info.py +++ /dev/null @@ -1,77 +0,0 @@ -"""distutils.command.install_egg_info - -Implements the Distutils 'install_egg_info' command, for installing -a package's PKG-INFO metadata.""" - - -from distutils.cmd import Command -from distutils import log, dir_util -import os, sys, re - -class install_egg_info(Command): - """Install an .egg-info file for the package""" - - description = "Install package's PKG-INFO metadata as an .egg-info file" - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - basename = "%s-%s-py%d.%d.egg-info" % ( - to_filename(safe_name(self.distribution.get_name())), - to_filename(safe_version(self.distribution.get_version())), - *sys.version_info[:2] - ) - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - target = self.target - if os.path.isdir(target) and not os.path.islink(target): - dir_util.remove_tree(target, dry_run=self.dry_run) - elif os.path.exists(target): - self.execute(os.unlink,(self.target,),"Removing "+target) - elif not os.path.isdir(self.install_dir): - self.execute(os.makedirs, (self.install_dir,), - "Creating "+self.install_dir) - log.info("Writing %s", target) - if not self.dry_run: - with open(target, 'w', encoding='UTF-8') as f: - self.distribution.metadata.write_pkg_file(f) - - def get_outputs(self): - return self.outputs - - -# The following routines are taken from setuptools' pkg_resources module and -# can be replaced by importing them from pkg_resources once it is included -# in the stdlib. - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
- """ - return name.replace('-','_') diff --git a/Lib/distutils/command/install_headers.py b/Lib/distutils/command/install_headers.py deleted file mode 100644 index 9bb0b18dc0d809..00000000000000 --- a/Lib/distutils/command/install_headers.py +++ /dev/null @@ -1,47 +0,0 @@ -"""distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory.""" - -from distutils.core import Command - - -# XXX force is never used -class install_headers(Command): - - description = "install C/C++ header files" - - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.outfiles = [] - - def finalize_options(self): - self.set_undefined_options('install', - ('install_headers', 'install_dir'), - ('force', 'force')) - - - def run(self): - headers = self.distribution.headers - if not headers: - return - - self.mkpath(self.install_dir) - for header in headers: - (out, _) = self.copy_file(header, self.install_dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.distribution.headers or [] - - def get_outputs(self): - return self.outfiles diff --git a/Lib/distutils/command/install_lib.py b/Lib/distutils/command/install_lib.py deleted file mode 100644 index 6154cf09431f72..00000000000000 --- a/Lib/distutils/command/install_lib.py +++ /dev/null @@ -1,217 +0,0 @@ -"""distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules).""" - -import os -import importlib.util -import sys - -from distutils.core import Command -from distutils.errors import DistutilsOptionError - - -# Extension for Python source files. -PYTHON_SOURCE_EXTENSION = ".py" - -class install_lib(Command): - - description = "install all Python modules (extensions and pure Python)" - - # The byte-compilation options are a tad confusing. Here are the - # possible scenarios: - # 1) no compilation at all (--no-compile --no-optimize) - # 2) compile .pyc only (--compile --no-optimize; default) - # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) - # 4) compile "opt-1" .pyc only (--no-compile --optimize) - # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) - # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) - # - # The UI for this is two options, 'compile' and 'optimize'. - # 'compile' is strictly boolean, and only decides whether to - # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and - # decides both whether to generate .pyc files and what level of - # optimization to use. 
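(Reviewer aside, not part of the patch: the compile/optimize scenarios enumerated in the comment above map onto differently tagged bytecode cache files. A small, hedged sketch using importlib directly -- with a hypothetical module path, independent of distutils -- shows which cache name each optimization level produces:)

    # Illustrative aside, not part of the diff: maps the 'optimize' levels
    # discussed above onto bytecode cache file names.  The module path is
    # hypothetical; the file does not need to exist for this to run.
    import importlib.util

    source = "pkg/module.py"
    print(importlib.util.cache_from_source(source, optimization=""))   # plain .pyc (--compile only)
    print(importlib.util.cache_from_source(source, optimization=1))    # like 'python -O'  (.opt-1)
    print(importlib.util.cache_from_source(source, optimization=2))    # like 'python -OO' (.opt-2)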
- - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'compile', 'skip-build'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - # let the 'install' command dictate our installation directory - self.install_dir = None - self.build_dir = None - self.force = 0 - self.compile = None - self.optimize = None - self.skip_build = None - - def finalize_options(self): - # Get all the information we need to install pure Python modules - # from the umbrella 'install' command -- build (source) directory, - # install (target) directory, and whether to compile .py files. - self.set_undefined_options('install', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - ('force', 'force'), - ('compile', 'compile'), - ('optimize', 'optimize'), - ('skip_build', 'skip_build'), - ) - - if self.compile is None: - self.compile = True - if self.optimize is None: - self.optimize = False - - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if self.optimize not in (0, 1, 2): - raise AssertionError - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # Make sure we have built everything we need first - self.build() - - # Install everything: simply dump the entire contents of the build - # directory to the installation directory (that's the beauty of - # having a build directory!) - outfiles = self.install() - - # (Optionally) compile .py to .pyc - if outfiles is not None and self.distribution.has_pure_modules(): - self.byte_compile(outfiles) - - # -- Top-level worker functions ------------------------------------ - # (called from 'run()') - - def build(self): - if not self.skip_build: - if self.distribution.has_pure_modules(): - self.run_command('build_py') - if self.distribution.has_ext_modules(): - self.run_command('build_ext') - - def install(self): - if os.path.isdir(self.build_dir): - outfiles = self.copy_tree(self.build_dir, self.install_dir) - else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) - return - return outfiles - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - - # Get the "--root" directory supplied to the "install" command, - # and use it as a prefix to strip off the purported filename - # encoded in bytecode files. This is far from complete, but it - # should at least generate usable bytecode in RPM distributions. 
- install_root = self.get_finalized_command('install').root - - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=install_root, - dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) - - - # -- Utility methods ----------------------------------------------- - - def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): - if not has_any: - return [] - - build_cmd = self.get_finalized_command(build_cmd) - build_files = build_cmd.get_outputs() - build_dir = getattr(build_cmd, cmd_option) - - prefix_len = len(build_dir) + len(os.sep) - outputs = [] - for file in build_files: - outputs.append(os.path.join(output_dir, file[prefix_len:])) - - return outputs - - def _bytecode_filenames(self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - # Since build_py handles package data installation, the - # list of outputs can contain more than just .py files. - # Make sure we only report bytecode for the .py files. - ext = os.path.splitext(os.path.normcase(py_file))[1] - if ext != PYTHON_SOURCE_EXTENSION: - continue - if self.compile: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization='')) - if self.optimize > 0: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization=self.optimize)) - - return bytecode_files - - - # -- External interface -------------------------------------------- - # (called by outsiders) - - def get_outputs(self): - """Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) - if self.compile: - bytecode_outputs = self._bytecode_filenames(pure_outputs) - else: - bytecode_outputs = [] - - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) - - return pure_outputs + bytecode_outputs + ext_outputs - - def get_inputs(self): - """Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. 
- """ - inputs = [] - - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - inputs.extend(build_py.get_outputs()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - inputs.extend(build_ext.get_outputs()) - - return inputs diff --git a/Lib/distutils/command/install_scripts.py b/Lib/distutils/command/install_scripts.py deleted file mode 100644 index 31a1130ee54937..00000000000000 --- a/Lib/distutils/command/install_scripts.py +++ /dev/null @@ -1,60 +0,0 @@ -"""distutils.command.install_scripts - -Implements the Distutils 'install_scripts' command, for installing -Python scripts.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils import log -from stat import ST_MODE - - -class install_scripts(Command): - - description = "install scripts (Python or otherwise)" - - user_options = [ - ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'skip-build'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.build_dir = None - self.skip_build = None - - def finalize_options(self): - self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install', - ('install_scripts', 'install_dir'), - ('force', 'force'), - ('skip_build', 'skip_build'), - ) - - def run(self): - if not self.skip_build: - self.run_command('build_scripts') - self.outfiles = self.copy_tree(self.build_dir, self.install_dir) - if os.name == 'posix': - # Set the executable bits (owner, group, and world) on - # all the scripts we just installed. - for file in self.get_outputs(): - if self.dry_run: - log.info("changing mode of %s", file) - else: - mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 - log.info("changing mode of %s to %o", file, mode) - os.chmod(file, mode) - - def get_inputs(self): - return self.distribution.scripts or [] - - def get_outputs(self): - return self.outfiles or [] diff --git a/Lib/distutils/command/register.py b/Lib/distutils/command/register.py deleted file mode 100644 index 170f5497141c9d..00000000000000 --- a/Lib/distutils/command/register.py +++ /dev/null @@ -1,304 +0,0 @@ -"""distutils.command.register - -Implements the Distutils 'register' command (register with the repository). 
-""" - -# created 2002/10/21, Richard Jones - -import getpass -import io -import urllib.parse, urllib.request -from warnings import warn - -from distutils.core import PyPIRCCommand -from distutils.errors import * -from distutils import log - -class register(PyPIRCCommand): - - description = ("register the distribution with the Python package index") - user_options = PyPIRCCommand.user_options + [ - ('list-classifiers', None, - 'list the valid Trove classifiers'), - ('strict', None , - 'Will stop the registering if the meta-data are not fully compliant') - ] - boolean_options = PyPIRCCommand.boolean_options + [ - 'verify', 'list-classifiers', 'strict'] - - sub_commands = [('check', lambda self: True)] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.list_classifiers = 0 - self.strict = 0 - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - # setting options for the `check` subcommand - check_options = {'strict': ('register', self.strict), - 'restructuredtext': ('register', 1)} - self.distribution.command_options['check'] = check_options - - def run(self): - self.finalize_options() - self._set_config() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.dry_run: - self.verify_metadata() - elif self.list_classifiers: - self.classifiers() - else: - self.send_metadata() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.register.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.strict = self.strict - check.restructuredtext = 1 - check.run() - - def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - self.has_config = True - else: - if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): - raise ValueError('%s not found in .pypirc' % self.repository) - if self.repository == 'pypi': - self.repository = self.DEFAULT_REPOSITORY - self.has_config = False - - def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - url = self.repository+'?:action=list_classifiers' - response = urllib.request.urlopen(url) - log.info(self._read_pypi_response(response)) - - def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. - ''' - # send the info to the server and report the result - (code, result) = self.post_to_server(self.build_post_data('verify')) - log.info('Server response (%s): %s', code, result) - - def send_metadata(self): - ''' Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. 
- - ''' - # see if we can short-cut and get the username/password from the - # config - if self.has_config: - choice = '1' - username = self.username - password = self.password - else: - choice = 'x' - username = password = '' - - # get the user's login info - choices = '1 2 3 4'.split() - while choice not in choices: - self.announce('''\ -We need to know who you are, so please choose either: - 1. use your existing login, - 2. register as a new user, - 3. have the server generate a new password for you (and email it to you), or - 4. quit -Your selection [default 1]: ''', log.INFO) - choice = input() - if not choice: - choice = '1' - elif choice not in choices: - print('Please choose one of the four options!') - - if choice == '1': - # get the username and password - while not username: - username = input('Username: ') - while not password: - password = getpass.getpass('Password: ') - - # set up the authentication - auth = urllib.request.HTTPPasswordMgr() - host = urllib.parse.urlparse(self.repository)[1] - auth.add_password(self.realm, host, username, password) - # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - self.announce('Server response (%s): %s' % (code, result), - log.INFO) - - # possibly save the login - if code == 200: - if self.has_config: - # sharing the password in the distribution instance - # so the upload command can reuse it - self.distribution.password = password - else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), log.INFO) - self.announce('(the login will be stored in %s)' % \ - self._get_rc_file(), log.INFO) - choice = 'X' - while choice.lower() not in 'yn': - choice = input('Save your login (y/N)?') - if not choice: - choice = 'n' - if choice.lower() == 'y': - self._store_pypirc(username, password) - - elif choice == '2': - data = {':action': 'user'} - data['name'] = data['password'] = data['email'] = '' - data['confirm'] = None - while not data['name']: - data['name'] = input('Username: ') - while data['password'] != data['confirm']: - while not data['password']: - data['password'] = getpass.getpass('Password: ') - while not data['confirm']: - data['confirm'] = getpass.getpass(' Confirm: ') - if data['password'] != data['confirm']: - data['password'] = '' - data['confirm'] = None - print("Password and confirm don't match!") - while not data['email']: - data['email'] = input(' EMail: ') - code, result = self.post_to_server(data) - if code != 200: - log.info('Server response (%s): %s', code, result) - else: - log.info('You will receive an email shortly.') - log.info(('Follow the instructions in it to ' - 'complete registration.')) - elif choice == '3': - data = {':action': 'password_reset'} - data['email'] = '' - while not data['email']: - data['email'] = input('Your email address: ') - code, result = self.post_to_server(data) - log.info('Server response (%s): %s', code, result) - - def build_post_data(self, action): - # figure the data to send - the metadata plus some additional - # information used by the package server - meta = self.distribution.metadata - data = { - ':action': action, - 'metadata_version' : '1.0', - 'name': meta.get_name(), - 'version': meta.get_version(), - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': 
meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - if data['provides'] or data['requires'] or data['obsoletes']: - data['metadata_version'] = '1.1' - return data - - def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. - ''' - if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - log.INFO) - # Build up the MIME payload for the urllib2 POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = io.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if not isinstance(value, (list, tuple)): - value = [value] - for value in value: - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue().encode("utf-8") - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, - 'Content-length': str(len(body)) - } - req = urllib.request.Request(self.repository, body, headers) - - # handle HTTP and include the Basic Auth handler - opener = urllib.request.build_opener( - urllib.request.HTTPBasicAuthHandler(password_mgr=auth) - ) - data = '' - try: - result = opener.open(req) - except urllib.error.HTTPError as e: - if self.show_response: - data = e.fp.read() - result = e.code, e.msg - except urllib.error.URLError as e: - result = 500, str(e) - else: - if self.show_response: - data = self._read_pypi_response(result) - result = 200, 'OK' - if self.show_response: - msg = '\n'.join(('-' * 75, data, '-' * 75)) - self.announce(msg, log.INFO) - return result diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py deleted file mode 100644 index b4996fcb1d276c..00000000000000 --- a/Lib/distutils/command/sdist.py +++ /dev/null @@ -1,494 +0,0 @@ -"""distutils.command.sdist - -Implements the Distutils 'sdist' command (create a source distribution).""" - -import os -import sys -from glob import glob -from warnings import warn - -from distutils.core import Command -from distutils import dir_util -from distutils import file_util -from distutils import archive_util -from distutils.text_file import TextFile -from distutils.filelist import FileList -from distutils import log -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsOptionError - - -def show_formats(): - """Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ - from distutils.fancy_getopt import FancyGetopt - from distutils.archive_util import ARCHIVE_FORMATS - formats = [] - for format in ARCHIVE_FORMATS.keys(): - formats.append(("formats=" + format, None, - ARCHIVE_FORMATS[format][2])) - formats.sort() - FancyGetopt(formats).print_help( - "List of available source distribution formats:") - - -class sdist(Command): - - description = "create a source distribution (tarball, zip file, etc.)" - - def checking_metadata(self): - """Callable used for the check sub-command. 
- - Placed here so user_options can view it""" - return self.metadata_check - - user_options = [ - ('template=', 't', - "name of manifest template file [default: MANIFEST.in]"), - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop " - "(implies --force-manifest)"), - ('force-manifest', 'f', - "forcibly regenerate the manifest and carry on as usual. " - "Deprecated: now the manifest is always regenerated."), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('metadata-check', None, - "Ensure that all required elements of meta-data " - "are supplied. Warn if any missing. [default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'force-manifest', - 'keep-temp', 'metadata-check'] - - help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] - - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } - - sub_commands = [('check', checking_metadata)] - - READMES = ('README', 'README.txt', 'README.rst') - - def initialize_options(self): - # 'template' and 'manifest' are, respectively, the names of - # the manifest template and manifest file. - self.template = None - self.manifest = None - - # 'use_defaults': if true, we will include the default file set - # in the manifest - self.use_defaults = 1 - self.prune = 1 - - self.manifest_only = 0 - self.force_manifest = 0 - - self.formats = ['gztar'] - self.keep_temp = 0 - self.dist_dir = None - - self.archive_files = None - self.metadata_check = 1 - self.owner = None - self.group = None - - def finalize_options(self): - if self.manifest is None: - self.manifest = "MANIFEST" - if self.template is None: - self.template = "MANIFEST.in" - - self.ensure_string_list('formats') - - bad_format = archive_util.check_archive_formats(self.formats) - if bad_format: - raise DistutilsOptionError( - "unknown archive format '%s'" % bad_format) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # 'filelist' contains the list of files that will make up the - # manifest - self.filelist = FileList() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Do whatever it takes to get the list of files to process - # (process the manifest template, read an existing manifest, - # whatever). File list is accumulated in 'self.filelist'. - self.get_file_list() - - # If user just wanted us to regenerate the manifest, stop now. - if self.manifest_only: - return - - # Otherwise, go ahead and create the source distribution tarball, - # or zipfile, or whatever. 
- self.make_distribution() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.sdist.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.run() - - def get_file_list(self): - """Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. - """ - # new behavior when using a template: - # the file list is recalculated every time because - # even if MANIFEST.in or setup.py are not changed - # the user might have added some files in the tree that - # need to be included. - # - # This makes --force the default and only behavior with templates. - template_exists = os.path.isfile(self.template) - if not template_exists and self._manifest_is_not_generated(): - self.read_manifest() - self.filelist.sort() - self.filelist.remove_duplicates() - return - - if not template_exists: - self.warn(("manifest template '%s' does not exist " + - "(using default file list)") % - self.template) - self.filelist.findall() - - if self.use_defaults: - self.add_defaults() - - if template_exists: - self.read_template() - - if self.prune: - self.prune_file_list() - - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def add_defaults(self): - """Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. 
- """ - self._add_defaults_standards() - self._add_defaults_optional() - self._add_defaults_python() - self._add_defaults_data_files() - self._add_defaults_ext() - self._add_defaults_c_libs() - self._add_defaults_scripts() - - @staticmethod - def _cs_path_exists(fspath): - """ - Case-sensitive path existence check - - >>> sdist._cs_path_exists(__file__) - True - >>> sdist._cs_path_exists(__file__.upper()) - False - """ - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory) - - def _add_defaults_standards(self): - standards = [self.READMES, self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = False - for fn in alts: - if self._cs_path_exists(fn): - got_it = True - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if self._cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - def _add_defaults_optional(self): - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - self.filelist.extend(files) - - def _add_defaults_python(self): - # build_py is used to get: - # - python modules - # - files defined in package_data - build_py = self.get_finalized_command('build_py') - - # getting python files - if self.distribution.has_pure_modules(): - self.filelist.extend(build_py.get_source_files()) - - # getting package_data files - # (computed in build_py.data_files by build_py.finalize_options) - for pkg, src_dir, build_dir, filenames in build_py.data_files: - for filename in filenames: - self.filelist.append(os.path.join(src_dir, filename)) - - def _add_defaults_data_files(self): - # getting distribution.data_files - if self.distribution.has_data_files(): - for item in self.distribution.data_files: - if isinstance(item, str): - # plain file - item = convert_path(item) - if os.path.isfile(item): - self.filelist.append(item) - else: - # a (dirname, filenames) tuple - dirname, filenames = item - for f in filenames: - f = convert_path(f) - if os.path.isfile(f): - self.filelist.append(f) - - def _add_defaults_ext(self): - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - def _add_defaults_c_libs(self): - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - def _add_defaults_scripts(self): - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def read_template(self): - """Read and parse manifest template file named by self.template. - - (usually "MANIFEST.in") The parsing and processing is done by - 'self.filelist', which updates itself accordingly. 
- """ - log.info("reading manifest template '%s'", self.template) - template = TextFile(self.template, strip_comments=1, skip_blanks=1, - join_lines=1, lstrip_ws=1, rstrip_ws=1, - collapse_join=1) - - try: - while True: - line = template.readline() - if line is None: # end of file - break - - try: - self.filelist.process_template_line(line) - # the call above can raise a DistutilsTemplateError for - # malformed lines, or a ValueError from the lower-level - # convert_path function - except (DistutilsTemplateError, ValueError) as msg: - self.warn("%s, line %d: %s" % (template.filename, - template.current_line, - msg)) - finally: - template.close() - - def prune_file_list(self): - """Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - - if sys.platform == 'win32': - seps = r'/|\\' - else: - seps = '/' - - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] - vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) - self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) - - def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - if self._manifest_is_not_generated(): - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return - - content = self.filelist.files[:] - content.insert(0, '# file GENERATED by distutils, do NOT edit') - self.execute(file_util.write_file, (self.manifest, content), - "writing manifest file '%s'" % self.manifest) - - def _manifest_is_not_generated(self): - # check for special comment used in 3.1.3 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n' - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - with open(self.manifest) as manifest: - for line in manifest: - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - - def make_release_tree(self, base_dir, files): - """Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - # Create all the directories under 'base_dir' necessary to - # put 'files' there; the 'mkpath()' is just so we don't die - # if the manifest happens to be empty. 
- self.mkpath(base_dir) - dir_util.create_tree(base_dir, files, dry_run=self.dry_run) - - # And walk over the list of files, either making a hard link (if - # os.link exists) to each one that doesn't already exist in its - # corresponding location under 'base_dir', or copying each file - # that's out-of-date in 'base_dir'. (Usually, all files will be - # out-of-date, because by default we blow away 'base_dir' when - # we're done making the distribution archives.) - - if hasattr(os, 'link'): # can make hard links on this system - link = 'hard' - msg = "making hard links in %s..." % base_dir - else: # nope, have to copy - link = None - msg = "copying files to %s..." % base_dir - - if not files: - log.warn("no files to distribute -- empty manifest?") - else: - log.info(msg) - for file in files: - if not os.path.isfile(file): - log.warn("'%s' not a regular file -- skipping", file) - else: - dest = os.path.join(base_dir, file) - self.copy_file(file, dest, link=link) - - self.distribution.metadata.write_pkg_info(base_dir) - - def make_distribution(self): - """Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. - Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - # Don't warn about missing meta-data here -- should be (and is!) - # done elsewhere. - base_dir = self.distribution.get_fullname() - base_name = os.path.join(self.dist_dir, base_dir) - - self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create - # tar archive must be created last to avoid overwrite and remove - if 'tar' in self.formats: - self.formats.append(self.formats.pop(self.formats.index('tar'))) - - for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) - archive_files.append(file) - self.distribution.dist_files.append(('sdist', '', file)) - - self.archive_files = archive_files - - if not self.keep_temp: - dir_util.remove_tree(base_dir, dry_run=self.dry_run) - - def get_archive_files(self): - """Return the list of archive files created when the command - was run, or None if the command hasn't run yet. - """ - return self.archive_files diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py deleted file mode 100644 index e0ecb655b93faf..00000000000000 --- a/Lib/distutils/command/upload.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to a package -index). 
-""" - -import os -import io -import hashlib -from base64 import standard_b64encode -from urllib.error import HTTPError -from urllib.request import urlopen, Request -from urllib.parse import urlparse -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils.core import PyPIRCCommand -from distutils.spawn import spawn -from distutils import log - - -# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) -# https://bugs.python.org/issue40698 -_FILE_CONTENT_DIGESTS = { - "md5_digest": getattr(hashlib, "md5", None), - "sha256_digest": getattr(hashlib, "sha256", None), - "blake2_256_digest": getattr(hashlib, "blake2b", None), -} - - -class upload(PyPIRCCommand): - - description = "upload binary package to PyPI" - - user_options = PyPIRCCommand.user_options + [ - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - - boolean_options = PyPIRCCommand.boolean_options + ['sign'] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.username = '' - self.password = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - - # getting the password from the distribution - # if previously set by the register command - if not self.password and self.distribution.password: - self.password = self.distribution.password - - def run(self): - if not self.distribution.dist_files: - msg = ("Must create and upload files in one command " - "(e.g. 
setup.py sdist upload)") - raise DistutilsOptionError(msg) - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - if params or query or fragments: - raise AssertionError("Incompatible url %s" % self.repository) - - if schema not in ('http', 'https'): - raise AssertionError("unsupported schema " + schema) - - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - send all the meta-data in case we need to - # register a new release - f = open(filename,'rb') - try: - content = f.read() - finally: - f.close() - - meta = self.distribution.metadata - data = { - # action - ':action': 'file_upload', - 'protocol_version': '1', - - # identify release - 'name': meta.get_name(), - 'version': meta.get_version(), - - # file content - 'content': (os.path.basename(filename),content), - 'filetype': command, - 'pyversion': pyversion, - - # additional meta-data - 'metadata_version': '1.0', - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - - data['comment'] = '' - - # file content digests - for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): - if digest_cons is None: - continue - try: - data[digest_name] = digest_cons(content).hexdigest() - except ValueError: - # hash digest not available or blocked by security policy - pass - - if self.sign: - with open(filename + ".asc", "rb") as f: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - f.read()) - - # set up the authentication - user_pass = (self.username + ":" + self.password).encode('ascii') - # The exact encoding of the authentication string is debated. - # Anyway PyPI only accepts ascii for both username or password. 
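The digest loop above guards each hash constructor twice: the algorithm may be missing from hashlib entirely, or it may be rejected at call time under a restrictive security policy. A standalone sketch of the same computation (the function name is illustrative):

    import hashlib

    def content_digests(content):
        # Same constructors the removed upload command probes for.
        constructors = {
            "md5_digest": getattr(hashlib, "md5", None),
            "sha256_digest": getattr(hashlib, "sha256", None),
            "blake2_256_digest": getattr(hashlib, "blake2b", None),
        }
        digests = {}
        for name, cons in constructors.items():
            if cons is None:
                continue
            try:
                digests[name] = cons(content).hexdigest()
            except ValueError:
                pass  # digest blocked by the security policy (e.g. FIPS mode)
        return digests

    print(sorted(content_digests(b"example payload")))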
- auth = "Basic " + standard_b64encode(user_pass).decode('ascii') - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\r\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--\r\n' - body = io.BytesIO() - for key, value in data.items(): - title = '\r\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(value, list): - value = [value] - for value in value: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = str(value).encode('utf-8') - body.write(sep_boundary) - body.write(title.encode('utf-8')) - body.write(b"\r\n\r\n") - body.write(value) - body.write(end_boundary) - body = body.getvalue() - - msg = "Submitting %s to %s" % (filename, self.repository) - self.announce(msg, log.INFO) - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s' % boundary, - 'Content-length': str(len(body)), - 'Authorization': auth, - } - - request = Request(self.repository, data=body, - headers=headers) - # send the data - try: - result = urlopen(request) - status = result.getcode() - reason = result.msg - except HTTPError as e: - status = e.code - reason = e.msg - except OSError as e: - self.announce(str(e), log.ERROR) - raise - - if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) - if self.show_response: - text = self._read_pypi_response(result) - msg = '\n'.join(('-' * 75, text, '-' * 75)) - self.announce(msg, log.INFO) - else: - msg = 'Upload failed (%s): %s' % (status, reason) - self.announce(msg, log.ERROR) - raise DistutilsError(msg) diff --git a/Lib/distutils/config.py b/Lib/distutils/config.py deleted file mode 100644 index a201c86a176844..00000000000000 --- a/Lib/distutils/config.py +++ /dev/null @@ -1,133 +0,0 @@ -"""distutils.pypirc - -Provides the PyPIRCCommand class, the base class for the command classes -that uses .pypirc in the distutils.command package. 
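The removed upload command encodes its POST payload as multipart/form-data by hand rather than through a library. A reduced sketch of that encoding, assuming each field value is either a plain value or a (filename, bytes) tuple; the boundary string and field names below are arbitrary examples:

    import io

    def encode_multipart(fields, boundary="XxXxBOUNDARYxXxX"):
        sep = b"\r\n--" + boundary.encode("ascii")
        end = sep + b"--\r\n"
        body = io.BytesIO()
        for name, value in fields.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % name
            if isinstance(value, tuple):         # (filename, raw bytes)
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = str(value).encode("utf-8")
            body.write(sep)
            body.write(title.encode("utf-8"))
            body.write(b"\r\n\r\n")
            body.write(value)
        body.write(end)
        return body.getvalue(), "multipart/form-data; boundary=%s" % boundary

    body, content_type = encode_multipart({"name": "demo", "content": ("demo.tar.gz", b"...")})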
-""" -import os -from configparser import RawConfigParser -import warnings - -from distutils.cmd import Command - -DEFAULT_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:%s -password:%s -""" - -class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ - DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' - DEFAULT_REALM = 'pypi' - repository = None - realm = None - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % \ - DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server')] - - boolean_options = ['show-response'] - - def _get_rc_file(self): - """Returns rc file path.""" - return os.path.join(os.path.expanduser('~'), '.pypirc') - - def _store_pypirc(self, username, password): - """Creates a default .pypirc file.""" - rc = self._get_rc_file() - with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: - f.write(DEFAULT_PYPIRC % (username, password)) - - def _read_pypirc(self): - """Reads the .pypirc file.""" - rc = self._get_rc_file() - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - repository = self.repository or self.DEFAULT_REPOSITORY - - config = RawConfigParser() - config.read(rc) - sections = config.sections() - if 'distutils' in sections: - # let's get the list of servers - index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] - if _servers == []: - # nothing set, let's try to get the default pypi - if 'pypi' in sections: - _servers = ['pypi'] - else: - # the file is not properly defined, returning - # an empty dict - return {} - for server in _servers: - current = {'server': server} - current['username'] = config.get(server, 'username') - - # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): - if config.has_option(server, key): - current[key] = config.get(server, key) - else: - current[key] = default - - # work around people having "repository" for the "pypi" - # section of their config set to the HTTP (rather than - # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): - current['repository'] = self.DEFAULT_REPOSITORY - return current - - if (current['server'] == repository or - current['repository'] == repository): - return current - elif 'server-login' in sections: - # old format - server = 'server-login' - if config.has_option(server, 'repository'): - repository = config.get(server, 'repository') - else: - repository = self.DEFAULT_REPOSITORY - return {'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM} - - return {} - - def _read_pypi_response(self, response): - """Read and decode a PyPI HTTP response.""" - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - import cgi - content_type = response.getheader('content-type', 'text/plain') - encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') - return response.read().decode(encoding) - - def initialize_options(self): - """Initialize options.""" - self.repository = None - self.realm = None - self.show_response = 0 - - def finalize_options(self): - """Finalizes options.""" - if self.repository is None: - self.repository = self.DEFAULT_REPOSITORY - if self.realm is None: - 
self.realm = self.DEFAULT_REALM diff --git a/Lib/distutils/core.py b/Lib/distutils/core.py deleted file mode 100644 index d603d4a45a73ee..00000000000000 --- a/Lib/distutils/core.py +++ /dev/null @@ -1,234 +0,0 @@ -"""distutils.core - -The only module that needs to be imported to use the Distutils; provides -the 'setup' function (which is to be called from the setup script). Also -indirectly provides the Distribution and Command classes, although they are -really defined in distutils.dist and distutils.cmd. -""" - -import os -import sys - -from distutils.debug import DEBUG -from distutils.errors import * - -# Mainly import these so setup scripts can "from distutils.core import" them. -from distutils.dist import Distribution -from distutils.cmd import Command -from distutils.config import PyPIRCCommand -from distutils.extension import Extension - -# This is a barebones help message generated displayed when the user -# runs the setup script with no arguments at all. More useful help -# is generated with various --help options: global help, list commands, -# and per-command help. -USAGE = """\ -usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: %(script)s --help [cmd1 cmd2 ...] - or: %(script)s --help-commands - or: %(script)s cmd --help -""" - -def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - -# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. -_setup_stop_after = None -_setup_distribution = None - -# Legal keyword arguments for the setup() function -setup_keywords = ('distclass', 'script_name', 'script_args', 'options', - 'name', 'version', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'url', 'license', - 'description', 'long_description', 'keywords', - 'platforms', 'classifiers', 'download_url', - 'requires', 'provides', 'obsoletes', - ) - -# Legal keyword arguments for the Extension constructor -extension_keywords = ('name', 'sources', 'include_dirs', - 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'swig_opts', 'export_symbols', 'depends', 'language') - -def setup (**attrs): - """The gateway to the Distutils: do everything your setup script needs - to do, in a highly flexible and user-driven way. Briefly: create a - Distribution instance; find and parse config files; parse the command - line; run each Distutils command found there, customized by the options - supplied to 'setup()' (as keyword arguments), in config files, and on - the command line. - - The Distribution instance might be an instance of a class supplied via - the 'distclass' keyword argument to 'setup'; if no such class is - supplied, then the Distribution class (in dist.py) is instantiated. - All other arguments to 'setup' (except for 'cmdclass') are used to set - attributes of the Distribution instance. - - The 'cmdclass' argument, if supplied, is a dictionary mapping command - names to command classes. Each command encountered on the command line - will be turned into a command class, which is in turn instantiated; any - class found in 'cmdclass' is used in place of the default, which is - (for command 'foo_bar') class 'foo_bar' in module - 'distutils.command.foo_bar'. The command class must provide a - 'user_options' attribute which is a list of option specifiers for - 'distutils.fancy_getopt'. 
Any command-line options between the current - and the next command are used to set attributes of the current command - object. - - When the entire command-line has been successfully parsed, calls the - 'run()' method on each command object in turn. This method will be - driven entirely by the Distribution object (which each command object - has a reference to, thanks to its constructor), and the - command-specific options that became attributes of each command - object. - """ - - global _setup_stop_after, _setup_distribution - - # Determine the distribution class -- either caller-supplied or - # our Distribution (see below). - klass = attrs.get('distclass') - if klass: - del attrs['distclass'] - else: - klass = Distribution - - if 'script_name' not in attrs: - attrs['script_name'] = os.path.basename(sys.argv[0]) - if 'script_args' not in attrs: - attrs['script_args'] = sys.argv[1:] - - # Create the Distribution instance, using the remaining arguments - # (ie. everything except distclass) to initialize it - try: - _setup_distribution = dist = klass(attrs) - except DistutilsSetupError as msg: - if 'name' not in attrs: - raise SystemExit("error in setup command: %s" % msg) - else: - raise SystemExit("error in %s setup command: %s" % \ - (attrs['name'], msg)) - - if _setup_stop_after == "init": - return dist - - # Find and parse the config file(s): they will override options from - # the setup script, but be overridden by the command line. - dist.parse_config_files() - - if DEBUG: - print("options (after parsing config files):") - dist.dump_option_dicts() - - if _setup_stop_after == "config": - return dist - - # Parse the command line and override config files; any - # command-line errors are the end user's fault, so turn them into - # SystemExit to suppress tracebacks. - try: - ok = dist.parse_command_line() - except DistutilsArgError as msg: - raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) - - if DEBUG: - print("options (after parsing command line):") - dist.dump_option_dicts() - - if _setup_stop_after == "commandline": - return dist - - # And finally, run all the commands found on the command line. - if ok: - try: - dist.run_commands() - except KeyboardInterrupt: - raise SystemExit("interrupted") - except OSError as exc: - if DEBUG: - sys.stderr.write("error: %s\n" % (exc,)) - raise - else: - raise SystemExit("error: %s" % (exc,)) - - except (DistutilsError, - CCompilerError) as msg: - if DEBUG: - raise - else: - raise SystemExit("error: " + str(msg)) - - return dist - -# setup () - - -def run_setup (script_name, script_args=None, stop_after="run"): - """Run a setup script in a somewhat controlled environment, and - return the Distribution instance that drives things. This is useful - if you need to find out the distribution meta-data (passed as - keyword args from 'script' to 'setup()', or the contents of the - config files or command-line. - - 'script_name' is a file that will be read and run with 'exec()'; - 'sys.argv[0]' will be replaced with 'script' for the duration of the - call. 'script_args' is a list of strings; if supplied, - 'sys.argv[1:]' will be replaced by 'script_args' for the duration of - the call. 
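Usage sketch for the run_setup() helper defined above: with stop_after='config' it loads a setup script, applies its keyword arguments and config files, and returns the Distribution without running any command. This assumes a setup.py in the current directory and an interpreter that still ships distutils (3.11 or earlier):

    from distutils.core import run_setup

    dist = run_setup("setup.py", stop_after="config")
    print(dist.get_name(), dist.get_version())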
- - 'stop_after' tells 'setup()' when to stop processing; possible - values: - init - stop after the Distribution instance has been created and - populated with the keyword arguments to 'setup()' - config - stop after config files have been parsed (and their data - stored in the Distribution instance) - commandline - stop after the command-line ('sys.argv[1:]' or 'script_args') - have been parsed (and the data stored in the Distribution) - run [default] - stop after all commands have been run (the same as if 'setup()' - had been called in the usual way - - Returns the Distribution instance, which provides all information - used to drive the Distutils. - """ - if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) - - global _setup_stop_after, _setup_distribution - _setup_stop_after = stop_after - - save_argv = sys.argv.copy() - g = {'__file__': script_name} - try: - try: - sys.argv[0] = script_name - if script_args is not None: - sys.argv[1:] = script_args - with open(script_name, 'rb') as f: - exec(f.read(), g) - finally: - sys.argv = save_argv - _setup_stop_after = None - except SystemExit: - # Hmm, should we do something if exiting with a non-zero code - # (ie. error)? - pass - - if _setup_distribution is None: - raise RuntimeError(("'distutils.core.setup()' was never called -- " - "perhaps '%s' is not a Distutils setup script?") % \ - script_name) - - # I wonder if the setup script's namespace -- g and l -- would be of - # any interest to callers? - #print "_setup_distribution:", _setup_distribution - return _setup_distribution - -# run_setup () diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py deleted file mode 100644 index 66c12dd35830b2..00000000000000 --- a/Lib/distutils/cygwinccompiler.py +++ /dev/null @@ -1,403 +0,0 @@ -"""distutils.cygwinccompiler - -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). -""" - -# problems: -# -# * if you use a msvc compiled python version (1.5.2) -# 1. you have to insert a __GNUC__ section in its config.h -# 2. you have to generate an import library for its dll -# - create a def-file for python??.dll -# - create an import library using -# dlltool --dllname python15.dll --def python15.def \ -# --output-lib libpython15.a -# -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# -# * We put export_symbols in a def-file, and don't use -# --export-all-symbols because it doesn't worked reliable in some -# tested configurations. And because other windows compilers also -# need their symbols specified this no serious problem. -# -# tested configurations: -# -# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works -# (after patching python's config.h and for C++ some other include files) -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works -# (ld doesn't support -shared, so we use dllwrap) -# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now -# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 -# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html -# - using gcc -mdll instead dllwrap doesn't work without -static because -# it tries to link against dlls instead their import libraries. (If -# it finds the dll first.) 
-# By specifying -static we force ld to link against the import libraries, -# this is windows standard and there are normally not the necessary symbols -# in the dlls. -# *** only the version of June 2000 shows these problems -# * cygwin gcc 3.2/ld 2.13.90 works -# (ld supports -shared) -# * mingw gcc 3.2/ld 2.13 works -# (ld supports -shared) - -import os -import sys -import copy -from subprocess import Popen, PIPE, check_output -import re - -from distutils.unixccompiler import UnixCCompiler -from distutils.file_util import write_file -from distutils.errors import (DistutilsExecError, CCompilerError, - CompileError, UnknownFileError) -from distutils.version import LooseVersion -from distutils.spawn import find_executable - -def get_msvcr(): - """Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - msc_pos = sys.version.find('MSC v.') - if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] - if msc_ver == '1300': - # MSVC 7.0 - return ['msvcr70'] - elif msc_ver == '1310': - # MSVC 7.1 - return ['msvcr71'] - elif msc_ver == '1400': - # VS2005 / MSVC 8.0 - return ['msvcr80'] - elif msc_ver == '1500': - # VS2008 / MSVC 9.0 - return ['msvcr90'] - elif msc_ver == '1600': - # VS2010 / MSVC 10.0 - return ['msvcr100'] - else: - raise ValueError("Unknown MS Compiler version %s " % msc_ver) - - -class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ - compiler_type = 'cygwin' - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".dll" - static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" - exe_extension = ".exe" - - def __init__(self, verbose=0, dry_run=0, force=0): - - UnixCCompiler.__init__(self, verbose, dry_run, force) - - status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) - if status is not CONFIG_H_OK: - self.warn( - "Python's pyconfig.h doesn't seem to support your compiler. " - "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." - % details) - - self.gcc_version, self.ld_version, self.dllwrap_version = \ - get_versions() - self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" % - (self.gcc_version, - self.ld_version, - self.dllwrap_version) ) - - # ld_version >= "2.10.90" and < "2.13" should also be able to use - # gcc -mdll instead of dllwrap - # Older dllwraps had own version numbers, newer ones use the - # same as the rest of binutils ( also ld ) - # dllwrap 2.10.90 is buggy - if self.ld_version >= "2.10.90": - self.linker_dll = "gcc" - else: - self.linker_dll = "dllwrap" - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # Hard-code GCC because that's what this is all about. - # XXX optimization, warnings etc. should be customizable. 
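get_msvcr() above keys off the 'MSC v.' marker embedded in sys.version on MSVC builds of CPython. A small sketch that extracts the same four-digit compiler version, printing None on non-MSVC builds:

    import sys

    def msc_version():
        pos = sys.version.find("MSC v.")
        if pos == -1:
            return None                          # not built with Microsoft's compiler
        return sys.version[pos + 6:pos + 10]     # e.g. '1600' for VS2010 / MSVC 10.0

    print(msc_version())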
- self.set_executables(compiler='gcc -mcygwin -O -Wall', - compiler_so='gcc -mcygwin -mdll -O -Wall', - compiler_cxx='g++ -mcygwin -O -Wall', - linker_exe='gcc -mcygwin', - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) - - # cygwin and mingw32 need different sets of libraries - if self.gcc_version == "2.91.57": - # cygwin shouldn't need msvcrt, but without the dlls will crash - # (gcc version 2.91.57) -- perhaps something about initialization - self.dll_libraries=["msvcrt"] - self.warn( - "Consider upgrading to a newer version of gcc") - else: - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compiles the source by spawning GCC and windres if needed.""" - if ext == '.rc' or ext == '.res': - # gcc needs '.res' and '.rc' compiled to object files !!! - try: - self.spawn(["windres", "-i", src, "-o", obj]) - except DistutilsExecError as msg: - raise CompileError(msg) - else: # for other files use the C-compiler - try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - """Link the objects.""" - # use separate copies, so we can modify the lists - extra_preargs = copy.copy(extra_preargs or []) - libraries = copy.copy(libraries or []) - objects = copy.copy(objects or []) - - # Additional libraries - libraries.extend(self.dll_libraries) - - # handle export symbols by creating a def-file - # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - # (The linker doesn't do anything if output is up-to-date. - # So it would probably better to check if we really need this, - # but for this we had to insert some unchanged parts of - # UnixCCompiler, and this is not what we want.) 
- - # we want to put some files in the same directory as the - # object files are, build_temp doesn't help much - # where are the object files - temp_dir = os.path.dirname(objects[0]) - # name of dll to give the helper files the same base name - (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) - - # generate the filenames for these files - def_file = os.path.join(temp_dir, dll_name + ".def") - lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") - - # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] - for sym in export_symbols: - contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # next add options for def-file and to creating import libraries - - # dllwrap uses different options than gcc/ld - if self.linker_dll == "dllwrap": - extra_preargs.extend(["--output-lib", lib_file]) - # for dllwrap we have to use a special option - extra_preargs.extend(["--def", def_file]) - # we use gcc/ld here and can be sure ld is >= 2.9.10 - else: - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) - # for gcc/ld the def-file is specified as any object files - objects.append(def_file) - - #end: if ((export_symbols is not None) and - # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - - # who wants symbols and a many times larger output file - # should explicitly switch the debug mode on - # otherwise we let dllwrap/ld strip the output file - # (On my machine: 10KiB < stripped_file < ??100KiB - # unstripped_file = stripped_file + XXX KiB - # ( XXX=254 for a typical python extension)) - if not debug: - extra_preargs.append("-s") - - UnixCCompiler.link(self, target_desc, objects, output_filename, - output_dir, libraries, library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, - target_lang) - - # -- Miscellaneous methods ----------------------------------------- - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - """Adds supports for rc and res files.""" - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext in ('.res', '.rc'): - # these need to be compiled to object files - obj_names.append (os.path.join(output_dir, - base + ext + self.obj_extension)) - else: - obj_names.append (os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - -# the same as cygwin plus some additional parameters -class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. - """ - compiler_type = 'mingw32' - - def __init__(self, verbose=0, dry_run=0, force=0): - - CygwinCCompiler.__init__ (self, verbose, dry_run, force) - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # A real mingw32 doesn't need to specify a different entry point, - # but cygwin 2.91.57 in no-cygwin-mode needs it. 
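For exported symbols, the removed link() step above writes a module-definition (.def) file that the GNU toolchain consumes instead of --export-all-symbols. A self-contained sketch of that file format; the DLL name and symbol here are hypothetical:

    def write_def_file(path, dll_name, export_symbols):
        # LIBRARY names the DLL; each EXPORTS line names one exported symbol.
        lines = ["LIBRARY %s" % dll_name, "EXPORTS"] + list(export_symbols)
        with open(path, "w") as f:
            f.write("\n".join(lines) + "\n")

    write_def_file("demo.def", "demo.dll", ["PyInit_demo"])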
- if self.gcc_version <= "2.91.57": - entry_point = '--entry _DllMain@12' - else: - entry_point = '' - - if is_cygwingcc(): - raise CCompilerError( - 'Cygwin gcc cannot be used with --compiler=mingw32') - - self.set_executables(compiler='gcc -O -Wall', - compiler_so='gcc -mdll -O -Wall', - compiler_cxx='g++ -O -Wall', - linker_exe='gcc', - linker_so='%s %s %s' - % (self.linker_dll, shared_option, - entry_point)) - # Maybe we should also append -mthreads, but then the finished - # dlls need another dll (mingwm10.dll see Mingw32 docs) - # (-mthreads: Support thread-safe exception handling on `Mingw32') - - # no additional libraries needed - self.dll_libraries=[] - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - -# Because these compilers aren't configured in Python's pyconfig.h file by -# default, we should at least warn the user if he is using an unmodified -# version. - -CONFIG_H_OK = "ok" -CONFIG_H_NOTOK = "not ok" -CONFIG_H_UNCERTAIN = "uncertain" - -def check_config_h(): - """Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: - - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h - - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - - # XXX since this function also checks sys.version, it's not strictly a - # "pyconfig.h" check -- should probably be renamed... - - from distutils import sysconfig - - # if sys.version contains GCC then python was compiled with GCC, and the - # pyconfig.h file should be OK - if "GCC" in sys.version: - return CONFIG_H_OK, "sys.version mentions 'GCC'" - - # let's see if __GNUC__ is mentioned in python.h - fn = sysconfig.get_config_h_filename() - try: - config_h = open(fn) - try: - if "__GNUC__" in config_h.read(): - return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn - else: - return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - finally: - config_h.close() - except OSError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) - -RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)') - -def _find_exe_version(cmd): - """Find the version of an executable by running `cmd` in the shell. - - If the command is not found, or the output does not match - `RE_VERSION`, returns None. - """ - executable = cmd.split()[0] - if find_executable(executable) is None: - return None - out = Popen(cmd, shell=True, stdout=PIPE).stdout - try: - out_string = out.read() - finally: - out.close() - result = RE_VERSION.search(out_string) - if result is None: - return None - # LooseVersion works with strings - # so we need to decode our bytes - return LooseVersion(result.group(1).decode()) - -def get_versions(): - """ Try to find out the versions of gcc, ld and dllwrap. - - If not possible it returns None for it. 
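_find_exe_version() above shells out to each tool and pulls the first dotted version number from its output. A sketch of the same idea with subprocess.run and shutil.which standing in for the removed spawn helpers; it assumes gcc is on PATH:

    import re
    import shutil
    import subprocess

    RE_VERSION = re.compile(rb"(\d+\.\d+(\.\d+)*)")

    def exe_version(cmd):
        if shutil.which(cmd[0]) is None:
            return None                          # tool not installed
        out = subprocess.run(cmd, capture_output=True).stdout
        match = RE_VERSION.search(out)
        return match.group(1).decode() if match else None

    print(exe_version(["gcc", "-dumpversion"]))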
- """ - commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version'] - return tuple([_find_exe_version(cmd) for cmd in commands]) - -def is_cygwingcc(): - '''Try to determine if the gcc that would be used is from cygwin.''' - out_string = check_output(['gcc', '-dumpmachine']) - return out_string.strip().endswith(b'cygwin') diff --git a/Lib/distutils/debug.py b/Lib/distutils/debug.py deleted file mode 100644 index daf1660f0d8211..00000000000000 --- a/Lib/distutils/debug.py +++ /dev/null @@ -1,5 +0,0 @@ -import os - -# If DISTUTILS_DEBUG is anything other than the empty string, we run in -# debug mode. -DEBUG = os.environ.get('DISTUTILS_DEBUG') diff --git a/Lib/distutils/dep_util.py b/Lib/distutils/dep_util.py deleted file mode 100644 index d74f5e4e92f3ed..00000000000000 --- a/Lib/distutils/dep_util.py +++ /dev/null @@ -1,92 +0,0 @@ -"""distutils.dep_util - -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis.""" - -import os -from distutils.errors import DistutilsFileError - - -def newer (source, target): - """Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. - """ - if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % - os.path.abspath(source)) - if not os.path.exists(target): - return 1 - - from stat import ST_MTIME - mtime1 = os.stat(source)[ST_MTIME] - mtime2 = os.stat(target)[ST_MTIME] - - return mtime1 > mtime2 - -# newer () - - -def newer_pairwise (sources, targets): - """Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. Return a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - if len(sources) != len(targets): - raise ValueError("'sources' and 'targets' must be same length") - - # build a pair of lists (sources, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources)): - if newer(sources[i], targets[i]): - n_sources.append(sources[i]) - n_targets.append(targets[i]) - - return (n_sources, n_targets) - -# newer_pairwise () - - -def newer_group (sources, target, missing='error'): - """Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. - 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). - """ - # If the target doesn't even exist, then it's definitely out-of-date. - if not os.path.exists(target): - return 1 - - # Otherwise we have to find out the hard way: if *any* source file - # is more recent than 'target', then 'target' is out-of-date and - # we can immediately return true. If we fall through to the end - # of the loop, then 'target' is up-to-date and we return false. 
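dep_util.newer() above reduces to a single st_mtime comparison. A sketch of the same test, with a standard exception standing in for DistutilsFileError:

    import os

    def newer(source, target):
        if not os.path.exists(source):
            raise FileNotFoundError(source)      # the original raises DistutilsFileError
        if not os.path.exists(target):
            return True                          # a missing target is always out of date
        return os.stat(source).st_mtime > os.stat(target).st_mtime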
- from stat import ST_MTIME - target_mtime = os.stat(target)[ST_MTIME] - for source in sources: - if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file - pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return 1 # out-of-date - - source_mtime = os.stat(source)[ST_MTIME] - if source_mtime > target_mtime: - return 1 - else: - return 0 - -# newer_group () diff --git a/Lib/distutils/dir_util.py b/Lib/distutils/dir_util.py deleted file mode 100644 index d5cd8e3e24f46a..00000000000000 --- a/Lib/distutils/dir_util.py +++ /dev/null @@ -1,210 +0,0 @@ -"""distutils.dir_util - -Utility functions for manipulating directories and directory trees.""" - -import os -import errno -from distutils.errors import DistutilsFileError, DistutilsInternalError -from distutils import log - -# cache for by mkpath() -- in addition to cheapening redundant calls, -# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode -_path_created = {} - -# I don't use os.makedirs because a) it's new to Python 1.5.2, and -# b) it blows up if the directory already exists (I want to silently -# succeed in that case). -def mkpath(name, mode=0o777, verbose=1, dry_run=0): - """Create a directory and any missing ancestor directories. - - If the directory already exists (or if 'name' is the empty string, which - means the current directory, which of course exists), then do nothing. - Raise DistutilsFileError if unable to create some directory along the way - (eg. some sub-path exists, but is a file rather than a directory). - If 'verbose' is true, print a one-line summary of each mkdir to stdout. - Return the list of directories actually created. - """ - - global _path_created - - # Detect a common bug -- name is None - if not isinstance(name, str): - raise DistutilsInternalError( - "mkpath: 'name' must be a string (got %r)" % (name,)) - - # XXX what's the better way to handle verbosity? print as we create - # each directory in the path (the current behaviour), or only announce - # the creation of the whole path? (quite easy to do the latter since - # we're not using a recursive algorithm) - - name = os.path.normpath(name) - created_dirs = [] - if os.path.isdir(name) or name == '': - return created_dirs - if _path_created.get(os.path.abspath(name)): - return created_dirs - - (head, tail) = os.path.split(name) - tails = [tail] # stack of lone dirs to create - - while head and tail and not os.path.isdir(head): - (head, tail) = os.path.split(head) - tails.insert(0, tail) # push next higher dir onto stack - - # now 'head' contains the deepest directory that already exists - # (that is, the child of 'head' in 'name' is the highest directory - # that does *not* exist) - for d in tails: - #print "head = %s, d = %s: " % (head, d), - head = os.path.join(head, d) - abs_head = os.path.abspath(head) - - if _path_created.get(abs_head): - continue - - if verbose >= 1: - log.info("creating %s", head) - - if not dry_run: - try: - os.mkdir(head, mode) - except OSError as exc: - if not (exc.errno == errno.EEXIST and os.path.isdir(head)): - raise DistutilsFileError( - "could not create '%s': %s" % (head, exc.args[-1])) - created_dirs.append(head) - - _path_created[abs_head] = 1 - return created_dirs - -def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): - """Create all the empty directories under 'base_dir' needed to put 'files' - there. 
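mkpath() above predates os.makedirs tolerating existing directories, which is why it splits the path and creates one component at a time while caching what it has already made. A sketch of the core effect without the dry-run, verbosity and caching extras (it does not report which directories were newly created):

    import os

    def mkpath(name, mode=0o777):
        name = os.path.normpath(name)
        if name and not os.path.isdir(name):
            os.makedirs(name, mode, exist_ok=True)
        return name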
- - 'base_dir' is just the name of a directory which doesn't necessarily - exist yet; 'files' is a list of filenames to be interpreted relative to - 'base_dir'. 'base_dir' + the directory portion of every file in 'files' - will be created if it doesn't already exist. 'mode', 'verbose' and - 'dry_run' flags are as for 'mkpath()'. - """ - # First get the list of directories to create - need_dir = set() - for file in files: - need_dir.add(os.path.join(base_dir, os.path.dirname(file))) - - # Now create them - for dir in sorted(need_dir): - mkpath(dir, mode, verbose=verbose, dry_run=dry_run) - -def copy_tree(src, dst, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, update=0, verbose=1, dry_run=0): - """Copy an entire directory tree 'src' to a new location 'dst'. - - Both 'src' and 'dst' must be directory names. If 'src' is not a - directory, raise DistutilsFileError. If 'dst' does not exist, it is - created with 'mkpath()'. The end result of the copy is that every - file in 'src' is copied to 'dst', and directories under 'src' are - recursively copied to 'dst'. Return the list of files that were - copied or might have been copied, using their output name. The - return value is unaffected by 'update' or 'dry_run': it is simply - the list of all files under 'src', with the names changed to be - under 'dst'. - - 'preserve_mode' and 'preserve_times' are the same as for - 'copy_file'; note that they only apply to regular files, not to - directories. If 'preserve_symlinks' is true, symlinks will be - copied as symlinks (on platforms that support them!); otherwise - (the default), the destination of the symlink will be copied. - 'update' and 'verbose' are the same as for 'copy_file'. - """ - from distutils.file_util import copy_file - - if not dry_run and not os.path.isdir(src): - raise DistutilsFileError( - "cannot copy tree '%s': not a directory" % src) - try: - names = os.listdir(src) - except OSError as e: - if dry_run: - names = [] - else: - raise DistutilsFileError( - "error listing files in '%s': %s" % (src, e.strerror)) - - if not dry_run: - mkpath(dst, verbose=verbose) - - outputs = [] - - for n in names: - src_name = os.path.join(src, n) - dst_name = os.path.join(dst, n) - - if n.startswith('.nfs'): - # skip NFS rename files - continue - - if preserve_symlinks and os.path.islink(src_name): - link_dest = os.readlink(src_name) - if verbose >= 1: - log.info("linking %s -> %s", dst_name, link_dest) - if not dry_run: - os.symlink(link_dest, dst_name) - outputs.append(dst_name) - - elif os.path.isdir(src_name): - outputs.extend( - copy_tree(src_name, dst_name, preserve_mode, - preserve_times, preserve_symlinks, update, - verbose=verbose, dry_run=dry_run)) - else: - copy_file(src_name, dst_name, preserve_mode, - preserve_times, update, verbose=verbose, - dry_run=dry_run) - outputs.append(dst_name) - - return outputs - -def _build_cmdtuple(path, cmdtuples): - """Helper for remove_tree().""" - for f in os.listdir(path): - real_f = os.path.join(path,f) - if os.path.isdir(real_f) and not os.path.islink(real_f): - _build_cmdtuple(real_f, cmdtuples) - else: - cmdtuples.append((os.remove, real_f)) - cmdtuples.append((os.rmdir, path)) - -def remove_tree(directory, verbose=1, dry_run=0): - """Recursively remove an entire directory tree. - - Any errors are ignored (apart from being reported to stdout if 'verbose' - is true). 
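create_tree() above boils down to collecting the parent directory of every listed file and creating each one once. A sketch of that computation; the example paths are hypothetical and creation errors are left to propagate:

    import os

    def create_tree(base_dir, files, mode=0o777):
        # One makedirs call per distinct parent directory under base_dir.
        need_dir = {os.path.join(base_dir, os.path.dirname(f)) for f in files}
        for d in sorted(need_dir):
            os.makedirs(d, mode, exist_ok=True)

    create_tree("build/release", ["pkg/__init__.py", "pkg/sub/mod.py", "README"])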
- """ - global _path_created - - if verbose >= 1: - log.info("removing '%s' (and everything under it)", directory) - if dry_run: - return - cmdtuples = [] - _build_cmdtuple(directory, cmdtuples) - for cmd in cmdtuples: - try: - cmd[0](cmd[1]) - # remove dir from cache if it's already there - abspath = os.path.abspath(cmd[1]) - if abspath in _path_created: - del _path_created[abspath] - except OSError as exc: - log.warn("error removing %s: %s", directory, exc) - -def ensure_relative(path): - """Take the full path 'path', and make it a relative path. - - This is useful to make 'path' the second argument to os.path.join(). - """ - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path diff --git a/Lib/distutils/dist.py b/Lib/distutils/dist.py deleted file mode 100644 index 6cf0a0d6632dc7..00000000000000 --- a/Lib/distutils/dist.py +++ /dev/null @@ -1,1256 +0,0 @@ -"""distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - -import sys -import os -import re -from email import message_from_file - -try: - import warnings -except ImportError: - warnings = None - -from distutils.errors import * -from distutils.fancy_getopt import FancyGetopt, translate_longopt -from distutils.util import check_environ, strtobool, rfc822_escape -from distutils import log -from distutils.debug import DEBUG - -# Regex to define acceptable Distutils command names. This is not *quite* -# the same as a Python NAME -- I don't allow leading underscores. The fact -# that they're very similar is no coincidence; the default naming scheme is -# to look for a Python module named after the command. -command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') - - -def _ensure_list(value, fieldname): - if isinstance(value, str): - # a string containing comma separated values is okay. It will - # be converted to a list by Distribution.finalize_options(). - pass - elif not isinstance(value, list): - # passing a tuple or an iterator perhaps, warn and convert - typename = type(value).__name__ - msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'" - log.log(log.WARN, msg) - value = list(value) - return value - - -class Distribution: - """The core of the Distutils. Most of the work hiding behind 'setup' - is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. - - Setup scripts will almost never instantiate Distribution directly, - unless the 'setup()' function is totally inadequate to their needs. - However, it is conceivable that a setup script might wish to subclass - Distribution for some specialized purpose, and then pass the subclass - to 'setup()' as the 'distclass' keyword argument. If so, it is - necessary to respect the expectations that 'setup' has of Distribution. - See the code for 'setup()', in core.py, for details. - """ - - # 'global_options' describes the command-line options that may be - # supplied to the setup script prior to any actual commands. - # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of - # these global options. This list should be kept to a bare minimum, - # since every global option is also valid as a command option -- and we - # don't want to pollute the commands with too many options that they - # have minimal control over. - # The fourth entry for verbose means that it can be repeated. 
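The command_re pattern above is what dist.py checks every command name against before trying to import a command class; names with a leading underscore or non-identifier characters are rejected. For example:

    import re

    command_re = re.compile(r"^[a-zA-Z]([a-zA-Z0-9_]*)$")

    for name in ("build_ext", "sdist", "_private", "bdist-wheel"):
        print(name, bool(command_re.match(name)))   # True, True, False, False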
- global_options = [ - ('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), - ] - - # 'common_usage' is a short (2-3 line) string describing the common - # usage of the setup script. - common_usage = """\ -Common commands: (see '--help-commands' for more) - - setup.py build will build the package underneath 'build/' - setup.py install will install the package -""" - - # options that are not propagated to the commands - display_options = [ - ('help-commands', None, - "list all available commands"), - ('name', None, - "print package name"), - ('version', 'V', - "print package version"), - ('fullname', None, - "print -"), - ('author', None, - "print the author's name"), - ('author-email', None, - "print the author's email address"), - ('maintainer', None, - "print the maintainer's name"), - ('maintainer-email', None, - "print the maintainer's email address"), - ('contact', None, - "print the maintainer's name if known, else the author's"), - ('contact-email', None, - "print the maintainer's email address if known, else the author's"), - ('url', None, - "print the URL for this package"), - ('license', None, - "print the license of the package"), - ('licence', None, - "alias for --license"), - ('description', None, - "print the package description"), - ('long-description', None, - "print the long package description"), - ('platforms', None, - "print the list of platforms"), - ('classifiers', None, - "print the list of classifiers"), - ('keywords', None, - "print the list of keywords"), - ('provides', None, - "print the list of packages/modules provided"), - ('requires', None, - "print the list of packages/modules required"), - ('obsoletes', None, - "print the list of packages/modules made obsolete") - ] - display_option_names = [translate_longopt(x[0]) for x in display_options] - - # negative options are options that exclude other options - negative_opt = {'quiet': 'verbose'} - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, attrs=None): - """Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - - # Default values for our command-line options - self.verbose = 1 - self.dry_run = 0 - self.help = 0 - for attr in self.display_option_names: - setattr(self, attr, 0) - - # Store the distribution meta-data (name, version, author, and so - # forth) in a separate object -- we're getting to have enough - # information here (and enough command-line options) that it's - # worth it. Also delegate 'get_XXX()' methods to the 'metadata' - # object in a sneaky and underhanded (but efficient!) way. 
- self.metadata = DistributionMetadata() - for basename in self.metadata._METHOD_BASENAMES: - method_name = "get_" + basename - setattr(self, method_name, getattr(self.metadata, method_name)) - - # 'cmdclass' maps command names to class objects, so we - # can 1) quickly figure out which class to instantiate when - # we need to create a new command object, and 2) have a way - # for the setup script to override command classes - self.cmdclass = {} - - # 'command_packages' is a list of packages in which commands - # are searched for. The factory for command 'foo' is expected - # to be named 'foo' in the module 'foo' in one of the packages - # named here. This list is searched from the left; an error - # is raised if no named package provides the command being - # searched for. (Always access using get_command_packages().) - self.command_packages = None - - # 'script_name' and 'script_args' are usually set to sys.argv[0] - # and sys.argv[1:], but they can be overridden when the caller is - # not necessarily a setup script run from the command-line. - self.script_name = None - self.script_args = None - - # 'command_options' is where we store command options between - # parsing them (from config files, the command-line, etc.) and when - # they are actually needed -- ie. when the command in question is - # instantiated. It is a dictionary of dictionaries of 2-tuples: - # command_options = { command_name : { option : (source, value) } } - self.command_options = {} - - # 'dist_files' is the list of (command, pyversion, file) that - # have been created by any dist commands run so far. This is - # filled regardless of whether the run is dry or not. pyversion - # gives sysconfig.get_python_version() if the dist file is - # specific to a Python version, 'any' if it is good for all - # Python versions on the target platform, and '' for a source - # file. pyversion should not be used to specify minimum or - # maximum required Python versions; use the metainfo for that - # instead. - self.dist_files = [] - - # These options are really the business of various commands, rather - # than of the Distribution itself. We provide aliases for them in - # Distribution as a convenience to the developer. - self.packages = None - self.package_data = {} - self.package_dir = None - self.py_modules = None - self.libraries = None - self.headers = None - self.ext_modules = None - self.ext_package = None - self.include_dirs = None - self.extra_path = None - self.scripts = None - self.data_files = None - self.password = '' - - # And now initialize bookkeeping stuff that can't be supplied by - # the caller at all. 'command_obj' maps command names to - # Command instances -- that's how we enforce that every command - # class is a singleton. - self.command_obj = {} - - # 'have_run' maps command names to boolean values; it keeps track - # of whether we have actually run a particular command, to make it - # cheap to "run" a command whenever we think we might need to -- if - # it's already been done, no need for expensive filesystem - # operations, we just check the 'have_run' dictionary and carry on. - # It's only safe to query 'have_run' for a command class that has - # been instantiated -- a false value will be inserted when the - # command object is created, and replaced with a true value when - # the command is successfully run. Thus it's probably best to use - # '.get()' rather than a straight lookup. 
- self.have_run = {} - - # Now we'll use the attrs dictionary (ultimately, keyword args from - # the setup script) to possibly override any or all of these - # distribution options. - - if attrs: - # Pull out the set of command options and work on them - # specifically. Note that this order guarantees that aliased - # command options will override any supplied redundantly - # through the general options dictionary. - options = attrs.get('options') - if options is not None: - del attrs['options'] - for (command, cmd_options) in options.items(): - opt_dict = self.get_option_dict(command) - for (opt, val) in cmd_options.items(): - opt_dict[opt] = ("setup script", val) - - if 'licence' in attrs: - attrs['license'] = attrs['licence'] - del attrs['licence'] - msg = "'licence' distribution option is deprecated; use 'license'" - if warnings is not None: - warnings.warn(msg) - else: - sys.stderr.write(msg + "\n") - - # Now work on the rest of the attributes. Any attribute that's - # not already defined is invalid! - for (key, val) in attrs.items(): - if hasattr(self.metadata, "set_" + key): - getattr(self.metadata, "set_" + key)(val) - elif hasattr(self.metadata, key): - setattr(self.metadata, key, val) - elif hasattr(self, key): - setattr(self, key, val) - else: - msg = "Unknown distribution option: %s" % repr(key) - warnings.warn(msg) - - # no-user-cfg is handled before other command line args - # because other args override the config files, and this - # one is needed before we can load the config files. - # If attrs['script_args'] wasn't passed, assume false. - # - # This also make sure we just look at the global options - self.want_user_cfg = True - - if self.script_args is not None: - for arg in self.script_args: - if not arg.startswith('-'): - break - if arg == '--no-user-cfg': - self.want_user_cfg = False - break - - self.finalize_options() - - def get_option_dict(self, command): - """Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - dict = self.command_options.get(command) - if dict is None: - dict = self.command_options[command] = {} - return dict - - def dump_option_dicts(self, header=None, commands=None, indent=""): - from pprint import pformat - - if commands is None: # dump all command option dicts - commands = sorted(self.command_options.keys()) - - if header is not None: - self.announce(indent + header) - indent = indent + " " - - if not commands: - self.announce(indent + "no commands known yet") - return - - for cmd_name in commands: - opt_dict = self.command_options.get(cmd_name) - if opt_dict is None: - self.announce(indent + - "no option dict for '%s' command" % cmd_name) - else: - self.announce(indent + - "option dict for '%s' command:" % cmd_name) - out = pformat(opt_dict) - for line in out.split('\n'): - self.announce(indent + " " + line) - - # -- Config file finding/parsing methods --------------------------- - - def find_config_files(self): - """Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. 
where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. - """ - files = [] - check_environ() - - # Where to look for the system-wide Distutils config file - sys_dir = os.path.dirname(sys.modules['distutils'].__file__) - - # Look for the system config file - sys_file = os.path.join(sys_dir, "distutils.cfg") - if os.path.isfile(sys_file): - files.append(sys_file) - - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - # And look for the user config file - if self.want_user_cfg: - user_file = os.path.join(os.path.expanduser('~'), user_filename) - if os.path.isfile(user_file): - files.append(user_file) - - # All platforms support local setup.cfg - local_file = "setup.cfg" - if os.path.isfile(local_file): - files.append(local_file) - - if DEBUG: - self.announce("using config files: %s" % ', '.join(files)) - - return files - - def parse_config_files(self, filenames=None): - from configparser import ConfigParser - - # Ignore install directory options if we have a venv - if sys.prefix != sys.base_prefix: - ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] - else: - ignore_options = [] - - ignore_options = frozenset(ignore_options) - - if filenames is None: - filenames = self.find_config_files() - - if DEBUG: - self.announce("Distribution.parse_config_files():") - - parser = ConfigParser() - for filename in filenames: - if DEBUG: - self.announce(" reading %s" % filename) - parser.read(filename) - for section in parser.sections(): - options = parser.options(section) - opt_dict = self.get_option_dict(section) - - for opt in options: - if opt != '__name__' and opt not in ignore_options: - val = parser.get(section,opt) - opt = opt.replace('-', '_') - opt_dict[opt] = (filename, val) - - # Make the ConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - # If there was a "global" section in the config file, use it - # to set Distribution options. - - if 'global' in self.command_options: - for (opt, (src, val)) in self.command_options['global'].items(): - alias = self.negative_opt.get(opt) - try: - if alias: - setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! - setattr(self, opt, strtobool(val)) - else: - setattr(self, opt, val) - except ValueError as msg: - raise DistutilsOptionError(msg) - - # -- Command-line parsing methods ---------------------------------- - - def parse_command_line(self): - """Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in core.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Distutils commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. 
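parse_config_files() above folds every section of each config file into per-command option dictionaries, recording the source filename alongside each value. A reduced sketch of that loop for a single setup.cfg (it omits the venv install-option filtering shown above, and the filename is assumed to exist):

    from configparser import ConfigParser

    def read_setup_cfg(filename="setup.cfg"):
        parser = ConfigParser()
        parser.read(filename)
        command_options = {}
        for section in parser.sections():
            opts = command_options.setdefault(section, {})
            for opt in parser.options(section):
                if opt != "__name__":
                    opts[opt.replace("-", "_")] = (filename, parser.get(section, opt))
        return command_options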
Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command-line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. Return - true if command-line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). - """ - # - # We now have enough information to show the Macintosh dialog - # that allows the user to interactively specify the "command line". - # - toplevel_options = self._get_toplevel_options() - - # We have to parse the command line a bit at a time -- global - # options, then the first command, then its options, and so on -- - # because each command will be handled by a different class, and - # the options that are valid for a particular class aren't known - # until we have loaded the command class, which doesn't happen - # until we know what the command is. - - self.commands = [] - parser = FancyGetopt(toplevel_options + self.display_options) - parser.set_negative_aliases(self.negative_opt) - parser.set_aliases({'licence': 'license'}) - args = parser.getopt(args=self.script_args, object=self) - option_order = parser.get_option_order() - log.set_verbosity(self.verbose) - - # for display options we return immediately - if self.handle_display_options(option_order): - return - while args: - args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) - return - - # Handle the cases of --help as a "global" option, ie. - # "setup.py --help" and "setup.py --help command ...". For the - # former, we show global options (--verbose, --dry-run, etc.) - # and display-only options (--name, --version, etc.); for the - # latter, we omit the display-only options and show help for - # each command listed on the command line. - if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) - return - - # Oops, no commands found -- an end-user error - if not self.commands: - raise DistutilsArgError("no commands supplied") - - # All is well: return true - return True - - def _get_toplevel_options(self): - """Return the non-display options recognized at the top level. - - This includes options that are recognized *only* at the top - level as well as options recognized for commands. - """ - return self.global_options + [ - ("command-packages=", None, - "list of packages that provide distutils commands"), - ] - - def _parse_command_opts(self, parser, args): - """Parse the command-line options for a single command. - 'parser' must be a FancyGetopt instance; 'args' must be the list - of arguments, starting with the current command (whose options - we are about to parse). Returns a new version of 'args' with - the next command at the front of the list; will be the empty - list if there are no more commands on the command line. Returns - None if the user asked for help on this command. - """ - # late import because of mutual dependence between these modules - from distutils.cmd import Command - - # Pull the current command from the head of the command line - command = args[0] - if not command_re.match(command): - raise SystemExit("invalid command name '%s'" % command) - self.commands.append(command) - - # Dig up the command class that implements this command, so we - # 1) know that it's a valid command, and 2) know which options - # it takes. 
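The config-file handling shown above stores every option it reads together with the file it came from, translating '-' to '_' in option names so they can later become attribute names. A minimal standalone sketch of that bookkeeping, using a made-up setup.cfg section rather than the real parse_config_files():

    from configparser import ConfigParser

    SETUP_CFG = "[build_ext]\ninplace = 1\ninclude-dirs = /opt/include\n"

    parser = ConfigParser()
    parser.read_string(SETUP_CFG)

    command_options = {}
    for section in parser.sections():
        opt_dict = command_options.setdefault(section, {})
        for opt in parser.options(section):
            # remember the source file alongside the value, as distutils does
            opt_dict[opt.replace('-', '_')] = ("setup.cfg", parser.get(section, opt))

    assert command_options == {
        "build_ext": {"inplace": ("setup.cfg", "1"),
                      "include_dirs": ("setup.cfg", "/opt/include")}}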
- try: - cmd_class = self.get_command_class(command) - except DistutilsModuleError as msg: - raise DistutilsArgError(msg) - - # Require that the command class be derived from Command -- want - # to be sure that the basic "command" interface is implemented. - if not issubclass(cmd_class, Command): - raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) - - # Also make sure that the command object provides a list of its - # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - msg = ("command class %s must provide " - "'user_options' attribute (a list of tuples)") - raise DistutilsClassError(msg % cmd_class) - - # If the command class has a list of negative alias options, - # merge it in with the global negative aliases. - negative_opt = self.negative_opt - if hasattr(cmd_class, 'negative_opt'): - negative_opt = negative_opt.copy() - negative_opt.update(cmd_class.negative_opt) - - # Check for help_options in command class. They have a different - # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_options = fix_help_options(cmd_class.help_options) - else: - help_options = [] - - # All commands support the global options too, just by adding - # in 'global_options'. - parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) - parser.set_negative_aliases(negative_opt) - (args, opts) = parser.getopt(args[1:]) - if hasattr(opts, 'help') and opts.help: - self._show_help(parser, display_options=0, commands=[cmd_class]) - return - - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found=0 - for (help_option, short, desc, func) in cmd_class.help_options: - if hasattr(opts, parser.get_attr_name(help_option)): - help_option_found=1 - if callable(func): - func() - else: - raise DistutilsClassError( - "invalid help function %r for help option '%s': " - "must be a callable object (function, etc.)" - % (func, help_option)) - - if help_option_found: - return - - # Put the options from the command-line into their official - # holding pen, the 'command_options' dictionary. - opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).items(): - opt_dict[name] = ("command line", value) - - return args - - def finalize_options(self): - """Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - for attr in ('keywords', 'platforms'): - value = getattr(self.metadata, attr) - if value is None: - continue - if isinstance(value, str): - value = [elm.strip() for elm in value.split(',')] - setattr(self.metadata, attr, value) - - def _show_help(self, parser, global_options=1, display_options=1, - commands=[]): - """Show help for the setup script command-line in the form of - several lists of command-line options. 'parser' should be a - FancyGetopt instance; do not expect it to be returned in the - same state, as its option table will be reset to make it - generate the correct help text. - - If 'global_options' is true, lists the global options: - --verbose, --dry-run, etc. If 'display_options' is true, lists - the "display-only" options: --name, --version, etc. Finally, - lists per-command help for every command name or command class - in 'commands'. 
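finalize_options() above accepts either a list or a comma-separated string for 'keywords' and 'platforms' and normalizes the latter into a list of stripped values. A small standalone sketch of that normalization:

    def normalize_list_option(value):
        # mimics Distribution.finalize_options() for 'keywords'/'platforms'
        if isinstance(value, str):
            return [elm.strip() for elm in value.split(',')]
        return value

    assert normalize_list_option("packaging, distutils , build") == [
        "packaging", "distutils", "build"]
    assert normalize_list_option(["already", "a", "list"]) == ["already", "a", "list"]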
- """ - # late import because of mutual dependence between these modules - from distutils.core import gen_usage - from distutils.cmd import Command - - if global_options: - if display_options: - options = self._get_toplevel_options() - else: - options = self.global_options - parser.set_option_table(options) - parser.print_help(self.common_usage + "\nGlobal options:") - print('') - - if display_options: - parser.set_option_table(self.display_options) - parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") - print('') - - for command in self.commands: - if isinstance(command, type) and issubclass(command, Command): - klass = command - else: - klass = self.get_command_class(command) - if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): - parser.set_option_table(klass.user_options + - fix_help_options(klass.help_options)) - else: - parser.set_option_table(klass.user_options) - parser.print_help("Options for '%s' command:" % klass.__name__) - print('') - - print(gen_usage(self.script_name)) - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - from distutils.core import gen_usage - - # User just wants a list of commands -- we'll print it out and stop - # processing now (ie. if they ran "setup --help-commands foo bar", - # we ignore "foo bar"). - if self.help_commands: - self.print_commands() - print('') - print(gen_usage(self.script_name)) - return 1 - - # If user supplied any of the "display metadata" options, then - # display that metadata in the order in which the user supplied the - # metadata options. - any_display_options = 0 - is_display_option = {} - for option in self.display_options: - is_display_option[option[0]] = 1 - - for (opt, val) in option_order: - if val and is_display_option.get(opt): - opt = translate_longopt(opt) - value = getattr(self.metadata, "get_"+opt)() - if opt in ['keywords', 'platforms']: - print(','.join(value)) - elif opt in ('classifiers', 'provides', 'requires', - 'obsoletes'): - print('\n'.join(value)) - else: - print(value) - any_display_options = 1 - - return any_display_options - - def print_command_list(self, commands, header, max_length): - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - print(header + ":") - - for cmd in commands: - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - - print(" %-*s %s" % (max_length, cmd, description)) - - def print_commands(self): - """Print out a help message listing all available commands with a - description of each. The list is divided into "standard commands" - (listed in distutils.command.__all__) and "extra commands" - (mentioned in self.cmdclass, but not a standard command). The - descriptions come from the command class attribute - 'description'. 
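handle_display_options() above prints list-valued metadata differently depending on the field: keywords and platforms are joined with commas, while classifiers, provides, requires and obsoletes are printed one per line. A standalone sketch with made-up metadata values:

    def render(opt, value):
        if opt in ("keywords", "platforms"):
            return ",".join(value)
        if opt in ("classifiers", "provides", "requires", "obsoletes"):
            return "\n".join(value)
        return str(value)

    assert render("keywords", ["packaging", "distutils"]) == "packaging,distutils"
    assert render("classifiers", ["A :: B", "C :: D"]) == "A :: B\nC :: D"
    assert render("version", "1.0") == "1.0"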
- """ - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - max_length = 0 - for cmd in (std_commands + extra_commands): - if len(cmd) > max_length: - max_length = len(cmd) - - self.print_command_list(std_commands, - "Standard commands", - max_length) - if extra_commands: - print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) - - def get_command_list(self): - """Get a list of (command, description) tuples. - The list is divided into "standard commands" (listed in - distutils.command.__all__) and "extra commands" (mentioned in - self.cmdclass, but not a standard command). The descriptions come - from the command class attribute 'description'. - """ - # Currently this is only used on Mac OS, for the Mac-only GUI - # Distutils interface (by Jack Jansen) - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - rv = [] - for cmd in (std_commands + extra_commands): - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - rv.append((cmd, description)) - return rv - - # -- Command class/object methods ---------------------------------- - - def get_command_packages(self): - """Return a list of packages from which commands are loaded.""" - pkgs = self.command_packages - if not isinstance(pkgs, list): - if pkgs is None: - pkgs = '' - pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] - if "distutils.command" not in pkgs: - pkgs.insert(0, "distutils.command") - self.command_packages = pkgs - return pkgs - - def get_command_class(self, command): - """Return the class that implements the Distutils command named by - 'command'. First we check the 'cmdclass' dictionary; if the - command is mentioned there, we fetch the class object from the - dictionary and return it. Otherwise we load the command module - ("distutils.command." + command) and fetch the command class from - the module. The loaded class is also stored in 'cmdclass' - to speed future calls to 'get_command_class()'. - - Raises DistutilsModuleError if the expected module could not be - found, or if that module does not define the expected class. - """ - klass = self.cmdclass.get(command) - if klass: - return klass - - for pkgname in self.get_command_packages(): - module_name = "%s.%s" % (pkgname, command) - klass_name = command - - try: - __import__(module_name) - module = sys.modules[module_name] - except ImportError: - continue - - try: - klass = getattr(module, klass_name) - except AttributeError: - raise DistutilsModuleError( - "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) - - self.cmdclass[command] = klass - return klass - - raise DistutilsModuleError("invalid command '%s'" % command) - - def get_command_obj(self, command, create=1): - """Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. 
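get_command_packages() above turns the 'command_packages' option (None, a comma-separated string, or a list) into a list of package names that always starts with "distutils.command", so the built-in commands are always searched first. A standalone sketch of that normalization:

    def command_packages(pkgs):
        if not isinstance(pkgs, list):
            pkgs = [p.strip() for p in (pkgs or '').split(',') if p != '']
        if "distutils.command" not in pkgs:
            pkgs.insert(0, "distutils.command")
        return pkgs

    assert command_packages(None) == ["distutils.command"]
    assert command_packages("mypkg.commands, other.cmds") == [
        "distutils.command", "mypkg.commands", "other.cmds"]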
- """ - cmd_obj = self.command_obj.get(command) - if not cmd_obj and create: - if DEBUG: - self.announce("Distribution.get_command_obj(): " - "creating '%s' command object" % command) - - klass = self.get_command_class(command) - cmd_obj = self.command_obj[command] = klass(self) - self.have_run[command] = 0 - - # Set any options that were supplied in config files - # or on the command line. (NB. support for error - # reporting is lame here: any errors aren't reported - # until 'finalize_options()' is called, which means - # we won't report the source of the error.) - options = self.command_options.get(command) - if options: - self._set_command_options(cmd_obj, options) - - return cmd_obj - - def _set_command_options(self, command_obj, option_dict=None): - """Set the options for 'command_obj' from 'option_dict'. Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - if DEBUG: - self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): - if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) - try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, str) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) - except ValueError as msg: - raise DistutilsOptionError(msg) - - def reinitialize_command(self, command, reinit_subcommands=0): - """Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. 
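_set_command_options() above coerces string values for a command's 'boolean_options' through strtobool() and flips the result for entries in 'negative_opt'. A standalone sketch; the FakeBuild command stand-in is hypothetical, and strtobool() is re-implemented here with the same accepted spellings as the old distutils.util.strtobool():

    def strtobool(val):
        val = val.lower()
        if val in ('y', 'yes', 't', 'true', 'on', '1'):
            return 1
        if val in ('n', 'no', 'f', 'false', 'off', '0'):
            return 0
        raise ValueError("invalid truth value %r" % (val,))

    class FakeBuild:                        # hypothetical command stand-in
        boolean_options = ['force']
        negative_opt = {'no-inplace': 'inplace'}

    cmd = FakeBuild()
    setattr(cmd, 'force', strtobool('yes'))          # "--force=yes" from a config file
    setattr(cmd, FakeBuild.negative_opt['no-inplace'],
            not strtobool('1'))                      # negative option flips the value
    assert cmd.force == 1 and cmd.inplace is False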
- """ - from distutils.cmd import Command - if not isinstance(command, Command): - command_name = command - command = self.get_command_obj(command_name) - else: - command_name = command.get_command_name() - - if not command.finalized: - return command - command.initialize_options() - command.finalized = 0 - self.have_run[command_name] = 0 - self._set_command_options(command) - - if reinit_subcommands: - for sub in command.get_sub_commands(): - self.reinitialize_command(sub, reinit_subcommands) - - return command - - # -- Methods that operate on the Distribution ---------------------- - - def announce(self, msg, level=log.INFO): - log.log(level, msg) - - def run_commands(self): - """Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - for cmd in self.commands: - self.run_command(cmd) - - # -- Methods that operate on its Commands -------------------------- - - def run_command(self, command): - """Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - # Already been here, done that? then return silently. - if self.have_run.get(command): - return - - log.info("running %s", command) - cmd_obj = self.get_command_obj(command) - cmd_obj.ensure_finalized() - cmd_obj.run() - self.have_run[command] = 1 - - # -- Distribution query methods ------------------------------------ - - def has_pure_modules(self): - return len(self.packages or self.py_modules or []) > 0 - - def has_ext_modules(self): - return self.ext_modules and len(self.ext_modules) > 0 - - def has_c_libraries(self): - return self.libraries and len(self.libraries) > 0 - - def has_modules(self): - return self.has_pure_modules() or self.has_ext_modules() - - def has_headers(self): - return self.headers and len(self.headers) > 0 - - def has_scripts(self): - return self.scripts and len(self.scripts) > 0 - - def has_data_files(self): - return self.data_files and len(self.data_files) > 0 - - def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) - - # -- Metadata query methods ---------------------------------------- - - # If you're looking for 'get_name()', 'get_version()', and so forth, - # they are defined in a sneaky way: the constructor binds self.get_XXX - # to self.metadata.get_XXX. The actual code is in the - # DistributionMetadata class, below. - -class DistributionMetadata: - """Dummy class to hold the distribution meta-data: name, version, - author, and so forth. 
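run_command() above is idempotent per Distribution: the 'have_run' dictionary records which commands have executed, so asking for the same command twice does nothing the second time. A tiny standalone sketch of that bookkeeping:

    have_run = {}
    executed = []

    def run_command(name):
        if have_run.get(name):      # already been here, done that
            return
        executed.append(name)       # stands in for cmd_obj.run()
        have_run[name] = 1

    for cmd in ["build", "build", "install"]:
        run_command(cmd)
    assert executed == ["build", "install"]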
- """ - - _METHOD_BASENAMES = ("name", "version", "author", "author_email", - "maintainer", "maintainer_email", "url", - "license", "description", "long_description", - "keywords", "platforms", "fullname", "contact", - "contact_email", "classifiers", "download_url", - # PEP 314 - "provides", "requires", "obsoletes", - ) - - def __init__(self, path=None): - if path is not None: - self.read_pkg_file(open(path)) - else: - self.name = None - self.version = None - self.author = None - self.author_email = None - self.maintainer = None - self.maintainer_email = None - self.url = None - self.license = None - self.description = None - self.long_description = None - self.keywords = None - self.platforms = None - self.classifiers = None - self.download_url = None - # PEP 314 - self.provides = None - self.requires = None - self.obsoletes = None - - def read_pkg_file(self, file): - """Reads the metadata values from a file object.""" - msg = message_from_file(file) - - def _read_field(name): - value = msg[name] - if value == 'UNKNOWN': - return None - return value - - def _read_list(name): - values = msg.get_all(name, None) - if values == []: - return None - return values - - metadata_version = msg['metadata-version'] - self.name = _read_field('name') - self.version = _read_field('version') - self.description = _read_field('summary') - # we are filling author only. - self.author = _read_field('author') - self.maintainer = None - self.author_email = _read_field('author-email') - self.maintainer_email = None - self.url = _read_field('home-page') - self.license = _read_field('license') - - if 'download-url' in msg: - self.download_url = _read_field('download-url') - else: - self.download_url = None - - self.long_description = _read_field('description') - self.description = _read_field('summary') - - if 'keywords' in msg: - self.keywords = _read_field('keywords').split(',') - - self.platforms = _read_list('platform') - self.classifiers = _read_list('classifier') - - # PEP 314 - these fields only exist in 1.1 - if metadata_version == '1.1': - self.requires = _read_list('requires') - self.provides = _read_list('provides') - self.obsoletes = _read_list('obsoletes') - else: - self.requires = None - self.provides = None - self.obsoletes = None - - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) - - def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
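read_pkg_file() above parses PKG-INFO with the email package: single-valued headers collapse 'UNKNOWN' to None, and repeated headers such as Classifier come back as lists via get_all(). A standalone sketch using message_from_string() on a made-up PKG-INFO:

    from email import message_from_string
    from textwrap import dedent

    msg = message_from_string(dedent("""\
        Metadata-Version: 1.1
        Name: example
        Version: 1.0
        Author: UNKNOWN
        Classifier: Programming Language :: Python :: 3
        Classifier: Operating System :: OS Independent
    """))

    def read_field(name):
        value = msg[name]
        return None if value == 'UNKNOWN' else value

    assert read_field('name') == 'example'
    assert read_field('author') is None
    assert msg.get_all('classifier') == [
        'Programming Language :: Python :: 3',
        'Operating System :: OS Independent']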
- """ - version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): - version = '1.1' - - file.write('Metadata-Version: %s\n' % version) - file.write('Name: %s\n' % self.get_name()) - file.write('Version: %s\n' % self.get_version()) - file.write('Summary: %s\n' % self.get_description()) - file.write('Home-page: %s\n' % self.get_url()) - file.write('Author: %s\n' % self.get_contact()) - file.write('Author-email: %s\n' % self.get_contact_email()) - file.write('License: %s\n' % self.get_license()) - if self.download_url: - file.write('Download-URL: %s\n' % self.download_url) - - long_desc = rfc822_escape(self.get_long_description()) - file.write('Description: %s\n' % long_desc) - - keywords = ','.join(self.get_keywords()) - if keywords: - file.write('Keywords: %s\n' % keywords) - - self._write_list(file, 'Platform', self.get_platforms()) - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - - def _write_list(self, file, name, values): - for value in values: - file.write('%s: %s\n' % (name, value)) - - # -- Metadata query methods ---------------------------------------- - - def get_name(self): - return self.name or "UNKNOWN" - - def get_version(self): - return self.version or "0.0.0" - - def get_fullname(self): - return "%s-%s" % (self.get_name(), self.get_version()) - - def get_author(self): - return self.author or "UNKNOWN" - - def get_author_email(self): - return self.author_email or "UNKNOWN" - - def get_maintainer(self): - return self.maintainer or "UNKNOWN" - - def get_maintainer_email(self): - return self.maintainer_email or "UNKNOWN" - - def get_contact(self): - return self.maintainer or self.author or "UNKNOWN" - - def get_contact_email(self): - return self.maintainer_email or self.author_email or "UNKNOWN" - - def get_url(self): - return self.url or "UNKNOWN" - - def get_license(self): - return self.license or "UNKNOWN" - get_licence = get_license - - def get_description(self): - return self.description or "UNKNOWN" - - def get_long_description(self): - return self.long_description or "UNKNOWN" - - def get_keywords(self): - return self.keywords or [] - - def set_keywords(self, value): - self.keywords = _ensure_list(value, 'keywords') - - def get_platforms(self): - return self.platforms or ["UNKNOWN"] - - def set_platforms(self, value): - self.platforms = _ensure_list(value, 'platforms') - - def get_classifiers(self): - return self.classifiers or [] - - def set_classifiers(self, value): - self.classifiers = _ensure_list(value, 'classifiers') - - def get_download_url(self): - return self.download_url or "UNKNOWN" - - # PEP 314 - def get_requires(self): - return self.requires or [] - - def set_requires(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.requires = list(value) - - def get_provides(self): - return self.provides or [] - - def set_provides(self, value): - value = [v.strip() for v in value] - for v in value: - import distutils.versionpredicate - distutils.versionpredicate.split_provision(v) - self.provides = value - - def get_obsoletes(self): - return self.obsoletes or [] - - def set_obsoletes(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.obsoletes = list(value) - -def 
fix_help_options(options): - """Convert a 4-tuple 'help_options' list as found in various command - classes to the 3-tuple form required by FancyGetopt. - """ - new_options = [] - for help_tuple in options: - new_options.append(help_tuple[0:3]) - return new_options diff --git a/Lib/distutils/errors.py b/Lib/distutils/errors.py deleted file mode 100644 index 8b93059e19faa9..00000000000000 --- a/Lib/distutils/errors.py +++ /dev/null @@ -1,97 +0,0 @@ -"""distutils.errors - -Provides exceptions used by the Distutils modules. Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). - -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error".""" - -class DistutilsError (Exception): - """The root of all Distutils evil.""" - pass - -class DistutilsModuleError (DistutilsError): - """Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes).""" - pass - -class DistutilsClassError (DistutilsError): - """Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. implementing some part of the - "command "interface.""" - pass - -class DistutilsGetoptError (DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus.""" - pass - -class DistutilsArgError (DistutilsError): - """Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage.""" - pass - -class DistutilsFileError (DistutilsError): - """Any problems in the filesystem: expected file not found, etc. - Typically this is for problems that we detect before OSError - could be raised.""" - pass - -class DistutilsOptionError (DistutilsError): - """Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead.""" - pass - -class DistutilsSetupError (DistutilsError): - """For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'.""" - pass - -class DistutilsPlatformError (DistutilsError): - """We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. 
trying to compile - C files on a platform not supported by a CCompiler subclass.""" - pass - -class DistutilsExecError (DistutilsError): - """Any problems executing an external program (such as the C - compiler, when compiling C files).""" - pass - -class DistutilsInternalError (DistutilsError): - """Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!).""" - pass - -class DistutilsTemplateError (DistutilsError): - """Syntax error in a file list template.""" - -class DistutilsByteCompileError(DistutilsError): - """Byte compile error.""" - -# Exception classes used by the CCompiler implementation classes -class CCompilerError (Exception): - """Some compile/link operation failed.""" - -class PreprocessError (CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - -class CompileError (CCompilerError): - """Failure to compile one or more C/C++ source files.""" - -class LibError (CCompilerError): - """Failure to create a static library from one or more C/C++ object - files.""" - -class LinkError (CCompilerError): - """Failure to link one or more C/C++ object files into an executable - or shared library file.""" - -class UnknownFileError (CCompilerError): - """Attempt to process an unknown file type.""" diff --git a/Lib/distutils/extension.py b/Lib/distutils/extension.py deleted file mode 100644 index e85032ece8916f..00000000000000 --- a/Lib/distutils/extension.py +++ /dev/null @@ -1,241 +0,0 @@ -"""distutils.extension - -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts.""" - -import os -import re -import warnings - -# This class is really only used by the "build_ext" command, so it might -# make sense to put it in distutils.command.build_ext. However, that -# module is already big enough, and I want to make this class a bit more -# complex to simplify some common cases ("foo" module in "foo.c") and do -# better error-checking ("foo.c" actually exists). -# -# Also, putting this in build_ext.py means every setup script would have to -# import that large-ish module (indirectly, through distutils.core) in -# order to do anything. - -class Extension: - """Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. - *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. 
- include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. - depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. - """ - - # When adding arguments to this constructor, be sure to update - # setup_keywords in core.py. 
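For context, this is how the Extension class documented above is typically used in a setup script. The module name and paths are illustrative only, and the import assumes a Python where distutils is still available (this very change removes it; setuptools ships an equivalent Extension class with the same keywords):

    from distutils.core import Extension    # or: from setuptools import Extension

    ext = Extension(
        "spam._speedups",                   # dotted module name, not a file name
        sources=["src/speedups.c"],         # slash-separated paths relative to setup.py
        include_dirs=["src/include"],
        define_macros=[("WITH_FAST_PATH", None)],
        libraries=["m"],
        optional=True,                      # a failed build won't abort setup()
    )
    assert ext.name == "spam._speedups" and ext.undef_macros == []
    # In a real setup.py this would be passed to setup(..., ext_modules=[ext]).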
- def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts = None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): - if not isinstance(name, str): - raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and - all(isinstance(v, str) for v in sources)): - raise AssertionError("'sources' must be a list of strings") - - self.name = name - self.sources = sources - self.include_dirs = include_dirs or [] - self.define_macros = define_macros or [] - self.undef_macros = undef_macros or [] - self.library_dirs = library_dirs or [] - self.libraries = libraries or [] - self.runtime_library_dirs = runtime_library_dirs or [] - self.extra_objects = extra_objects or [] - self.extra_compile_args = extra_compile_args or [] - self.extra_link_args = extra_link_args or [] - self.export_symbols = export_symbols or [] - self.swig_opts = swig_opts or [] - self.depends = depends or [] - self.language = language - self.optional = optional - - # If there are unknown keyword options, warn about them - if len(kw) > 0: - options = [repr(option) for option in kw] - options = ', '.join(sorted(options)) - msg = "Unknown Extension options: %s" % options - warnings.warn(msg) - - def __repr__(self): - return '<%s.%s(%r) at %#x>' % ( - self.__class__.__module__, - self.__class__.__qualname__, - self.name, - id(self)) - - -def read_setup_file(filename): - """Reads a Setup file and returns Extension instances.""" - from distutils.sysconfig import (parse_makefile, expand_makefile_vars, - _variable_rx) - - from distutils.text_file import TextFile - from distutils.util import split_quoted - - # First pass over the file to gather "VAR = VALUE" assignments. - vars = parse_makefile(filename) - - # Second pass to gobble up the real content: lines of the form - # ... [ ...] [ ...] [ ...] - file = TextFile(filename, - strip_comments=1, skip_blanks=1, join_lines=1, - lstrip_ws=1, rstrip_ws=1) - try: - extensions = [] - - while True: - line = file.readline() - if line is None: # eof - break - if re.match(_variable_rx, line): # VAR=VALUE, handled in first pass - continue - - if line[0] == line[-1] == "*": - file.warn("'%s' lines not handled yet" % line) - continue - - line = expand_makefile_vars(line, vars) - words = split_quoted(line) - - # NB. this parses a slightly different syntax than the old - # makesetup script: here, there must be exactly one extension per - # line, and it must be the first word of the line. I have no idea - # why the old syntax supported multiple extensions per line, as - # they all wind up being the same. - - module = words[0] - ext = Extension(module, []) - append_next_word = None - - for word in words[1:]: - if append_next_word is not None: - append_next_word.append(word) - append_next_word = None - continue - - suffix = os.path.splitext(word)[1] - switch = word[0:2] ; value = word[2:] - - if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): - # hmm, should we do something about C vs. C++ sources? - # or leave it up to the CCompiler implementation to - # worry about? 
- ext.sources.append(word) - elif switch == "-I": - ext.include_dirs.append(value) - elif switch == "-D": - equals = value.find("=") - if equals == -1: # bare "-DFOO" -- no value - ext.define_macros.append((value, None)) - else: # "-DFOO=blah" - ext.define_macros.append((value[0:equals], - value[equals+2:])) - elif switch == "-U": - ext.undef_macros.append(value) - elif switch == "-C": # only here 'cause makesetup has it! - ext.extra_compile_args.append(word) - elif switch == "-l": - ext.libraries.append(value) - elif switch == "-L": - ext.library_dirs.append(value) - elif switch == "-R": - ext.runtime_library_dirs.append(value) - elif word == "-rpath": - append_next_word = ext.runtime_library_dirs - elif word == "-Xlinker": - append_next_word = ext.extra_link_args - elif word == "-Xcompiler": - append_next_word = ext.extra_compile_args - elif switch == "-u": - ext.extra_link_args.append(word) - if not value: - append_next_word = ext.extra_link_args - elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): - # NB. a really faithful emulation of makesetup would - # append a .o file to extra_objects only if it - # had a slash in it; otherwise, it would s/.o/.c/ - # and append it to sources. Hmmmm. - ext.extra_objects.append(word) - else: - file.warn("unrecognized argument '%s'" % word) - - extensions.append(ext) - finally: - file.close() - - return extensions diff --git a/Lib/distutils/fancy_getopt.py b/Lib/distutils/fancy_getopt.py deleted file mode 100644 index 7d170dd27731a5..00000000000000 --- a/Lib/distutils/fancy_getopt.py +++ /dev/null @@ -1,457 +0,0 @@ -"""distutils.fancy_getopt - -Wrapper around the standard getopt module that provides the following -additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object -""" - -import sys, string, re -import getopt -from distutils.errors import * - -# Much like command_re in distutils.core, this is close to but not quite -# the same as a Python NAME -- except, in the spirit of most GNU -# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) -# The similarities to NAME are again not a coincidence... -longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' -longopt_re = re.compile(r'^%s$' % longopt_pat) - -# For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) - -# This is used to translate long options to legitimate Python identifiers -# (for use as attributes of some object). -longopt_xlate = str.maketrans('-', '_') - -class FancyGetopt: - """Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - * boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ - - def __init__(self, option_table=None): - # The option table is (currently) a list of tuples. The - # tuples may have 3 or four values: - # (long_option, short_option, help_string [, repeatable]) - # if an option takes an argument, its long_option should have '=' - # appended; short_option should just be a single character, no ':' - # in any case. If a long_option doesn't have a corresponding - # short_option, short_option should be None. 
All option tuples - # must have long options. - self.option_table = option_table - - # 'option_index' maps long option names to entries in the option - # table (ie. those 3-tuples). - self.option_index = {} - if self.option_table: - self._build_index() - - # 'alias' records (duh) alias options; {'foo': 'bar'} means - # --foo is an alias for --bar - self.alias = {} - - # 'negative_alias' keeps track of options that are the boolean - # opposite of some other option - self.negative_alias = {} - - # These keep track of the information in the option table. We - # don't actually populate these structures until we're ready to - # parse the command-line, since the 'option_table' passed in here - # isn't necessarily the final word. - self.short_opts = [] - self.long_opts = [] - self.short2long = {} - self.attr_name = {} - self.takes_arg = {} - - # And 'option_order' is filled up in 'getopt()'; it records the - # original order of options (and their values) on the command-line, - # but expands short options, converts aliases, etc. - self.option_order = [] - - def _build_index(self): - self.option_index.clear() - for option in self.option_table: - self.option_index[option[0]] = option - - def set_option_table(self, option_table): - self.option_table = option_table - self._build_index() - - def add_option(self, long_option, short_option=None, help_string=None): - if long_option in self.option_index: - raise DistutilsGetoptError( - "option conflict: already an option '%s'" % long_option) - else: - option = (long_option, short_option, help_string) - self.option_table.append(option) - self.option_index[long_option] = option - - def has_option(self, long_option): - """Return true if the option table for this parser has an - option with long name 'long_option'.""" - return long_option in self.option_index - - def get_attr_name(self, long_option): - """Translate long option name 'long_option' to the form it - has as an attribute of some object: ie., translate hyphens - to underscores.""" - return long_option.translate(longopt_xlate) - - def _check_alias_dict(self, aliases, what): - assert isinstance(aliases, dict) - for (alias, opt) in aliases.items(): - if alias not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) - if opt not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) - - def set_aliases(self, alias): - """Set the aliases for this option parser.""" - self._check_alias_dict(alias, "alias") - self.alias = alias - - def set_negative_aliases(self, negative_alias): - """Set the negative aliases for this option parser. - 'negative_alias' should be a dictionary mapping option names to - option names, both the key and value must already be defined - in the option table.""" - self._check_alias_dict(negative_alias, "negative alias") - self.negative_alias = negative_alias - - def _grok_option_table(self): - """Populate the various data structures that keep tabs on the - option table. Called by 'getopt()' before it can do anything - worthwhile. 
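The option-table conventions described above are easy to see in isolation: a trailing '=' on the long name means the option takes an argument, the short option is a single character or None, and attribute names are the long names with hyphens turned into underscores (what get_attr_name() does). A standalone sketch:

    longopt_xlate = str.maketrans('-', '_')

    def describe(option):
        long_opt, short, _help = option
        takes_arg = long_opt.endswith('=')
        name = long_opt.rstrip('=')
        return (name, short, takes_arg, name.translate(longopt_xlate))

    assert describe(('build-lib=', 'b', "directory for compiled modules")) == \
        ('build-lib', 'b', True, 'build_lib')
    assert describe(('force', 'f', "forcibly rebuild everything")) == \
        ('force', 'f', False, 'force')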
- """ - self.long_opts = [] - self.short_opts = [] - self.short2long.clear() - self.repeat = {} - - for option in self.option_table: - if len(option) == 3: - long, short, help = option - repeat = 0 - elif len(option) == 4: - long, short, help, repeat = option - else: - # the option table is part of the code, so simply - # assert that it is correct - raise ValueError("invalid option tuple: %r" % (option,)) - - # Type- and value-check the option names - if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError(("invalid long option '%s': " - "must be a string of length >= 2") % long) - - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError("invalid short option '%s': " - "must a single character or None" % short) - - self.repeat[long] = repeat - self.long_opts.append(long) - - if long[-1] == '=': # option takes an argument? - if short: short = short + ':' - long = long[0:-1] - self.takes_arg[long] = 1 - else: - # Is option is a "negative alias" for some other option (eg. - # "quiet" == "!verbose")? - alias_to = self.negative_alias.get(long) - if alias_to is not None: - if self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid negative alias '%s': " - "aliased option '%s' takes a value" - % (long, alias_to)) - - self.long_opts[-1] = long # XXX redundant?! - self.takes_arg[long] = 0 - - # If this is an alias option, make sure its "takes arg" flag is - # the same as the option it's aliased to. - alias_to = self.alias.get(long) - if alias_to is not None: - if self.takes_arg[long] != self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't" - % (long, alias_to)) - - # Now enforce some bondage on the long option name, so we can - # later translate it to an attribute name on some object. Have - # to do this a bit late to make sure we've removed any trailing - # '='. - if not longopt_re.match(long): - raise DistutilsGetoptError( - "invalid long option name '%s' " - "(must be letters, numbers, hyphens only" % long) - - self.attr_name[long] = self.get_attr_name(long) - if short: - self.short_opts.append(short) - self.short2long[short[0]] = long - - def getopt(self, args=None, object=None): - """Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - if args is None: - args = sys.argv[1:] - if object is None: - object = OptionDummy() - created_object = True - else: - created_object = False - - self._grok_option_table() - - short_opts = ' '.join(self.short_opts) - try: - opts, args = getopt.getopt(args, short_opts, self.long_opts) - except getopt.error as msg: - raise DistutilsArgError(msg) - - for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option - opt = self.short2long[opt[1]] - else: - assert len(opt) > 2 and opt[:2] == '--' - opt = opt[2:] - - alias = self.alias.get(opt) - if alias: - opt = alias - - if not self.takes_arg[opt]: # boolean option? 
- assert val == '', "boolean option can't have value" - alias = self.negative_alias.get(opt) - if alias: - opt = alias - val = 0 - else: - val = 1 - - attr = self.attr_name[opt] - # The only repeating option at the moment is 'verbose'. - # It has a negative option -q quiet, which should set verbose = 0. - if val and self.repeat.get(attr) is not None: - val = getattr(object, attr, 0) + 1 - setattr(object, attr, val) - self.option_order.append((opt, val)) - - # for opts - if created_object: - return args, object - else: - return args - - def get_option_order(self): - """Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - if self.option_order is None: - raise RuntimeError("'getopt()' hasn't been called yet") - else: - return self.option_order - - def generate_help(self, header=None): - """Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ - # Blithely assume the option table is good: probably wouldn't call - # 'generate_help()' unless you've already called 'getopt()'. - - # First pass: determine maximum length of long option names - max_opt = 0 - for option in self.option_table: - long = option[0] - short = option[1] - l = len(long) - if long[-1] == '=': - l = l - 1 - if short is not None: - l = l + 5 # " (-x)" where short == 'x' - if l > max_opt: - max_opt = l - - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter - - # Typical help block looks like this: - # --foo controls foonabulation - # Help block for longest option looks like this: - # --flimflam set the flim-flam level - # and with wrapped text: - # --flimflam set the flim-flam level (must be between - # 0 and 100, except on Tuesdays) - # Options with short names will have the short name shown (but - # it doesn't contribute to max_opt): - # --foo (-f) controls foonabulation - # If adding the short option would make the left column too wide, - # we push the explanation off to the next line - # --flimflam (-l) - # set the flim-flam level - # Important parameters: - # - 2 spaces before option block start lines - # - 2 dashes for each long option name - # - min. 2 spaces between option and explanation (gutter) - # - 5 characters (incl. space) for short option name - - # Now generate lines of help text. (If 80 columns were good enough - # for Jesus, then 78 columns are good enough for me!) 
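The getopt() loop above treats 'verbose' as a repeatable option: each occurrence bumps the attribute by one, and its negative alias ('quiet') resets it to zero. A simplified standalone sketch of that behaviour:

    class Opts:
        pass

    repeat = {'verbose': 1}                 # 'verbose' is marked repeatable
    negative_alias = {'quiet': 'verbose'}

    def apply_flag(obj, opt, val=1):
        if opt in negative_alias:           # --quiet -> verbose = 0
            opt, val = negative_alias[opt], 0
        if val and repeat.get(opt) is not None:
            val = getattr(obj, opt, 0) + 1  # count repeated occurrences
        setattr(obj, opt, val)

    o = Opts()
    for flag in ['verbose', 'verbose', 'verbose']:
        apply_flag(o, flag)
    assert o.verbose == 3
    apply_flag(o, 'quiet')
    assert o.verbose == 0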
- line_width = 78 - text_width = line_width - opt_width - big_indent = ' ' * opt_width - if header: - lines = [header] - else: - lines = ['Option summary:'] - - for option in self.option_table: - long, short, help = option[:3] - text = wrap_text(help, text_width) - if long[-1] == '=': - long = long[0:-1] - - # Case 1: no short option at all (makes life easy) - if short is None: - if text: - lines.append(" --%-*s %s" % (max_opt, long, text[0])) - else: - lines.append(" --%-*s " % (max_opt, long)) - - # Case 2: we have a short option, so we have to include it - # just after the long option - else: - opt_names = "%s (-%s)" % (long, short) - if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) - else: - lines.append(" --%-*s" % opt_names) - - for l in text[1:]: - lines.append(big_indent + l) - return lines - - def print_help(self, header=None, file=None): - if file is None: - file = sys.stdout - for line in self.generate_help(header): - file.write(line + "\n") - - -def fancy_getopt(options, negative_opt, object, args): - parser = FancyGetopt(options) - parser.set_negative_aliases(negative_opt) - return parser.getopt(args, object) - - -WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} - -def wrap_text(text, width): - """wrap_text(text : string, width : int) -> [string] - - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - if text is None: - return [] - if len(text) <= width: - return [text] - - text = text.expandtabs() - text = text.translate(WS_TRANS) - chunks = re.split(r'( +|-+)', text) - chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings - lines = [] - - while chunks: - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line - - while chunks: - l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in - cur_line.append(chunks[0]) - del chunks[0] - cur_len = cur_len + l - else: # this line is full - # drop last chunk if all space - if cur_line and cur_line[-1][0] == ' ': - del cur_line[-1] - break - - if chunks: # any chunks left to process? - # if the current line is still empty, then we had a single - # chunk that's too big too fit on a line -- so we break - # down and break it up at the line width - if cur_len == 0: - cur_line.append(chunks[0][0:width]) - chunks[0] = chunks[0][width:] - - # all-whitespace chunks at the end of a line can be discarded - # (and we know from the re.split above that if a chunk has - # *any* whitespace, it is *all* whitespace) - if chunks[0][0] == ' ': - del chunks[0] - - # and store this line in the list-of-all-lines -- as a single - # string, of course! - lines.append(''.join(cur_line)) - - return lines - - -def translate_longopt(opt): - """Convert a long option name to a valid Python identifier by - changing "-" to "_". - """ - return opt.translate(longopt_xlate) - - -class OptionDummy: - """Dummy class just used as a place to hold command-line option - values as instance attributes.""" - - def __init__(self, options=[]): - """Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None.""" - for opt in options: - setattr(self, opt, None) - - -if __name__ == "__main__": - text = """\ -Tra-la-la, supercalifragilisticexpialidocious. -How *do* you spell that odd word, anyways? 
-(Someone ask Mary -- she'll know [or she'll -say, "How should I know?"].)""" - - for w in (10, 20, 30, 40): - print("width: %d" % w) - print("\n".join(wrap_text(text, w))) - print() diff --git a/Lib/distutils/file_util.py b/Lib/distutils/file_util.py deleted file mode 100644 index b3fee35a6cce81..00000000000000 --- a/Lib/distutils/file_util.py +++ /dev/null @@ -1,238 +0,0 @@ -"""distutils.file_util - -Utility functions for operating on single files. -""" - -import os -from distutils.errors import DistutilsFileError -from distutils import log - -# for generating verbose output in 'copy_file()' -_copy_action = { None: 'copying', - 'hard': 'hard linking', - 'sym': 'symbolically linking' } - - -def _copy_file_contents(src, dst, buffer_size=16*1024): - """Copy the file 'src' to 'dst'; both must be filenames. Any error - opening either file, reading from 'src', or writing to 'dst', raises - DistutilsFileError. Data is read/written in chunks of 'buffer_size' - bytes (default 16k). No attempt is made to handle anything apart from - regular files. - """ - # Stolen from shutil module in the standard library, but with - # custom error-handling added. - fsrc = None - fdst = None - try: - try: - fsrc = open(src, 'rb') - except OSError as e: - raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) - - if os.path.exists(dst): - try: - os.unlink(dst) - except OSError as e: - raise DistutilsFileError( - "could not delete '%s': %s" % (dst, e.strerror)) - - try: - fdst = open(dst, 'wb') - except OSError as e: - raise DistutilsFileError( - "could not create '%s': %s" % (dst, e.strerror)) - - while True: - try: - buf = fsrc.read(buffer_size) - except OSError as e: - raise DistutilsFileError( - "could not read from '%s': %s" % (src, e.strerror)) - - if not buf: - break - - try: - fdst.write(buf) - except OSError as e: - raise DistutilsFileError( - "could not write to '%s': %s" % (dst, e.strerror)) - finally: - if fdst: - fdst.close() - if fsrc: - fsrc.close() - -def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, - link=None, verbose=1, dry_run=0): - """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. (If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - # XXX if the destination file already exists, we clobber it if - # copying, but blow up if linking. Hmmm. And I don't know what - # macostools.copyfile() does. 
Should definitely be consistent, and - # should probably blow up if destination exists and we would be - # changing it (ie. it's not already a hard/soft link to src OR - # (not update) and (src newer than dst). - - from distutils.dep_util import newer - from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE - - if not os.path.isfile(src): - raise DistutilsFileError( - "can't copy '%s': doesn't exist or not a regular file" % src) - - if os.path.isdir(dst): - dir = dst - dst = os.path.join(dst, os.path.basename(src)) - else: - dir = os.path.dirname(dst) - - if update and not newer(src, dst): - if verbose >= 1: - log.debug("not copying %s (output up-to-date)", src) - return (dst, 0) - - try: - action = _copy_action[link] - except KeyError: - raise ValueError("invalid value '%s' for 'link' argument" % link) - - if verbose >= 1: - if os.path.basename(dst) == os.path.basename(src): - log.info("%s %s -> %s", action, src, dir) - else: - log.info("%s %s -> %s", action, src, dst) - - if dry_run: - return (dst, 1) - - # If linking (hard or symbolic), use the appropriate system call - # (Unix only, of course, but that's the caller's responsibility) - elif link == 'hard': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - try: - os.link(src, dst) - return (dst, 1) - except OSError: - # If hard linking fails, fall back on copying file - # (some special filesystems don't support hard linking - # even under Unix, see issue #8876). - pass - elif link == 'sym': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - os.symlink(src, dst) - return (dst, 1) - - # Otherwise (non-Mac, not linking), copy the file contents and - # (optionally) copy the times and mode. - _copy_file_contents(src, dst) - if preserve_mode or preserve_times: - st = os.stat(src) - - # According to David Ascher , utime() should be done - # before chmod() (at least under NT). - if preserve_times: - os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) - if preserve_mode: - os.chmod(dst, S_IMODE(st[ST_MODE])) - - return (dst, 1) - - -# XXX I suspect this is Unix-specific -- need porting help! -def move_file (src, dst, - verbose=1, - dry_run=0): - - """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? 
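move_file(), whose docstring ends just above, handles cross-device moves by falling back to copy-and-delete when os.rename() fails with EXDEV (source and destination on different filesystems). A standalone sketch of that pattern, using shutil.copy2() as a stand-in for distutils' copy_file():

    import errno, os, shutil, tempfile

    def move(src, dst):
        try:
            os.rename(src, dst)
        except OSError as e:
            if e.errno != errno.EXDEV:
                raise
            shutil.copy2(src, dst)          # copy contents, mode and times
            os.unlink(src)
        return dst

    with tempfile.TemporaryDirectory() as d:
        src = os.path.join(d, "a.txt")
        with open(src, "w") as f:
            f.write("hello")
        moved = move(src, os.path.join(d, "b.txt"))
        assert os.path.exists(moved) and not os.path.exists(src)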
- """ - from os.path import exists, isfile, isdir, basename, dirname - import errno - - if verbose >= 1: - log.info("moving %s -> %s", src, dst) - - if dry_run: - return dst - - if not isfile(src): - raise DistutilsFileError("can't move '%s': not a regular file" % src) - - if isdir(dst): - dst = os.path.join(dst, basename(src)) - elif exists(dst): - raise DistutilsFileError( - "can't move '%s': destination '%s' already exists" % - (src, dst)) - - if not isdir(dirname(dst)): - raise DistutilsFileError( - "can't move '%s': destination '%s' not a valid path" % - (src, dst)) - - copy_it = False - try: - os.rename(src, dst) - except OSError as e: - (num, msg) = e.args - if num == errno.EXDEV: - copy_it = True - else: - raise DistutilsFileError( - "couldn't move '%s' to '%s': %s" % (src, dst, msg)) - - if copy_it: - copy_file(src, dst, verbose=verbose) - try: - os.unlink(src) - except OSError as e: - (num, msg) = e.args - try: - os.unlink(dst) - except OSError: - pass - raise DistutilsFileError( - "couldn't move '%s' to '%s' by copy/delete: " - "delete '%s' failed: %s" - % (src, dst, src, msg)) - return dst - - -def write_file (filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - f = open(filename, "w") - try: - for line in contents: - f.write(line + "\n") - finally: - f.close() diff --git a/Lib/distutils/filelist.py b/Lib/distutils/filelist.py deleted file mode 100644 index c92d5fdba393bb..00000000000000 --- a/Lib/distutils/filelist.py +++ /dev/null @@ -1,327 +0,0 @@ -"""distutils.filelist - -Provides the FileList class, used for poking about the filesystem -and building lists of files. -""" - -import os, re -import fnmatch -import functools -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsInternalError -from distutils import log - -class FileList: - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - - Instance attributes: - dir - directory from which files will be taken -- only used if - 'allfiles' not supplied to constructor - files - list of filenames currently being built/filtered/manipulated - allfiles - complete list of files under consideration (ie. without any - filtering applied) - """ - - def __init__(self, warn=None, debug_print=None): - # ignore argument to FileList, but keep them for backwards - # compatibility - self.allfiles = None - self.files = [] - - def set_allfiles(self, allfiles): - self.allfiles = allfiles - - def findall(self, dir=os.curdir): - self.allfiles = findall(dir) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - # -- List-like methods --------------------------------------------- - - def append(self, item): - self.files.append(item) - - def extend(self, items): - self.files.extend(items) - - def sort(self): - # Not a strict lexical sort! - sortable_files = sorted(map(os.path.split, self.files)) - self.files = [] - for sort_tuple in sortable_files: - self.files.append(os.path.join(*sort_tuple)) - - - # -- Other miscellaneous utility methods --------------------------- - - def remove_duplicates(self): - # Assumes list has been sorted! 
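`move_file()` above handles the one cross-platform wrinkle of renaming: `os.rename()` fails with `EXDEV` when source and destination live on different filesystems, so it falls back to copy-then-delete. A compact sketch of that fallback (in modern code `shutil.move()` already does this):

    import errno
    import os
    import shutil

    def move_across_devices(src, dst):
        """Rename if possible; fall back to copy-and-delete across filesystems."""
        try:
            os.rename(src, dst)             # atomic when both paths share a device
        except OSError as exc:
            if exc.errno != errno.EXDEV:    # only handle "cross-device link"
                raise
            shutil.copy2(src, dst)          # copy data plus metadata
            os.unlink(src)
        return dst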
- for i in range(len(self.files) - 1, 0, -1): - if self.files[i] == self.files[i - 1]: - del self.files[i] - - - # -- "File template" methods --------------------------------------- - - def _parse_template_line(self, line): - words = line.split() - action = words[0] - - patterns = dir = dir_pattern = None - - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): - if len(words) < 2: - raise DistutilsTemplateError( - "'%s' expects ..." % action) - patterns = [convert_path(w) for w in words[1:]] - elif action in ('recursive-include', 'recursive-exclude'): - if len(words) < 3: - raise DistutilsTemplateError( - "'%s' expects ..." % action) - dir = convert_path(words[1]) - patterns = [convert_path(w) for w in words[2:]] - elif action in ('graft', 'prune'): - if len(words) != 2: - raise DistutilsTemplateError( - "'%s' expects a single " % action) - dir_pattern = convert_path(words[1]) - else: - raise DistutilsTemplateError("unknown action '%s'" % action) - - return (action, patterns, dir, dir_pattern) - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). - (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. - if action == 'include': - self.debug_print("include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=1): - log.warn("warning: no files found matching '%s'", - pattern) - - elif action == 'exclude': - self.debug_print("exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=1): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) - - elif action == 'global-include': - self.debug_print("global-include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=0): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) - - elif action == 'global-exclude': - self.debug_print("global-exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=0): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) - - elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.include_pattern(pattern, prefix=dir): - log.warn(("warning: no files found matching '%s' " - "under directory '%s'"), - pattern, dir) - - elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.exclude_pattern(pattern, prefix=dir): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) - - elif action == 'graft': - self.debug_print("graft " + dir_pattern) - if not self.include_pattern(None, prefix=dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) - - elif action == 'prune': - self.debug_print("prune " + dir_pattern) - if not 
self.exclude_pattern(None, prefix=dir_pattern): - log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) - else: - raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) - - - # -- Filtering/selection methods ----------------------------------- - - def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): - """Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - # XXX docstring lying about what the special chars are? - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("include_pattern: applying regex r'%s'" % - pattern_re.pattern) - - # delayed loading of allfiles list - if self.allfiles is None: - self.findall() - - for name in self.allfiles: - if pattern_re.search(name): - self.debug_print(" adding " + name) - self.files.append(name) - files_found = True - return files_found - - - def exclude_pattern (self, pattern, - anchor=1, prefix=None, is_regex=0): - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("exclude_pattern: applying regex r'%s'" % - pattern_re.pattern) - for i in range(len(self.files)-1, -1, -1): - if pattern_re.search(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - files_found = True - return files_found - - -# ---------------------------------------------------------------------- -# Utility functions - -def _find_all_simple(path): - """ - Find all files under 'path' - """ - results = ( - os.path.join(base, file) - for base, dirs, files in os.walk(path, followlinks=True) - for file in files - ) - return filter(os.path.isfile, results) - - -def findall(dir=os.curdir): - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. - """ - files = _find_all_simple(dir) - if dir == os.curdir: - make_rel = functools.partial(os.path.relpath, start=dir) - files = map(make_rel, files) - return list(files) - - -def glob_to_re(pattern): - """Translate a shell-like glob pattern to a regular expression; return - a string containing the regex. 
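The anchoring rule documented in `include_pattern()` above (an anchored `*.py` selects `foo.py` but not `foo/bar.py`) is easy to demonstrate on its own. The helper below is an illustrative approximation of that behaviour, not the removed `translate_pattern()`; it hard-codes `/` where the real code used a platform-specific set of special characters:

    import re

    def simple_glob_regex(pattern, anchor=True):
        """Tiny approximation of distutils-style glob matching."""
        parts = []
        for ch in pattern:
            if ch == "*":
                parts.append("[^/]*")       # '*' must not cross a path separator
            elif ch == "?":
                parts.append("[^/]")
            else:
                parts.append(re.escape(ch))
        body = "".join(parts)
        return re.compile(r"\A%s\Z" % body if anchor else r"%s\Z" % body)

    assert simple_glob_regex("*.py").search("setup.py")
    assert not simple_glob_regex("*.py").search("src/setup.py")
    assert simple_glob_regex("*.py", anchor=False).search("src/setup.py")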
Differs from 'fnmatch.translate()' in - that '*' does not match "special characters" (which are - platform-specific). - """ - pattern_re = fnmatch.translate(pattern) - - # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which - # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, - # and by extension they shouldn't match such "special characters" under - # any OS. So change all non-escaped dots in the RE to match any - # character except the special characters (currently: just os.sep). - sep = os.sep - if os.sep == '\\': - # we're using a regex to manipulate a regex, so we need - # to escape the backslash twice - sep = r'\\\\' - escaped = r'\1[^%s]' % sep - pattern_re = re.sub(r'((?= self.threshold: - if args: - msg = msg % args - if level in (WARN, ERROR, FATAL): - stream = sys.stderr - else: - stream = sys.stdout - try: - stream.write('%s\n' % msg) - except UnicodeEncodeError: - # emulate backslashreplace error handler - encoding = stream.encoding - msg = msg.encode(encoding, "backslashreplace").decode(encoding) - stream.write('%s\n' % msg) - stream.flush() - - def log(self, level, msg, *args): - self._log(level, msg, args) - - def debug(self, msg, *args): - self._log(DEBUG, msg, args) - - def info(self, msg, *args): - self._log(INFO, msg, args) - - def warn(self, msg, *args): - self._log(WARN, msg, args) - - def error(self, msg, *args): - self._log(ERROR, msg, args) - - def fatal(self, msg, *args): - self._log(FATAL, msg, args) - -_global_log = Log() -log = _global_log.log -debug = _global_log.debug -info = _global_log.info -warn = _global_log.warn -error = _global_log.error -fatal = _global_log.fatal - -def set_threshold(level): - # return the old threshold for use from tests - old = _global_log.threshold - _global_log.threshold = level - return old - -def set_verbosity(v): - if v <= 0: - set_threshold(WARN) - elif v == 1: - set_threshold(INFO) - elif v >= 2: - set_threshold(DEBUG) diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py deleted file mode 100644 index a7976fbe3ed924..00000000000000 --- a/Lib/distutils/msvc9compiler.py +++ /dev/null @@ -1,788 +0,0 @@ -"""distutils.msvc9compiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio 2008. - -The module is compatible with VS 2005 and VS 2008. You can find legacy support -for older versions of VS in distutils.msvccompiler. 
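The removed `distutils/log.py` shown above is a tiny PEP 282-style logger: messages below a module-wide threshold are dropped, and `set_verbosity()` maps the familiar 0/1/2 verbosity knob onto that threshold. A rough stand-in built on the standard `logging` module (logger name and stream routing are simplified assumptions; distutils sent INFO/DEBUG to stdout, this sketch sends everything to stderr):

    import logging

    def make_logger(verbosity=0):
        """0 -> WARNING, 1 -> INFO, 2+ -> DEBUG, mirroring set_verbosity()."""
        levels = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
        logging.basicConfig(format="%(message)s")
        logger = logging.getLogger("build")          # arbitrary name for the sketch
        logger.setLevel(levels[max(0, min(verbosity, 2))])
        return logger

    log = make_logger(verbosity=1)
    log.info("copying %s -> %s", "README", "build/README")   # emitted at INFO
    log.debug("not shown at this verbosity")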
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS2005 and VS 2008 by Christian Heimes - -import os -import subprocess -import sys -import re - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -import winreg - -RegOpenKeyEx = winreg.OpenKeyEx -RegEnumKey = winreg.EnumKey -RegEnumValue = winreg.EnumValue -RegError = winreg.error - -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - -NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) -if NATIVE_WIN64: - # Visual C++ is a 32-bit application, so we need to look in - # the corresponding registry branch, if we're running a - # 64-bit Python on Win64 - VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" -else: - VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Microsoft\.NETFramework" - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is -# the param to cross-compile on x86 targeting amd64.) -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', -} - -class Reg: - """Helper class to read values from the registry - """ - - def get_value(cls, path, key): - for base in HKEYS: - d = cls.read_values(base, path) - if d and key in d: - return d[key] - raise KeyError(key) - get_value = classmethod(get_value) - - def read_keys(cls, base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - read_keys = classmethod(read_keys) - - def read_values(cls, base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. 
- """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) - i += 1 - return d - read_values = classmethod(read_values) - - def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - convert_mbcs = staticmethod(convert_mbcs) - -class MacroExpander: - - def __init__(self, version): - self.macros = {} - self.vsbase = VS_BASE % version - self.load_macros(version) - - def set_macro(self, macro, path, key): - self.macros["$(%s)" % macro] = Reg.get_value(path, key) - - def load_macros(self, version): - self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") - self.set_macro("FrameworkDir", NET_BASE, "installroot") - try: - if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") - else: - raise KeyError("sdkinstallrootv2.0") - except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - if version >= 9.0: - self.set_macro("FrameworkVersion", self.vsbase, "clr version") - self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") - else: - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - -def removeDuplicates(variable): - """Remove duplicate values of an environment variable. 
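`get_build_version()` above recovers the compiler version from the `MSC v.XXXX` marker that Windows builds embed in `sys.version`; the parsing is plain string slicing and can be exercised on any platform by feeding it a sample string (the sample below is fabricated for illustration):

    def msc_version(version_string):
        """Return the MSVC major.minor a build reports, or None if unknown."""
        prefix = "MSC v."
        i = version_string.find(prefix)
        if i == -1:
            return None
        digits = version_string[i + len(prefix):].split(" ", 1)[0]
        major = int(digits[:-2]) - 6       # '1933' -> 19 - 6 = 13
        if major >= 13:
            major += 1                     # version 13 was never released
        minor = int(digits[2:3]) / 10.0
        return major + minor

    sample = "3.11.0 (main, Oct 24 2022) [MSC v.1933 64 bit (AMD64)]"
    print(msc_version(sample))             # -> 14.3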
- """ - oldList = variable.split(os.pathsep) - newList = [] - for i in oldList: - if i not in newList: - newList.append(i) - newVariable = os.pathsep.join(newList) - return newVariable - -def find_vcvarsall(version): - """Find the vcvarsall.bat file - - At first it tries to find the productdir of VS 2008 in the registry. If - that fails it falls back to the VS90COMNTOOLS env var. - """ - vsbase = VS_BASE % version - try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") - except KeyError: - log.debug("Unable to find productdir in registry") - productdir = None - - if not productdir or not os.path.isdir(productdir): - toolskey = "VS%0.f0COMNTOOLS" % version - toolsdir = os.environ.get(toolskey, None) - - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): - log.debug("%s is not a valid directory" % productdir) - return None - else: - log.debug("Env var %s is not set or invalid" % toolskey) - if not productdir: - log.debug("No productdir found") - return None - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - log.debug("Unable to find vcvarsall.bat") - return None - -def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ - vcvarsall = find_vcvarsall(version) - interesting = {"include", "lib", "libpath", "path"} - result = {} - - if vcvarsall is None: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - stdout, stderr = popen.communicate() - if popen.wait() != 0: - raise DistutilsPlatformError(stderr.decode("mbcs")) - - stdout = stdout.decode("mbcs") - for line in stdout.split("\n"): - line = Reg.convert_mbcs(line) - if '=' not in line: - continue - line = line.strip() - key, value = line.split('=', 1) - key = key.lower() - if key in interesting: - if value.endswith(os.pathsep): - value = value[:-1] - result[key] = removeDuplicates(value) - - finally: - popen.stdout.close() - popen.stderr.close() - - if len(result) != len(interesting): - raise ValueError(str(list(result.keys()))) - - return result - -# More globals -VERSION = get_build_version() -if VERSION < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) -# MACROS = MacroExpander(VERSION) - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. 
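`query_vcvarsall()` above launches `vcvarsall.bat ... & set` and scrapes the environment dump it prints. The scraping half can be illustrated without Windows; the dump below is a fabricated sample, and `;` is hard-coded where the real code used `os.pathsep`:

    def parse_env_dump(dump, interesting=("include", "lib", "libpath", "path")):
        """Pick selected variables out of the output of the 'set' command."""
        result = {}
        for line in dump.splitlines():
            if "=" not in line:
                continue
            key, value = line.strip().split("=", 1)
            key = key.lower()
            if key in interesting:
                # drop duplicate path entries while keeping the original order
                parts = dict.fromkeys(p for p in value.split(";") if p)
                result[key] = ";".join(parts)
        return result

    sample = "PATH=C:\\VC\\bin;C:\\Windows;C:\\VC\\bin\nLIB=C:\\VC\\lib\nPROMPT=$P$G"
    print(parse_env_dump(sample))   # {'path': 'C:\\VC\\bin;C:\\Windows', 'lib': 'C:\\VC\\lib'}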
- src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - self.__version = VERSION - self.__root = r"Software\Microsoft\VisualStudio" - # self.__macros = MACROS - self.__paths = [] - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.__arch = None # deprecated name - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - ok_plats = 'win32', 'win-amd64' - if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % - (ok_plats,)) - - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; - # to cross compile, you use 'x86_amd64'. - # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross - # compile use 'x86' (ie, it runs the x86 compiler directly) - if plat_name == get_platform() or plat_name == 'win32': - # native build or cross-compile to win32 - plat_spec = PLAT_TO_VCVARS[plat_name] - else: - # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] - - vc_env = query_vcvarsall(VERSION, plat_spec) - - self.__paths = vc_env['path'].split(os.pathsep) - os.environ['lib'] = vc_env['lib'] - os.environ['include'] = vc_env['include'] - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
- % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "x86": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - 
continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
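`object_filenames()` earlier in this class simply maps source suffixes onto the compiler's output suffixes (`.c`/`.cpp` to `.obj`, `.rc`/`.mc` to `.res`). The core of that mapping shown standalone, with an invented helper name:

    import os

    SUFFIXES = {".c": ".obj", ".cc": ".obj", ".cpp": ".obj", ".cxx": ".obj",
                ".rc": ".res", ".mc": ".res"}      # resource scripts become .res

    def object_name(src_name, output_dir="", strip_dir=False):
        """Mirror the suffix mapping done by object_filenames()."""
        base, ext = os.path.splitext(src_name)
        if ext not in SUFFIXES:
            raise ValueError("don't know how to compile %s" % src_name)
        base = os.path.splitdrive(base)[1]         # drop a 'C:' style drive
        base = base[os.path.isabs(base):]          # drop a leading separator
        if strip_dir:
            base = os.path.basename(base)
        return os.path.join(output_dir, base + SUFFIXES[ext])

    print(object_name("src/foo.c", output_dir="build"))   # build/src/foo.obj
    print(object_name("res/app.rc", strip_dir=True))      # app.res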
- build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - self.manifest_setup_ldargs(output_filename, build_temp, ld_args) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - # embed the manifest - # XXX - this is somewhat fragile - if mt.exe fails, distutils - # will still consider the DLL up-to-date, but it will not have a - # manifest. Maybe we should link to a temp file? OTOH, that - # implies a build environment error that shouldn't go undetected. - mfinfo = self.manifest_get_embed_info(target_desc, ld_args) - if mfinfo is not None: - mffilename, mfid = mfinfo - out_arg = '-outputresource:%s;%s' % (output_filename, mfid) - try: - self.spawn(['mt.exe', '-nologo', '-manifest', - mffilename, out_arg]) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): - # If we need a manifest at all, an embedded manifest is recommended. - # See MSDN article titled - # "How to: Embed a Manifest Inside a C/C++ Application" - # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) - # Ask the linker to generate the manifest in the temp dir, so - # we can check it, and possibly embed it, later. - temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") - ld_args.append('/MANIFESTFILE:' + temp_manifest) - - def manifest_get_embed_info(self, target_desc, ld_args): - # If a manifest should be embedded, return a tuple of - # (manifest_filename, resource_id). Returns None if no manifest - # should be embedded. See http://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can. - for arg in ld_args: - if arg.startswith("/MANIFESTFILE:"): - temp_manifest = arg.split(":", 1)[1] - break - else: - # no /MANIFESTFILE so nothing to do. - return None - if target_desc == CCompiler.EXECUTABLE: - # by default, executables always get the manifest with the - # CRT referenced. - mfid = 1 - else: - # Extension modules try and avoid any manifest if possible. - mfid = 2 - temp_manifest = self._remove_visual_c_ref(temp_manifest) - if temp_manifest is None: - return None - return temp_manifest, mfid - - def _remove_visual_c_ref(self, manifest_file): - try: - # Remove references to the Visual C runtime, so they will - # fall through to the Visual C dependency of Python.exe. - # This way, when installed for a restricted user (e.g. - # runtimes are not in WinSxS folder, but in Python's own - # folder), the runtimes do not need to be in every folder - # with .pyd's. - # Returns either the filename of the modified manifest or - # None if no manifest should be embedded. - manifest_f = open(manifest_file) - try: - manifest_buf = manifest_f.read() - finally: - manifest_f.close() - pattern = re.compile( - r"""|)""", - re.DOTALL) - manifest_buf = re.sub(pattern, "", manifest_buf) - pattern = r"\s*" - manifest_buf = re.sub(pattern, "", manifest_buf) - # Now see if any other assemblies are referenced - if not, we - # don't want a manifest embedded. 
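`_remove_visual_c_ref()` above filters the `<assemblyIdentity>` elements that pin an extension module to a specific `Microsoft.VCxx.CRT` runtime out of the linker-generated manifest, and keeps the manifest only if some other assembly is still referenced. A standalone sketch of that filtering; the regular expressions here are reconstructed assumptions, not text copied from distutils:

    import re

    CRT_REF = re.compile(
        r"""<assemblyIdentity.*?name=("|')Microsoft\.VC\d{2}\.CRT("|')"""
        r""".*?(/>|</assemblyIdentity>)""", re.DOTALL)
    EMPTY_DEP = re.compile(r"<dependentAssembly>\s*</dependentAssembly>")
    ANY_ASSEMBLY = re.compile(r"<assemblyIdentity.*?(/>|</assemblyIdentity>)",
                              re.DOTALL)

    def strip_crt_refs(manifest_text):
        """Return the filtered manifest, or None if nothing is worth embedding."""
        manifest_text = CRT_REF.sub("", manifest_text)
        manifest_text = EMPTY_DEP.sub("", manifest_text)
        if ANY_ASSEMBLY.search(manifest_text) is None:
            return None                    # no other assemblies referenced
        return manifest_text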
- pattern = re.compile( - r"""|)""", re.DOTALL) - if re.search(pattern, manifest_buf) is None: - return None - - manifest_f = open(manifest_file, 'w') - try: - manifest_f.write(manifest_buf) - return manifest_file - finally: - manifest_f.close() - except OSError: - pass - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe diff --git a/Lib/distutils/msvccompiler.py b/Lib/distutils/msvccompiler.py deleted file mode 100644 index f0d04fdb7f4178..00000000000000 --- a/Lib/distutils/msvccompiler.py +++ /dev/null @@ -1,642 +0,0 @@ -"""distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. 
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) - -import sys, os -from distutils.errors import \ - DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import \ - CCompiler, gen_lib_options -from distutils import log - -_can_read_reg = False -try: - import winreg - - _can_read_reg = True - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - _can_read_reg = True - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegError = win32api.error - except ImportError: - log.info("Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed.") - -if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) - -def read_keys(base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - -def read_values(base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[convert_mbcs(name)] = convert_mbcs(value) - i += 1 - return d - -def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - -class MacroExpander: - def __init__(self, version): - self.macros = {} - self.load_macros(version) - - def set_macro(self, macro, path, key): - for base in HKEYS: - d = read_values(base, path) - if d: - self.macros["$(%s)" % macro] = d[key] - break - - def load_macros(self, version): - vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version - self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") - net = r"Software\Microsoft\.NETFramework" - self.set_macro("FrameworkDir", net, "installroot") - try: - if version > 7.0: - self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") - else: - self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError as exc: # - raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2003 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = read_values(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. 
- - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def get_build_architecture(): - """Return the processor architecture. - - Possible results are "Intel" or "AMD64". - """ - - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return "Intel" - j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) - self.__version = get_build_version() - self.__arch = get_build_architecture() - if self.__arch == "Intel": - # x86 - if self.__version >= 7: - self.__root = r"Software\Microsoft\VisualStudio" - self.__macros = MacroExpander(self.__version) - else: - self.__root = r"Software\Microsoft\Devstudio" - self.__product = "Visual Studio version %s" % self.__version - else: - # Win64. 
Assume this was built with the platform SDK - self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) - - self.initialized = False - - def initialize(self): - self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - self.__paths = self.get_msvc_paths("path") - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - self.set_path_env_var('lib') - self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - else: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build 
= compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. 
- - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe - - def get_msvc_paths(self, path, platform='x86'): - """Get a list of devstudio directories (include, lib or path). - - Return a list of strings. The list will be empty if unable to - access the registry or appropriate registry keys not found. - """ - if not _can_read_reg: - return [] - - path = path + " dirs" - if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) - else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) - - for base in HKEYS: - d = read_values(base, key) - if d: - if self.__version >= 7: - return self.__macros.sub(d[path]).split(";") - else: - return d[path].split(";") - # MSVC 6 seems to create the registry entries we need only when - # the GUI is run. - if self.__version == 6: - for base in HKEYS: - if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " - "but the expected registry settings are not present.\n" - "You must at least run the Visual Studio GUI once " - "so that these entries are created.") - break - return [] - - def set_path_env_var(self, name): - """Set environment variable 'name' to an MSVC path type value. - - This is equivalent to a SET command prior to execution of spawned - commands. - """ - - if name == "lib": - p = self.get_msvc_paths("library") - else: - p = self.get_msvc_paths(name) - if p: - os.environ[name] = ';'.join(p) - - -if get_build_version() >= 8.0: - log.debug("Importing new compiler from distutils.msvc9compiler") - OldMSVCCompiler = MSVCCompiler - from distutils.msvc9compiler import MSVCCompiler - # get_build_architecture not really relevant now we support cross-compile - from distutils.msvc9compiler import MacroExpander diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py deleted file mode 100644 index 31df3f7faca552..00000000000000 --- a/Lib/distutils/spawn.py +++ /dev/null @@ -1,129 +0,0 @@ -"""distutils.spawn - -Provides the 'spawn()' function, a front-end to various platform- -specific functions for launching another program in a sub-process. -Also provides the 'find_executable()' to search the path for a given -executable name. -""" - -import sys -import os -import subprocess - -from distutils.errors import DistutilsPlatformError, DistutilsExecError -from distutils.debug import DEBUG -from distutils import log - - -if sys.platform == 'darwin': - _cfg_target = None - _cfg_target_split = None - - -def spawn(cmd, search_path=1, verbose=0, dry_run=0): - """Run another program, specified as a command list 'cmd', in a new process. - - 'cmd' is just the argument list for the new process, ie. - cmd[0] is the program to run and cmd[1:] are the rest of its arguments. - There is no way to run a program with a name different from that of its - executable. 
- - If 'search_path' is true (the default), the system's executable - search path will be used to find the program; otherwise, cmd[0] - must be the exact path to the executable. If 'dry_run' is true, - the command will not actually be run. - - Raise DistutilsExecError if running the program fails in any way; just - return on success. - """ - # cmd is documented as a list, but just in case some code passes a tuple - # in, protect our %-formatting code against horrible death - cmd = list(cmd) - - log.info(' '.join(cmd)) - if dry_run: - return - - if search_path: - executable = find_executable(cmd[0]) - if executable is not None: - cmd[0] = executable - - env = None - if sys.platform == 'darwin': - global _cfg_target, _cfg_target_split - if _cfg_target is None: - from distutils import sysconfig - _cfg_target = sysconfig.get_config_var( - 'MACOSX_DEPLOYMENT_TARGET') or '' - if _cfg_target: - _cfg_target_split = [int(x) for x in _cfg_target.split('.')] - if _cfg_target: - # Ensure that the deployment target of the build process is not - # less than 10.3 if the interpreter was built for 10.3 or later. - # This ensures extension modules are built with correct - # compatibility values, specifically LDSHARED which can use - # '-undefined dynamic_lookup' which only works on >= 10.3. - cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target) - cur_target_split = [int(x) for x in cur_target.split('.')] - if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]: - my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: ' - 'now "%s" but "%s" during configure;' - 'must use 10.3 or later' - % (cur_target, _cfg_target)) - raise DistutilsPlatformError(my_msg) - env = dict(os.environ, - MACOSX_DEPLOYMENT_TARGET=cur_target) - - try: - proc = subprocess.Popen(cmd, env=env) - proc.wait() - exitcode = proc.returncode - except OSError as exc: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed: %s" % (cmd, exc.args[-1])) from exc - - if exitcode: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed with exit code %s" % (cmd, exitcode)) - - -def find_executable(executable, path=None): - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. - """ - _, ext = os.path.splitext(executable) - if (sys.platform == 'win32') and (ext != '.exe'): - executable = executable + '.exe' - - if os.path.isfile(executable): - return executable - - if path is None: - path = os.environ.get('PATH', None) - if path is None: - try: - path = os.confstr("CS_PATH") - except (AttributeError, ValueError): - # os.confstr() or CS_PATH is not available - path = os.defpath - # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string - - # PATH='' doesn't match, whereas PATH=':' looks in the current directory - if not path: - return None - - paths = path.split(os.pathsep) - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py deleted file mode 100644 index 8eada987c13819..00000000000000 --- a/Lib/distutils/sysconfig.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Provide access to Python's configuration information. The specific -configuration variables available depend heavily on the platform and -configuration. 
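`spawn()` above reduces to: resolve the program on PATH, log the command, run it, and turn a non-zero exit status into an exception (plus a macOS-only deployment-target check omitted here). A rough modern equivalent using `subprocess` and `shutil.which()`, with an invented name:

    import shutil
    import subprocess

    def run_or_die(cmd, dry_run=False):
        """Run 'cmd' (a list of arguments) and raise RuntimeError on failure."""
        print(" ".join(cmd))                      # distutils logged the command first
        if dry_run:
            return
        executable = shutil.which(cmd[0])         # roughly find_executable()
        if executable is not None:
            cmd = [executable] + list(cmd[1:])
        result = subprocess.run(cmd)
        if result.returncode:
            raise RuntimeError("command %r failed with exit code %s"
                               % (cmd, result.returncode))

    run_or_die(["python", "--version"], dry_run=True)   # only logs the command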
The values may be retrieved using -get_config_var(name), and the list of variables is available via -get_config_vars().keys(). Additional convenience functions are also -available. - -Written by: Fred L. Drake, Jr. -Email: -""" - -import os -import re -import sys -import warnings - -from functools import partial - -from .errors import DistutilsPlatformError - -from sysconfig import ( - _PREFIX as PREFIX, - _BASE_PREFIX as BASE_PREFIX, - _EXEC_PREFIX as EXEC_PREFIX, - _BASE_EXEC_PREFIX as BASE_EXEC_PREFIX, - _PROJECT_BASE as project_base, - _PYTHON_BUILD as python_build, - _init_posix as sysconfig_init_posix, - parse_config_h as sysconfig_parse_config_h, - - _init_non_posix, - - _variable_rx, - _findvar1_rx, - _findvar2_rx, - - expand_makefile_vars, - is_python_build, - get_config_h_filename, - get_config_var, - get_config_vars, - get_makefile_filename, - get_python_version, -) - -# This is better than -# from sysconfig import _CONFIG_VARS as _config_vars -# because it makes sure that the global dictionary is initialized -# which might not be true in the time of import. -_config_vars = get_config_vars() - -warnings.warn( - 'The distutils.sysconfig module is deprecated, use sysconfig instead', - DeprecationWarning, - stacklevel=2 -) - - -# Following functions are the same as in sysconfig but with different API -def parse_config_h(fp, g=None): - return sysconfig_parse_config_h(fp, vars=g) - - -_python_build = partial(is_python_build, check_home=True) -_init_posix = partial(sysconfig_init_posix, _config_vars) -_init_nt = partial(_init_non_posix, _config_vars) - - -# Similar function is also implemented in sysconfig as _parse_makefile -# but without the parsing capabilities of distutils.text_file.TextFile. -def parse_makefile(fn, g=None): - """Parse a Makefile-style file. - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - from distutils.text_file import TextFile - fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") - - if g is None: - g = {} - done = {} - notdone = {} - - while True: - line = fp.readline() - if line is None: # eof - break - m = re.match(_variable_rx, line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. 
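# Illustrative sketch, not part of the removed file: the '$$' handling in
# parse_makefile() above treats a doubled dollar sign as a literal '$'
# (make syntax), so a value is deferred for later interpolation only when it
# still contains '$' once the '$$' pairs are ignored.
value = "-L$(LIBDIR) -Wl,-rpath,$$ORIGIN"
tmp = value.replace("$$", "")           # drop the literal-dollar pairs first
needs_expansion = "$" in tmp            # True: '$(LIBDIR)' still needs a value
stored_form = value.replace("$$", "$")  # what is kept once expansion is done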
- renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - # do variable interpolation here - while notdone: - for name in list(notdone): - value = notdone[name] - m = re.search(_findvar1_rx, value) or re.search(_findvar2_rx, value) - if m: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if name.startswith('PY_') and name[3:] in renamed_variables: - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - else: - done[n] = item = "" - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - del notdone[name] - - if name.startswith('PY_') \ - and name[3:] in renamed_variables: - - name = name[3:] - if name not in done: - done[name] = value - else: - # bogus variable reference; just drop it since we can't deal - del notdone[name] - - fp.close() - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - g.update(done) - return g - - -# Following functions are deprecated together with this module and they -# have no direct replacement - -# Calculate the build qualifier flags if they are defined. Adding the flags -# to the include and lib directories only makes sense for an installation, not -# an in-source build. -build_flags = '' -try: - if not python_build: - build_flags = sys.abiflags -except AttributeError: - # It's not a configure-based build, so the sys module doesn't have - # this attribute, which is fine. - pass - - -def customize_compiler(compiler): - """Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. - """ - if compiler.compiler_type == "unix": - if sys.platform == "darwin": - # Perform first-time customization of compiler-related - # config vars on OS X now that we know we need a compiler. - # This is primarily to support Pythons from binary - # installers. The kind and paths to build tools on - # the user system may vary significantly from the system - # that Python itself was built on. Also the user OS - # version and build tools may not support the same set - # of CPU architectures for universal builds. 
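# Illustrative sketch, not part of the removed file: customize_compiler()
# reads its defaults from the interpreter's build-time configuration, and the
# maintained way to query those values is sysconfig.get_config_vars().  The
# names below are real CPython configuration variables, but some of them can
# be None on non-POSIX builds.
import sysconfig
cc, cflags, ldshared, ext_suffix = sysconfig.get_config_vars(
    "CC", "CFLAGS", "LDSHARED", "EXT_SUFFIX")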
- if not _config_vars.get('CUSTOMIZED_OSX_COMPILER'): - import _osx_support - _osx_support.customize_compiler(_config_vars) - _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' - - (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars('CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') - - if 'CC' in os.environ: - newcc = os.environ['CC'] - if (sys.platform == 'darwin' - and 'LDSHARED' not in os.environ - and ldshared.startswith(cc)): - # On OS X, if CC is overridden, use that as the default - # command for LDSHARED as well - ldshared = newcc + ldshared[len(cc):] - cc = newcc - if 'CXX' in os.environ: - cxx = os.environ['CXX'] - if 'LDSHARED' in os.environ: - ldshared = os.environ['LDSHARED'] - if 'CPP' in os.environ: - cpp = os.environ['CPP'] - else: - cpp = cc + " -E" # not always - if 'LDFLAGS' in os.environ: - ldshared = ldshared + ' ' + os.environ['LDFLAGS'] - if 'CFLAGS' in os.environ: - cflags = cflags + ' ' + os.environ['CFLAGS'] - ldshared = ldshared + ' ' + os.environ['CFLAGS'] - if 'CPPFLAGS' in os.environ: - cpp = cpp + ' ' + os.environ['CPPFLAGS'] - cflags = cflags + ' ' + os.environ['CPPFLAGS'] - ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] - if 'AR' in os.environ: - ar = os.environ['AR'] - if 'ARFLAGS' in os.environ: - archiver = ar + ' ' + os.environ['ARFLAGS'] - else: - archiver = ar + ' ' + ar_flags - - cc_cmd = cc + ' ' + cflags - compiler.set_executables( - preprocessor=cpp, - compiler=cc_cmd, - compiler_so=cc_cmd + ' ' + ccshared, - compiler_cxx=cxx, - linker_so=ldshared, - linker_exe=cc, - archiver=archiver) - - compiler.shared_lib_extension = shlib_suffix - - -def get_python_inc(plat_specific=0, prefix=None): - """Return the directory containing installed Python header files. - - If 'plat_specific' is false (the default), this is the path to the - non-platform-specific header files, i.e. Python.h and so on; - otherwise, this is the path to platform-specific header files - (namely pyconfig.h). - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - if prefix is None: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - if os.name == "posix": - if python_build: - # Assume the executable is in the build directory. The - # pyconfig.h file should be in the same directory. Since - # the build directory may not be the source directory, we - # must use "srcdir" from the makefile to find the "Include" - # directory. - if plat_specific: - return project_base - else: - incdir = os.path.join(get_config_var('srcdir'), 'Include') - return os.path.normpath(incdir) - python_dir = 'python' + get_python_version() + build_flags - return os.path.join(prefix, "include", python_dir) - elif os.name == "nt": - if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h - return (os.path.join(prefix, "include") + os.path.pathsep + - os.path.join(prefix, "PC")) - return os.path.join(prefix, "include") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its C header files " - "on platform '%s'" % os.name) - - -def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - """Return the directory containing the Python library (standard or - site additions). - - If 'plat_specific' is true, return the directory containing - platform-specific modules, i.e. any module from a non-pure-Python - module distribution; otherwise, return the platform-shared library - directory. 
If 'standard_lib' is true, return the directory - containing standard Python library modules; otherwise, return the - directory for site-specific modules. - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - if prefix is None: - if standard_lib: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - else: - prefix = plat_specific and EXEC_PREFIX or PREFIX - - if os.name == "posix": - if plat_specific or standard_lib: - # Platform-specific modules (any module from a non-pure-Python - # module distribution) or standard Python library modules. - libdir = sys.platlibdir - else: - # Pure Python - libdir = "lib" - libpython = os.path.join(prefix, libdir, - "python" + get_python_version()) - if standard_lib: - return libpython - else: - return os.path.join(libpython, "site-packages") - elif os.name == "nt": - if standard_lib: - return os.path.join(prefix, "Lib") - else: - return os.path.join(prefix, "Lib", "site-packages") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its library " - "on platform '%s'" % os.name) diff --git a/Lib/distutils/tests/Setup.sample b/Lib/distutils/tests/Setup.sample deleted file mode 100644 index 36c4290d8ff482..00000000000000 --- a/Lib/distutils/tests/Setup.sample +++ /dev/null @@ -1,67 +0,0 @@ -# Setup file from the pygame project - -#--StartConfig -SDL = -I/usr/include/SDL -D_REENTRANT -lSDL -FONT = -lSDL_ttf -IMAGE = -lSDL_image -MIXER = -lSDL_mixer -SMPEG = -lsmpeg -PNG = -lpng -JPEG = -ljpeg -SCRAP = -lX11 -PORTMIDI = -lportmidi -PORTTIME = -lporttime -#--EndConfig - -#DEBUG = -C-W -C-Wall -DEBUG = - -#the following modules are optional. you will want to compile -#everything you can, but you can ignore ones you don't have -#dependencies for, just comment them out - -imageext src/imageext.c $(SDL) $(IMAGE) $(PNG) $(JPEG) $(DEBUG) -font src/font.c $(SDL) $(FONT) $(DEBUG) -mixer src/mixer.c $(SDL) $(MIXER) $(DEBUG) -mixer_music src/music.c $(SDL) $(MIXER) $(DEBUG) -_numericsurfarray src/_numericsurfarray.c $(SDL) $(DEBUG) -_numericsndarray src/_numericsndarray.c $(SDL) $(MIXER) $(DEBUG) -movie src/movie.c $(SDL) $(SMPEG) $(DEBUG) -scrap src/scrap.c $(SDL) $(SCRAP) $(DEBUG) -_camera src/_camera.c src/camera_v4l2.c src/camera_v4l.c $(SDL) $(DEBUG) -pypm src/pypm.c $(SDL) $(PORTMIDI) $(PORTTIME) $(DEBUG) - -GFX = src/SDL_gfx/SDL_gfxPrimitives.c -#GFX = src/SDL_gfx/SDL_gfxBlitFunc.c src/SDL_gfx/SDL_gfxPrimitives.c -gfxdraw src/gfxdraw.c $(SDL) $(GFX) $(DEBUG) - - - -#these modules are required for pygame to run. they only require -#SDL as a dependency. 
these should not be altered - -base src/base.c $(SDL) $(DEBUG) -cdrom src/cdrom.c $(SDL) $(DEBUG) -color src/color.c $(SDL) $(DEBUG) -constants src/constants.c $(SDL) $(DEBUG) -display src/display.c $(SDL) $(DEBUG) -event src/event.c $(SDL) $(DEBUG) -fastevent src/fastevent.c src/fastevents.c $(SDL) $(DEBUG) -key src/key.c $(SDL) $(DEBUG) -mouse src/mouse.c $(SDL) $(DEBUG) -rect src/rect.c $(SDL) $(DEBUG) -rwobject src/rwobject.c $(SDL) $(DEBUG) -surface src/surface.c src/alphablit.c src/surface_fill.c $(SDL) $(DEBUG) -surflock src/surflock.c $(SDL) $(DEBUG) -time src/time.c $(SDL) $(DEBUG) -joystick src/joystick.c $(SDL) $(DEBUG) -draw src/draw.c $(SDL) $(DEBUG) -image src/image.c $(SDL) $(DEBUG) -overlay src/overlay.c $(SDL) $(DEBUG) -transform src/transform.c src/rotozoom.c src/scale2x.c src/scale_mmx.c $(SDL) $(DEBUG) -mask src/mask.c src/bitmask.c $(SDL) $(DEBUG) -bufferproxy src/bufferproxy.c $(SDL) $(DEBUG) -pixelarray src/pixelarray.c $(SDL) $(DEBUG) -_arraysurfarray src/_arraysurfarray.c $(SDL) $(DEBUG) - - diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py deleted file mode 100644 index 16d011fd9ee6e7..00000000000000 --- a/Lib/distutils/tests/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test suite for distutils. - -This test suite consists of a collection of test modules in the -distutils.tests package. Each test module has a name starting with -'test' and contains a function test_suite(). The function is expected -to return an initialized unittest.TestSuite instance. - -Tests for the command classes in the distutils.command package are -included in distutils.tests as well, instead of using a separate -distutils.command.tests package, since command identification is done -by import rather than matching pre-defined names. - -""" - -import os -import sys -import unittest -from test.support import run_unittest -from test.support.warnings_helper import save_restore_warnings_filters - - -here = os.path.dirname(__file__) or os.curdir - - -def test_suite(): - suite = unittest.TestSuite() - for fn in os.listdir(here): - if fn.startswith("test") and fn.endswith(".py"): - modname = "distutils.tests." + fn[:-3] - # bpo-40055: Save/restore warnings filters to leave them unchanged. - # Importing tests imports docutils which imports pkg_resources - # which adds a warnings filter. - with save_restore_warnings_filters(): - __import__(modname) - module = sys.modules[modname] - suite.addTest(module.test_suite()) - return suite - - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/includetest.rst b/Lib/distutils/tests/includetest.rst deleted file mode 100644 index d7b4ae38b09d86..00000000000000 --- a/Lib/distutils/tests/includetest.rst +++ /dev/null @@ -1 +0,0 @@ -This should be included. 
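# Illustrative sketch, not part of the patch: the get_python_inc() and
# get_python_lib() helpers removed a few hunks above map onto
# sysconfig.get_path(); the names below are standard sysconfig scheme keys.
import sysconfig
include = sysconfig.get_path("include")          # Python.h and friends
platinclude = sysconfig.get_path("platinclude")  # pyconfig.h
purelib = sysconfig.get_path("purelib")          # site-packages (pure Python)
platlib = sysconfig.get_path("platlib")          # site-packages (extensions)
stdlib = sysconfig.get_path("stdlib")            # the standard library itself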
diff --git a/Lib/distutils/tests/support.py b/Lib/distutils/tests/support.py deleted file mode 100644 index 23b907b607efad..00000000000000 --- a/Lib/distutils/tests/support.py +++ /dev/null @@ -1,209 +0,0 @@ -"""Support code for distutils test cases.""" -import os -import sys -import shutil -import tempfile -import unittest -import sysconfig -from copy import deepcopy -from test.support import os_helper - -from distutils import log -from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL -from distutils.core import Distribution - - -class LoggingSilencer(object): - - def setUp(self): - super().setUp() - self.threshold = log.set_threshold(log.FATAL) - # catching warnings - # when log will be replaced by logging - # we won't need such monkey-patch anymore - self._old_log = log.Log._log - log.Log._log = self._log - self.logs = [] - - def tearDown(self): - log.set_threshold(self.threshold) - log.Log._log = self._old_log - super().tearDown() - - def _log(self, level, msg, args): - if level not in (DEBUG, INFO, WARN, ERROR, FATAL): - raise ValueError('%s wrong log level' % str(level)) - if not isinstance(msg, str): - raise TypeError("msg should be str, not '%.200s'" - % (type(msg).__name__)) - self.logs.append((level, msg, args)) - - def get_logs(self, *levels): - return [msg % args for level, msg, args - in self.logs if level in levels] - - def clear_logs(self): - self.logs = [] - - -class TempdirManager(object): - """Mix-in class that handles temporary directories for test cases. - - This is intended to be used with unittest.TestCase. - """ - - def setUp(self): - super().setUp() - self.old_cwd = os.getcwd() - self.tempdirs = [] - - def tearDown(self): - # Restore working dir, for Solaris and derivatives, where rmdir() - # on the current directory fails. - os.chdir(self.old_cwd) - super().tearDown() - while self.tempdirs: - tmpdir = self.tempdirs.pop() - os_helper.rmtree(tmpdir) - - def mkdtemp(self): - """Create a temporary directory that will be cleaned up. - - Returns the path of the directory. - """ - d = tempfile.mkdtemp() - self.tempdirs.append(d) - return d - - def write_file(self, path, content='xxx'): - """Writes a file in the given path. - - - path can be a string or a sequence. - """ - if isinstance(path, (list, tuple)): - path = os.path.join(*path) - f = open(path, 'w') - try: - f.write(content) - finally: - f.close() - - def create_dist(self, pkg_name='foo', **kw): - """Will generate a test environment. - - This function creates: - - a Distribution instance using keywords - - a temporary directory with a package structure - - It returns the package directory and the distribution - instance. - """ - tmp_dir = self.mkdtemp() - pkg_dir = os.path.join(tmp_dir, pkg_name) - os.mkdir(pkg_dir) - dist = Distribution(attrs=kw) - - return pkg_dir, dist - - -class DummyCommand: - """Class to store options for retrieval via set_undefined_options().""" - - def __init__(self, **kwargs): - for kw, val in kwargs.items(): - setattr(self, kw, val) - - def ensure_finalized(self): - pass - - -class EnvironGuard(object): - - def setUp(self): - super(EnvironGuard, self).setUp() - self.old_environ = deepcopy(os.environ) - - def tearDown(self): - for key, value in self.old_environ.items(): - if os.environ.get(key) != value: - os.environ[key] = value - - for key in tuple(os.environ.keys()): - if key not in self.old_environ: - del os.environ[key] - - super(EnvironGuard, self).tearDown() - - -def copy_xxmodule_c(directory): - """Helper for tests that need the xxmodule.c source file. 
- - Example use: - - def test_compile(self): - copy_xxmodule_c(self.tmpdir) - self.assertIn('xxmodule.c', os.listdir(self.tmpdir)) - - If the source file can be found, it will be copied to *directory*. If not, - the test will be skipped. Errors during copy are not caught. - """ - filename = _get_xxmodule_path() - if filename is None: - raise unittest.SkipTest('cannot find xxmodule.c (test must run in ' - 'the python build dir)') - shutil.copy(filename, directory) - - -def _get_xxmodule_path(): - srcdir = sysconfig.get_config_var('srcdir') - candidates = [ - # use installed copy if available - os.path.join(os.path.dirname(__file__), 'xxmodule.c'), - # otherwise try using copy from build directory - os.path.join(srcdir, 'Modules', 'xxmodule.c'), - # srcdir mysteriously can be $srcdir/Lib/distutils/tests when - # this file is run from its parent directory, so walk up the - # tree to find the real srcdir - os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'), - ] - for path in candidates: - if os.path.exists(path): - return path - - -def fixup_build_ext(cmd): - """Function needed to make build_ext tests pass. - - When Python was built with --enable-shared on Unix, -L. is not enough to - find libpython.so, because regrtest runs in a tempdir, not in the - source directory where the .so lives. - - When Python was built with in debug mode on Windows, build_ext commands - need their debug attribute set, and it is not done automatically for - some reason. - - This function handles both of these things. Example use: - - cmd = build_ext(dist) - support.fixup_build_ext(cmd) - cmd.ensure_finalized() - - Unlike most other Unix platforms, Mac OS X embeds absolute paths - to shared libraries into executables, so the fixup is not needed there. - """ - if os.name == 'nt': - cmd.debug = sys.executable.endswith('_d.exe') - elif sysconfig.get_config_var('Py_ENABLE_SHARED'): - # To further add to the shared builds fun on Unix, we can't just add - # library_dirs to the Extension() instance because that doesn't get - # plumbed through to the final compiler command. 
- runshared = sysconfig.get_config_var('RUNSHARED') - if runshared is None: - cmd.library_dirs = ['.'] - else: - if sys.platform == 'darwin': - cmd.library_dirs = [] - else: - name, equals, value = runshared.partition('=') - cmd.library_dirs = [d for d in value.split(os.pathsep) if d] diff --git a/Lib/distutils/tests/test_archive_util.py b/Lib/distutils/tests/test_archive_util.py deleted file mode 100644 index 8aec84078ed48f..00000000000000 --- a/Lib/distutils/tests/test_archive_util.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -"""Tests for distutils.archive_util.""" -import unittest -import os -import sys -import tarfile -from os.path import splitdrive -import warnings - -from distutils import archive_util -from distutils.archive_util import (check_archive_formats, make_tarball, - make_zipfile, make_archive, - ARCHIVE_FORMATS) -from distutils.spawn import find_executable, spawn -from distutils.tests import support -from test.support import run_unittest, patch -from test.support.os_helper import change_cwd -from test.support.warnings_helper import check_warnings - -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - -try: - import zipfile - ZIP_SUPPORT = True -except ImportError: - ZIP_SUPPORT = find_executable('zip') - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - -try: - import bz2 -except ImportError: - bz2 = None - -try: - import lzma -except ImportError: - lzma = None - -def can_fs_encode(filename): - """ - Return True if the filename can be saved in the file system. - """ - if os.path.supports_unicode_filenames: - return True - try: - filename.encode(sys.getfilesystemencoding()) - except UnicodeEncodeError: - return False - return True - - -class ArchiveUtilTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_tarball(self, name='archive'): - # creating something to tar - tmpdir = self._create_files() - self._make_tarball(tmpdir, name, '.tar.gz') - # trying an uncompressed one - self._make_tarball(tmpdir, name, '.tar', compress=None) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_tarball_gzip(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip') - - @unittest.skipUnless(bz2, 'Need bz2 support to run') - def test_make_tarball_bzip2(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2') - - @unittest.skipUnless(lzma, 'Need lzma support to run') - def test_make_tarball_xz(self): - tmpdir = self._create_files() - self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') - - @unittest.skipUnless(can_fs_encode('årchiv'), - 'File system cannot handle this filename') - def test_make_tarball_latin1(self): - """ - Mirror test_make_tarball, except filename contains latin characters. - """ - self.test_make_tarball('årchiv') # note this isn't a real word - - @unittest.skipUnless(can_fs_encode('のアーカイブ'), - 'File system cannot handle this filename') - def test_make_tarball_extended(self): - """ - Mirror test_make_tarball, except filename contains extended - characters outside the latin charset. 
- """ - self.test_make_tarball('のアーカイブ') # japanese for archive - - def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): - tmpdir2 = self.mkdtemp() - unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], - "source and target should be on same drive") - - base_name = os.path.join(tmpdir2, target_name) - - # working with relative paths to avoid tar warnings - with change_cwd(tmpdir): - make_tarball(splitdrive(base_name)[1], 'dist', **kwargs) - - # check if the compressed tarball was created - tarball = base_name + suffix - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(self._tarinfo(tarball), self._created_files) - - def _tarinfo(self, path): - tar = tarfile.open(path) - try: - names = tar.getnames() - names.sort() - return names - finally: - tar.close() - - _zip_created_files = ['dist/', 'dist/file1', 'dist/file2', - 'dist/sub/', 'dist/sub/file3', 'dist/sub2/'] - _created_files = [p.rstrip('/') for p in _zip_created_files] - - def _create_files(self): - # creating something to tar - tmpdir = self.mkdtemp() - dist = os.path.join(tmpdir, 'dist') - os.mkdir(dist) - self.write_file([dist, 'file1'], 'xxx') - self.write_file([dist, 'file2'], 'xxx') - os.mkdir(os.path.join(dist, 'sub')) - self.write_file([dist, 'sub', 'file3'], 'xxx') - os.mkdir(os.path.join(dist, 'sub2')) - return tmpdir - - @unittest.skipUnless(find_executable('tar') and find_executable('gzip') - and ZLIB_SUPPORT, - 'Need the tar, gzip and zlib command to run') - def test_tarfile_vs_tar(self): - tmpdir = self._create_files() - tmpdir2 = self.mkdtemp() - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist') - finally: - os.chdir(old_dir) - - # check if the compressed tarball was created - tarball = base_name + '.tar.gz' - self.assertTrue(os.path.exists(tarball)) - - # now create another tarball using `tar` - tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') - tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] - gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar'] - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - spawn(tar_cmd) - spawn(gzip_cmd) - finally: - os.chdir(old_dir) - - self.assertTrue(os.path.exists(tarball2)) - # let's compare both tarballs - self.assertEqual(self._tarinfo(tarball), self._created_files) - self.assertEqual(self._tarinfo(tarball2), self._created_files) - - # trying an uncompressed one - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist', compress=None) - finally: - os.chdir(old_dir) - tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) - - # now for a dry_run - base_name = os.path.join(tmpdir2, 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(base_name, 'dist', compress=None, dry_run=True) - finally: - os.chdir(old_dir) - tarball = base_name + '.tar' - self.assertTrue(os.path.exists(tarball)) - - @unittest.skipUnless(find_executable('compress'), - 'The compress program is required') - def test_compress_deprecated(self): - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - - # using compress and testing the PendingDeprecationWarning - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - with check_warnings() as w: - warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress') - finally: - os.chdir(old_dir) - tarball = base_name + '.tar.Z' - self.assertTrue(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) 
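# Illustrative sketch, not part of the removed test file: the make_tarball(),
# make_zipfile() and make_archive() helpers exercised by these tests have a
# maintained counterpart in shutil.make_archive().  The directory layout
# below is made up for the example.
import os, shutil, tempfile
with tempfile.TemporaryDirectory() as tmp:
    os.mkdir(os.path.join(tmp, "dist"))
    archive = shutil.make_archive(
        os.path.join(tmp, "archive"), "gztar", root_dir=tmp, base_dir="dist")
    # archive ends with ".tar.gz"; other formats: "zip", "bztar", "xztar", "tar"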
- - # same test with dry_run - os.remove(tarball) - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - with check_warnings() as w: - warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress', - dry_run=True) - finally: - os.chdir(old_dir) - self.assertFalse(os.path.exists(tarball)) - self.assertEqual(len(w.warnings), 1) - - @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT, - 'Need zip and zlib support to run') - def test_make_zipfile(self): - # creating something to tar - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): - make_zipfile(base_name, 'dist') - - # check if the compressed tarball was created - tarball = base_name + '.zip' - self.assertTrue(os.path.exists(tarball)) - with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) - - @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') - def test_make_zipfile_no_zlib(self): - patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError - - called = [] - zipfile_class = zipfile.ZipFile - def fake_zipfile(*a, **kw): - if kw.get('compression', None) == zipfile.ZIP_STORED: - called.append((a, kw)) - return zipfile_class(*a, **kw) - - patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile) - - # create something to tar and compress - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - with change_cwd(tmpdir): - make_zipfile(base_name, 'dist') - - tarball = base_name + '.zip' - self.assertEqual(called, - [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) - self.assertTrue(os.path.exists(tarball)) - with zipfile.ZipFile(tarball) as zf: - self.assertEqual(sorted(zf.namelist()), self._zip_created_files) - - def test_check_archive_formats(self): - self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), - 'xxx') - self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar', - 'ztar', 'tar', 'zip'])) - - def test_make_archive(self): - tmpdir = self.mkdtemp() - base_name = os.path.join(tmpdir, 'archive') - self.assertRaises(ValueError, make_archive, base_name, 'xxx') - - def test_make_archive_cwd(self): - current_dir = os.getcwd() - def _breaks(*args, **kw): - raise RuntimeError() - ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') - try: - try: - make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) - except: - pass - self.assertEqual(os.getcwd(), current_dir) - finally: - del ARCHIVE_FORMATS['xxx'] - - def test_make_archive_tar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'tar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_archive_gztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'gztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.gz') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(bz2, 'Need bz2 support to run') - def test_make_archive_bztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'bztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - 
self.assertEqual(os.path.basename(res), 'archive.tar.bz2') - self.assertEqual(self._tarinfo(res), self._created_files) - - @unittest.skipUnless(lzma, 'Need xz support to run') - def test_make_archive_xztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'xztar', base_dir, 'dist') - self.assertTrue(os.path.exists(res)) - self.assertEqual(os.path.basename(res), 'archive.tar.xz') - self.assertEqual(self._tarinfo(res), self._created_files) - - def test_make_archive_owner_group(self): - # testing make_archive with owner and group, with various combinations - # this works even if there's not gid/uid support - if UID_GID_SUPPORT: - group = grp.getgrgid(0)[0] - owner = pwd.getpwuid(0)[0] - else: - group = owner = 'root' - - base_dir = self._create_files() - root_dir = self.mkdtemp() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, - group=group) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'zip', root_dir, base_dir) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner=owner, group=group) - self.assertTrue(os.path.exists(res)) - - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner='kjhkjhkjg', group='oihohoh') - self.assertTrue(os.path.exists(res)) - - @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") - def test_tarfile_root_owner(self): - tmpdir = self._create_files() - base_name = os.path.join(self.mkdtemp(), 'archive') - old_dir = os.getcwd() - os.chdir(tmpdir) - group = grp.getgrgid(0)[0] - owner = pwd.getpwuid(0)[0] - try: - archive_name = make_tarball(base_name, 'dist', compress=None, - owner=owner, group=group) - finally: - os.chdir(old_dir) - - # check if the compressed tarball was created - self.assertTrue(os.path.exists(archive_name)) - - # now checks the rights - archive = tarfile.open(archive_name) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) - finally: - archive.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist.py b/Lib/distutils/tests/test_bdist.py deleted file mode 100644 index 5676f7f34d4292..00000000000000 --- a/Lib/distutils/tests/test_bdist.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Tests for distutils.command.bdist.""" -import unittest -from test.support import run_unittest - -import warnings -with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - from distutils.command.bdist import bdist - from distutils.tests import support - - -class BuildTestCase(support.TempdirManager, - unittest.TestCase): - - def test_formats(self): - # let's create a command and make sure - # we can set the format - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.formats = ['tar'] - cmd.ensure_finalized() - self.assertEqual(cmd.formats, ['tar']) - - # what formats does bdist offer? 
- formats = ['bztar', 'gztar', 'rpm', 'tar', 'xztar', 'zip', 'ztar'] - found = sorted(cmd.format_command) - self.assertEqual(found, formats) - - def test_skip_build(self): - # bug #10946: bdist --skip-build should trickle down to subcommands - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.skip_build = 1 - cmd.ensure_finalized() - dist.command_obj['bdist'] = cmd - - for name in ['bdist_dumb']: # bdist_rpm does not support --skip-build - subcmd = cmd.get_finalized_command(name) - if getattr(subcmd, '_unsupported', False): - # command is not supported on this build - continue - self.assertTrue(subcmd.skip_build, - '%s should take --skip-build from bdist' % name) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) - - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py deleted file mode 100644 index bb860c8ac70345..00000000000000 --- a/Lib/distutils/tests/test_bdist_dumb.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Tests for distutils.command.bdist_dumb.""" - -import os -import sys -import zipfile -import unittest -from test.support import run_unittest - -from distutils.core import Distribution -from distutils.command.bdist_dumb import bdist_dumb -from distutils.tests import support - -SETUP_PY = """\ -from distutils.core import setup -import foo - -setup(name='foo', version='0.1', py_modules=['foo'], - url='xxx', author='xxx', author_email='xxx') - -""" - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - - -class BuildDumbTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(BuildDumbTestCase, self).setUp() - self.old_location = os.getcwd() - self.old_sys_argv = sys.argv, sys.argv[:] - - def tearDown(self): - os.chdir(self.old_location) - sys.argv = self.old_sys_argv[0] - sys.argv[:] = self.old_sys_argv[1] - super(BuildDumbTestCase, self).tearDown() - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_simple_built(self): - - # let's create a simple package - tmp_dir = self.mkdtemp() - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_dumb(dist) - - # so the output is the same no matter - # what is the platform - cmd.format = 'zip' - - cmd.ensure_finalized() - cmd.run() - - # see what we have - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name) - - self.assertEqual(dist_created, [base]) - - # now let's check what we have in the zip file - fp = zipfile.ZipFile(os.path.join('dist', base)) - try: - contents = fp.namelist() - finally: - fp.close() - - contents = sorted(filter(None, map(os.path.basename, contents))) - wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py'] - if not sys.dont_write_bytecode: - wanted.append('foo.%s.pyc' % sys.implementation.cache_tag) - self.assertEqual(contents, sorted(wanted)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase) - 
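# Illustrative sketch, not part of the removed test file: a self-contained
# version of the namelist() inspection done in test_simple_built above, using
# an in-memory archive instead of a real bdist_dumb build.
import io, zipfile
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("foo.py", "#")
with zipfile.ZipFile(buf) as zf:
    assert sorted(zf.namelist()) == ["foo.py"]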
-if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_bdist_rpm.py b/Lib/distutils/tests/test_bdist_rpm.py deleted file mode 100644 index 7eefa7b9cad84f..00000000000000 --- a/Lib/distutils/tests/test_bdist_rpm.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Tests for distutils.command.bdist_rpm.""" - -import unittest -import sys -import os -from test.support import run_unittest, requires_zlib - -from distutils.core import Distribution -from distutils.command.bdist_rpm import bdist_rpm -from distutils.tests import support -from distutils.spawn import find_executable - -SETUP_PY = """\ -from distutils.core import setup -import foo - -setup(name='foo', version='0.1', py_modules=['foo'], - url='xxx', author='xxx', author_email='xxx') - -""" - -class BuildRpmTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - try: - sys.executable.encode("UTF-8") - except UnicodeEncodeError: - raise unittest.SkipTest("sys.executable is not encodable to UTF-8") - - super(BuildRpmTestCase, self).setUp() - self.old_location = os.getcwd() - self.old_sys_argv = sys.argv, sys.argv[:] - - def tearDown(self): - os.chdir(self.old_location) - sys.argv = self.old_sys_argv[0] - sys.argv[:] = self.old_sys_argv[1] - super(BuildRpmTestCase, self).tearDown() - - # XXX I am unable yet to make this test work without - # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib() - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') - # import foo fails with safe path - @unittest.skipIf(sys.flags.safe_path, - 'PYTHONSAFEPATH changes default sys.path') - def test_quiet(self): - # let's create a package - tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_rpm(dist) - cmd.fix_python = True - - # running in quiet mode - cmd.quiet = 1 - cmd.ensure_finalized() - cmd.run() - - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - self.assertIn('foo-0.1-1.noarch.rpm', dist_created) - - # bug #2945: upload ignores bdist_rpm files - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) - - # XXX I am unable yet to make this test work without - # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib() - # http://bugs.python.org/issue1533164 - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') - # import foo fails with safe path - @unittest.skipIf(sys.flags.safe_path, - 'PYTHONSAFEPATH changes default sys.path') - def 
test_no_optimize_flag(self): - # let's create a package that breaks bdist_rpm - tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'setup.py'), SETUP_PY) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - dist.script_name = 'setup.py' - os.chdir(pkg_dir) - - sys.argv = ['setup.py'] - cmd = bdist_rpm(dist) - cmd.fix_python = True - - cmd.quiet = 1 - cmd.ensure_finalized() - cmd.run() - - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - self.assertIn('foo-0.1-1.noarch.rpm', dist_created) - - # bug #2945: upload ignores bdist_rpm files - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) - - os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build.py b/Lib/distutils/tests/test_build.py deleted file mode 100644 index 71b5e164bae14a..00000000000000 --- a/Lib/distutils/tests/test_build.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Tests for distutils.command.build.""" -import unittest -import os -import sys -from test.support import run_unittest - -from distutils.command.build import build -from distutils.tests import support -from sysconfig import get_platform - -class BuildTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(sys.executable, "test requires sys.executable") - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build(dist) - cmd.finalize_options() - - # if not specified, plat_name gets the current platform - self.assertEqual(cmd.plat_name, get_platform()) - - # build_purelib is build + lib - wanted = os.path.join(cmd.build_base, 'lib') - self.assertEqual(cmd.build_purelib, wanted) - - # build_platlib is 'build/lib.platform-x.x[-pydebug]' - # examples: - # build/lib.macosx-10.3-i386-2.7 - plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2]) - if hasattr(sys, 'gettotalrefcount'): - self.assertTrue(cmd.build_platlib.endswith('-pydebug')) - plat_spec += '-pydebug' - wanted = os.path.join(cmd.build_base, 'lib' + plat_spec) - self.assertEqual(cmd.build_platlib, wanted) - - # by default, build_lib = build_purelib - self.assertEqual(cmd.build_lib, cmd.build_purelib) - - # build_temp is build/temp. 
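# Illustrative sketch, not part of the removed test file: the build directory
# names asserted in test_finalize_options() are derived from the platform tag
# and the interpreter version, roughly as follows.
import sys, sysconfig
plat_spec = ".%s-%d.%d" % (sysconfig.get_platform(), *sys.version_info[:2])
# e.g. ".linux-x86_64-3.11"; debug builds additionally get a "-pydebug" suffix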
- wanted = os.path.join(cmd.build_base, 'temp' + plat_spec) - self.assertEqual(cmd.build_temp, wanted) - - # build_scripts is build/scripts-x.x - wanted = os.path.join(cmd.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) - self.assertEqual(cmd.build_scripts, wanted) - - # executable is os.path.normpath(sys.executable) - self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py deleted file mode 100644 index 95f928288e0048..00000000000000 --- a/Lib/distutils/tests/test_build_clib.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Tests for distutils.command.build_clib.""" -import unittest -import os -import sys -import sysconfig - -from test.support import ( - run_unittest, missing_compiler_executable, requires_subprocess -) - -from distutils.command.build_clib import build_clib -from distutils.errors import DistutilsSetupError -from distutils.tests import support - -class BuildCLibTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - super().setUp() - self._backup_CONFIG_VARS = dict(sysconfig._CONFIG_VARS) - - def tearDown(self): - super().tearDown() - sysconfig._CONFIG_VARS.clear() - sysconfig._CONFIG_VARS.update(self._backup_CONFIG_VARS) - - def test_check_library_dist(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # 'libraries' option must be a list - self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo') - - # each element of 'libraries' must a 2-tuple - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - ['foo1', 'foo2']) - - # first element of each tuple in 'libraries' - # must be a string (the library name) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [(1, 'foo1'), ('name', 'foo2')]) - - # library name may not contain directory separators - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', 'foo1'), - ('another/name', 'foo2')]) - - # second element of each tuple must be a dictionary (build info) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', {}), - ('another', 'foo2')]) - - # those work - libs = [('name', {}), ('name', {'ok': 'good'})] - cmd.check_library_list(libs) - - def test_get_source_files(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # "in 'libraries' option 'sources' must be present and must be - # a list of source filenames - cmd.libraries = [('name', {})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': 1})] - self.assertRaises(DistutilsSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': ['a', 'b']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')}), - ('name2', {'sources': ['c', 'd']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) - - def test_build_libraries(self): - - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - class FakeCompiler: - def compile(*args, **kw): - pass - create_static_lib = compile - - cmd.compiler = FakeCompiler() - - # build_libraries is also doing a bit of typo checking - lib = [('name', {'sources': 'notvalid'})] - 
self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) - - lib = [('name', {'sources': list()})] - cmd.build_libraries(lib) - - lib = [('name', {'sources': tuple()})] - cmd.build_libraries(lib) - - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - cmd.include_dirs = 'one-dir' - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, ['one-dir']) - - cmd.include_dirs = None - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, []) - - cmd.distribution.libraries = 'WONTWORK' - self.assertRaises(DistutilsSetupError, cmd.finalize_options) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - @requires_subprocess() - def test_run(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - foo_c = os.path.join(pkg_dir, 'foo.c') - self.write_file(foo_c, 'int main(void) { return 1;}\n') - cmd.libraries = [('foo', {'sources': [foo_c]})] - - build_temp = os.path.join(pkg_dir, 'build') - os.mkdir(build_temp) - cmd.build_temp = build_temp - cmd.build_clib = build_temp - - # Before we run the command, we want to make sure - # all commands are present on the system. - ccmd = missing_compiler_executable() - if ccmd is not None: - self.skipTest('The %r command is not found' % ccmd) - - # this should work - cmd.run() - - # let's check the result - self.assertIn('libfoo.a', os.listdir(build_temp)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_ext.py b/Lib/distutils/tests/test_build_ext.py deleted file mode 100644 index 4ebeafecef03c3..00000000000000 --- a/Lib/distutils/tests/test_build_ext.py +++ /dev/null @@ -1,555 +0,0 @@ -import sys -import os -from io import StringIO -import textwrap - -from distutils.core import Distribution -from distutils.command.build_ext import build_ext -from distutils import sysconfig -from distutils.tests.support import (TempdirManager, LoggingSilencer, - copy_xxmodule_c, fixup_build_ext) -from distutils.extension import Extension -from distutils.errors import ( - CompileError, DistutilsPlatformError, DistutilsSetupError, - UnknownFileError) - -import unittest -from test import support -from test.support import os_helper -from test.support.script_helper import assert_python_ok -from test.support import threading_helper - -# http://bugs.python.org/issue4373 -# Don't load the xx module more than once. -ALREADY_TESTED = False - - -class BuildExtTestCase(TempdirManager, - LoggingSilencer, - unittest.TestCase): - def setUp(self): - # Create a simple test environment - super(BuildExtTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - import site - self.old_user_base = site.USER_BASE - site.USER_BASE = self.mkdtemp() - from distutils.command import build_ext - build_ext.USER_BASE = site.USER_BASE - self.old_config_vars = dict(sysconfig._config_vars) - - # bpo-30132: On Windows, a .pdb file may be created in the current - # working directory. Create a temporary working directory to cleanup - # everything at the end of the test. 
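# Illustrative sketch, not part of the removed test file: change_cwd() on the
# next line is a CPython test-suite helper (test.support.os_helper); outside
# that suite the same pattern is a small context manager.
import contextlib, os, tempfile

@contextlib.contextmanager
def change_cwd(path):
    old = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(old)

with tempfile.TemporaryDirectory() as tmp, change_cwd(tmp):
    pass  # work inside the throw-away directory; the old cwd is restored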
- self.enterContext(os_helper.change_cwd(self.tmp_dir)) - - def tearDown(self): - import site - site.USER_BASE = self.old_user_base - from distutils.command import build_ext - build_ext.USER_BASE = self.old_user_base - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self.old_config_vars) - super(BuildExtTestCase, self).tearDown() - - def build_ext(self, *args, **kwargs): - return build_ext(*args, **kwargs) - - @support.requires_subprocess() - def test_build_ext(self): - cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - global ALREADY_TESTED - copy_xxmodule_c(self.tmp_dir) - xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') - xx_ext = Extension('xx', [xx_c]) - dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) - dist.package_dir = self.tmp_dir - cmd = self.build_ext(dist) - fixup_build_ext(cmd) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - - old_stdout = sys.stdout - if not support.verbose: - # silence compiler output - sys.stdout = StringIO() - try: - cmd.ensure_finalized() - cmd.run() - finally: - sys.stdout = old_stdout - - if ALREADY_TESTED: - self.skipTest('Already tested in %s' % ALREADY_TESTED) - else: - ALREADY_TESTED = type(self).__name__ - - code = textwrap.dedent(f""" - tmp_dir = {self.tmp_dir!r} - - import sys - import unittest - from test import support - - sys.path.insert(0, tmp_dir) - import xx - - class Tests(unittest.TestCase): - def test_xx(self): - for attr in ('error', 'foo', 'new', 'roj'): - self.assertTrue(hasattr(xx, attr)) - - self.assertEqual(xx.foo(2, 5), 7) - self.assertEqual(xx.foo(13,15), 28) - self.assertEqual(xx.new().demo(), None) - if support.HAVE_DOCSTRINGS: - doc = 'This is a template module just for instruction.' - self.assertEqual(xx.__doc__, doc) - self.assertIsInstance(xx.Null(), xx.Null) - self.assertIsInstance(xx.Str(), xx.Str) - - - unittest.main() - """) - assert_python_ok('-c', code) - - def test_solaris_enable_shared(self): - dist = Distribution({'name': 'xx'}) - cmd = self.build_ext(dist) - old = sys.platform - - sys.platform = 'sunos' # fooling finalize_options - from distutils.sysconfig import _config_vars - old_var = _config_vars.get('Py_ENABLE_SHARED') - _config_vars['Py_ENABLE_SHARED'] = 1 - try: - cmd.ensure_finalized() - finally: - sys.platform = old - if old_var is None: - del _config_vars['Py_ENABLE_SHARED'] - else: - _config_vars['Py_ENABLE_SHARED'] = old_var - - # make sure we get some library dirs under solaris - self.assertGreater(len(cmd.library_dirs), 0) - - def test_user_site(self): - import site - dist = Distribution({'name': 'xx'}) - cmd = self.build_ext(dist) - - # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = 1 - - # setting user based lib and include - lib = os.path.join(site.USER_BASE, 'lib') - incl = os.path.join(site.USER_BASE, 'include') - os.mkdir(lib) - os.mkdir(incl) - - # let's run finalize - cmd.ensure_finalized() - - # see if include_dirs and library_dirs - # were set - self.assertIn(lib, cmd.library_dirs) - self.assertIn(lib, cmd.rpath) - self.assertIn(incl, cmd.include_dirs) - - @threading_helper.requires_working_threading() - def test_optional_extension(self): - - # this extension will fail, but let's ignore this failure - # with the optional argument. 
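# Illustrative sketch, assuming a setuptools release that still ships the
# distutils-derived Extension/build_ext pair (check your version): the
# optional=... flag exercised by this test has the same meaning there, a
# failed build of an optional extension is reported as a warning instead of
# aborting the build.  "spam" and "spam.c" are made-up names.
from setuptools import Extension
ext = Extension("spam", sources=["spam.c"], optional=True)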
- modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertRaises((UnknownFileError, CompileError), - cmd.run) # should raise an error - - modules = [Extension('foo', ['xxx'], optional=True)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - cmd.run() # should pass - - def test_finalize_options(self): - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.finalize_options() - - py_include = sysconfig.get_python_inc() - for p in py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) - - plat_py_include = sysconfig.get_python_inc(plat_specific=1) - for p in plat_py_include.split(os.path.pathsep): - self.assertIn(p, cmd.include_dirs) - - # make sure cmd.libraries is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.libraries = 'my_lib, other_lib lastlib' - cmd.finalize_options() - self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) - - # make sure cmd.library_dirs is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep - cmd.finalize_options() - self.assertIn('my_lib_dir', cmd.library_dirs) - self.assertIn('other_lib_dir', cmd.library_dirs) - - # make sure rpath is turned into a list - # if it's a string - cmd = self.build_ext(dist) - cmd.rpath = 'one%stwo' % os.pathsep - cmd.finalize_options() - self.assertEqual(cmd.rpath, ['one', 'two']) - - # make sure cmd.link_objects is turned into a list - # if it's a string - cmd = build_ext(dist) - cmd.link_objects = 'one two,three' - cmd.finalize_options() - self.assertEqual(cmd.link_objects, ['one', 'two', 'three']) - - # XXX more tests to perform for win32 - - # make sure define is turned into 2-tuples - # strings if they are ','-separated strings - cmd = self.build_ext(dist) - cmd.define = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) - - # make sure undef is turned into a list of - # strings if they are ','-separated strings - cmd = self.build_ext(dist) - cmd.undef = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.undef, ['one', 'two']) - - # make sure swig_opts is turned into a list - cmd = self.build_ext(dist) - cmd.swig_opts = None - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, []) - - cmd = self.build_ext(dist) - cmd.swig_opts = '1 2' - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, ['1', '2']) - - def test_check_extensions_list(self): - dist = Distribution() - cmd = self.build_ext(dist) - cmd.finalize_options() - - #'extensions' option must be a list of Extension instances - self.assertRaises(DistutilsSetupError, - cmd.check_extensions_list, 'foo') - - # each element of 'ext_modules' option must be an - # Extension instance or 2-tuple - exts = [('bar', 'foo', 'bar'), 'foo'] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # first element of each tuple in 'ext_modules' - # must be the extension name (a string) and match - # a python dotted-separated name - exts = [('foo-bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # second element of each tuple in 'ext_modules' 
- # must be a dictionary (build info) - exts = [('foo.bar', '')] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - # ok this one should pass - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar'})] - cmd.check_extensions_list(exts) - ext = exts[0] - self.assertIsInstance(ext, Extension) - - # check_extensions_list adds in ext the values passed - # when they are in ('include_dirs', 'library_dirs', 'libraries' - # 'extra_objects', 'extra_compile_args', 'extra_link_args') - self.assertEqual(ext.libraries, 'foo') - self.assertFalse(hasattr(ext, 'some')) - - # 'macros' element of build info dict must be 1- or 2-tuple - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})] - self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) - - exts[0][1]['macros'] = [('1', '2'), ('3',)] - cmd.check_extensions_list(exts) - self.assertEqual(exts[0].undef_macros, ['3']) - self.assertEqual(exts[0].define_macros, [('1', '2')]) - - def test_get_source_files(self): - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertEqual(cmd.get_source_files(), ['xxx']) - - def test_unicode_module_names(self): - modules = [ - Extension('foo', ['aaa'], optional=False), - Extension('föö', ['uuu'], optional=False), - ] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = self.build_ext(dist) - cmd.ensure_finalized() - self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*') - self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*') - self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo']) - self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa']) - - def test_compiler_option(self): - # cmd.compiler is an option and - # should not be overridden by a compiler instance - # when the command is run - dist = Distribution() - cmd = self.build_ext(dist) - cmd.compiler = 'unix' - cmd.ensure_finalized() - cmd.run() - self.assertEqual(cmd.compiler, 'unix') - - @support.requires_subprocess() - def test_get_outputs(self): - cmd = support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - tmp_dir = self.mkdtemp() - c_file = os.path.join(tmp_dir, 'foo.c') - self.write_file(c_file, 'void PyInit_foo(void) {}\n') - ext = Extension('foo', [c_file], optional=False) - dist = Distribution({'name': 'xx', - 'ext_modules': [ext]}) - cmd = self.build_ext(dist) - fixup_build_ext(cmd) - cmd.ensure_finalized() - self.assertEqual(len(cmd.get_outputs()), 1) - - cmd.build_lib = os.path.join(self.tmp_dir, 'build') - cmd.build_temp = os.path.join(self.tmp_dir, 'tempt') - - # issue #5977 : distutils build_ext.get_outputs - # returns wrong result with --inplace - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - cmd.inplace = 1 - cmd.run() - so_file = cmd.get_outputs()[0] - finally: - os.chdir(old_wd) - self.assertTrue(os.path.exists(so_file)) - ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') - self.assertTrue(so_file.endswith(ext_suffix)) - so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, other_tmp_dir) - - cmd.inplace = 0 - cmd.compiler = None - cmd.run() - so_file = cmd.get_outputs()[0] - self.assertTrue(os.path.exists(so_file)) - self.assertTrue(so_file.endswith(ext_suffix)) - so_dir = 
os.path.dirname(so_file) - self.assertEqual(so_dir, cmd.build_lib) - - # inplace = 0, cmd.package = 'bar' - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = {'': 'bar'} - path = cmd.get_ext_fullpath('foo') - # checking that the last directory is the build_dir - path = os.path.split(path)[0] - self.assertEqual(path, cmd.build_lib) - - # inplace = 1, cmd.package = 'bar' - cmd.inplace = 1 - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - path = cmd.get_ext_fullpath('foo') - finally: - os.chdir(old_wd) - # checking that the last directory is bar - path = os.path.split(path)[0] - lastdir = os.path.split(path)[-1] - self.assertEqual(lastdir, 'bar') - - def test_ext_fullpath(self): - ext = sysconfig.get_config_var('EXT_SUFFIX') - # building lxml.etree inplace - #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') - #etree_ext = Extension('lxml.etree', [etree_c]) - #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) - dist = Distribution() - cmd = self.build_ext(dist) - cmd.inplace = 1 - cmd.distribution.package_dir = {'': 'src'} - cmd.distribution.packages = ['lxml', 'lxml.html'] - curdir = os.getcwd() - wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building lxml.etree not inplace - cmd.inplace = 0 - cmd.build_lib = os.path.join(curdir, 'tmpdir') - wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building twisted.runner.portmap not inplace - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = {} - cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', - 'portmap' + ext) - self.assertEqual(wanted, path) - - # building twisted.runner.portmap inplace - cmd.inplace = 1 - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) - self.assertEqual(wanted, path) - - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_default(self): - # Issue 9516: Test that, in the absence of the environment variable, - # an extension module is compiled with the same deployment target as - # the interpreter. - self._try_compile_deployment_target('==', None) - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_too_low(self): - # Issue 9516: Test that an extension module is not allowed to be - # compiled with a deployment target less than that of the interpreter. - self.assertRaises(DistutilsPlatformError, - self._try_compile_deployment_target, '>', '10.1') - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') - def test_deployment_target_higher_ok(self): - # Issue 9516: Test that an extension module can be compiled with a - # deployment target higher than that of the interpreter: the ext - # module may depend on some newer OS feature. - deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if deptarget: - # increment the minor version number (i.e. 
10.6 -> 10.7) - deptarget = [int(x) for x in deptarget.split('.')] - deptarget[-1] += 1 - deptarget = '.'.join(str(i) for i in deptarget) - self._try_compile_deployment_target('<', deptarget) - - def _try_compile_deployment_target(self, operator, target): - orig_environ = os.environ - os.environ = orig_environ.copy() - self.addCleanup(setattr, os, 'environ', orig_environ) - - if target is None: - if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): - del os.environ['MACOSX_DEPLOYMENT_TARGET'] - else: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = target - - deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') - - with open(deptarget_c, 'w') as fp: - fp.write(textwrap.dedent('''\ - #include - - int dummy; - - #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED - #else - #error "Unexpected target" - #endif - - ''' % operator)) - - # get the deployment target that the interpreter was built with - target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - target = tuple(map(int, target.split('.')[0:2])) - # format the target value as defined in the Apple - # Availability Macros. We can't use the macro names since - # at least one value we test with will not exist yet. - if target[:2] < (10, 10): - # for 10.1 through 10.9.x -> "10n0" - target = '%02d%01d0' % target - else: - # for 10.10 and beyond -> "10nn00" - if len(target) >= 2: - target = '%02d%02d00' % target - else: - # 11 and later can have no minor version (11 instead of 11.0) - target = '%02d0000' % target - deptarget_ext = Extension( - 'deptarget', - [deptarget_c], - extra_compile_args=['-DTARGET=%s'%(target,)], - ) - dist = Distribution({ - 'name': 'deptarget', - 'ext_modules': [deptarget_ext] - }) - dist.package_dir = self.tmp_dir - cmd = self.build_ext(dist) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - - try: - old_stdout = sys.stdout - if not support.verbose: - # silence compiler output - sys.stdout = StringIO() - try: - cmd.ensure_finalized() - cmd.run() - finally: - sys.stdout = old_stdout - - except CompileError: - self.fail("Wrong deployment target during compilation") - - -class ParallelBuildExtTestCase(BuildExtTestCase): - - def build_ext(self, *args, **kwargs): - build_ext = super().build_ext(*args, **kwargs) - build_ext.parallel = True - return build_ext - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)) - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) - return suite - -if __name__ == '__main__': - support.run_unittest(__name__) diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py deleted file mode 100644 index 44a06cc963aa3c..00000000000000 --- a/Lib/distutils/tests/test_build_py.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Tests for distutils.command.build_py.""" - -import os -import sys -import unittest - -from distutils.command.build_py import build_py -from distutils.core import Distribution -from distutils.errors import DistutilsFileError - -from distutils.tests import support -from test.support import run_unittest, requires_subprocess - - -class BuildPyTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_package_data(self): - sources = self.mkdtemp() - f = open(os.path.join(sources, "__init__.py"), "w") - try: - f.write("# Pretend this is a package.") - finally: - f.close() - f = open(os.path.join(sources, "README.txt"), "w") - try: - f.write("Info about this package") - finally: - f.close() - - destination = 
self.mkdtemp() - - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": sources}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - force=0, - build_lib=destination) - dist.packages = ["pkg"] - dist.package_data = {"pkg": ["README.txt"]} - dist.package_dir = {"pkg": sources} - - cmd = build_py(dist) - cmd.compile = 1 - cmd.ensure_finalized() - self.assertEqual(cmd.package_data, dist.package_data) - - cmd.run() - - # This makes sure the list of outputs includes byte-compiled - # files for Python modules but not for package data files - # (there shouldn't *be* byte-code files for those!). - self.assertEqual(len(cmd.get_outputs()), 3) - pkgdest = os.path.join(destination, "pkg") - files = os.listdir(pkgdest) - pycache_dir = os.path.join(pkgdest, "__pycache__") - self.assertIn("__init__.py", files) - self.assertIn("README.txt", files) - if sys.dont_write_bytecode: - self.assertFalse(os.path.exists(pycache_dir)) - else: - pyc_files = os.listdir(pycache_dir) - self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, - pyc_files) - - def test_empty_package_dir(self): - # See bugs #1668596/#1720897 - sources = self.mkdtemp() - open(os.path.join(sources, "__init__.py"), "w").close() - - testdir = os.path.join(sources, "doc") - os.mkdir(testdir) - open(os.path.join(testdir, "testfile"), "w").close() - - os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": ""}, - "package_data": {"pkg": ["doc/*"]}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.script_args = ["build"] - dist.parse_command_line() - - try: - dist.run_commands() - except DistutilsFileError: - self.fail("failed package_data test when package_dir is ''") - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = 1 - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(found, - ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]) - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile_optimized(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = 0 - cmd.optimize = 1 - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag) - self.assertEqual(sorted(found), [expect]) - - def test_dir_in_package_data(self): - """ - A directory in package_data should not be added to the filelist. 
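The byte-compilation tests deleted just above assert the exact cache-file names produced for a module ('boiledeggs.<cache_tag>.pyc' and the '.opt-1' variant). As a quick stand-alone reference, here is a sketch using only importlib.util of how those names are derived; 'boiledeggs.py' is simply the filename the removed tests used.

import sys
import importlib.util

source = "boiledeggs.py"  # module name borrowed from the deleted test

# Name produced by plain byte-compilation (compile = 1), e.g.
# __pycache__/boiledeggs.cpython-312.pyc on CPython 3.12.
print(importlib.util.cache_from_source(source))

# Name produced at optimization level 1 (optimize = 1), e.g.
# __pycache__/boiledeggs.cpython-312.opt-1.pyc.
print(importlib.util.cache_from_source(source, optimization=1))

# The interpreter-specific tag embedded in both names.
print(sys.implementation.cache_tag)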
- """ - # See bug 19286 - sources = self.mkdtemp() - pkg_dir = os.path.join(sources, "pkg") - - os.mkdir(pkg_dir) - open(os.path.join(pkg_dir, "__init__.py"), "w").close() - - docdir = os.path.join(pkg_dir, "doc") - os.mkdir(docdir) - open(os.path.join(docdir, "testfile"), "w").close() - - # create the directory that could be incorrectly detected as a file - os.mkdir(os.path.join(docdir, 'otherdir')) - - os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_data": {"pkg": ["doc/*"]}}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(sources, "setup.py") - dist.script_args = ["build"] - dist.parse_command_line() - - try: - dist.run_commands() - except DistutilsFileError: - self.fail("failed package_data when data dir includes a dir") - - def test_dont_write_bytecode(self): - # makes sure byte_compile is not used - dist = self.create_dist()[1] - cmd = build_py(dist) - cmd.compile = 1 - cmd.optimize = 1 - - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - cmd.byte_compile([]) - finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_build_scripts.py b/Lib/distutils/tests/test_build_scripts.py deleted file mode 100644 index f299e51ef79fac..00000000000000 --- a/Lib/distutils/tests/test_build_scripts.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Tests for distutils.command.build_scripts.""" - -import os -import unittest - -from distutils.command.build_scripts import build_scripts -from distutils.core import Distribution -from distutils import sysconfig - -from distutils.tests import support -from test.support import run_unittest - - -class BuildScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_default_settings(self): - cmd = self.get_build_scripts_cmd("/foo/bar", []) - self.assertFalse(cmd.force) - self.assertIsNone(cmd.build_dir) - - cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertEqual(cmd.build_dir, "/foo/bar") - - def test_build(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - cmd.run() - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - - def get_build_scripts_cmd(self, target, scripts): - import sys - dist = Distribution() - dist.scripts = scripts - dist.command_obj["build"] = support.DummyCommand( - build_scripts=target, - force=1, - executable=sys.executable - ) - return build_scripts(dist) - - def write_sample_scripts(self, dir): - expected = [] - expected.append("script1.py") - self.write_script(dir, "script1.py", - ("#! 
/usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("script2.py") - self.write_script(dir, "script2.py", - ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("shell.sh") - self.write_script(dir, "shell.sh", - ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - return expected - - def write_script(self, dir, name, text): - f = open(os.path.join(dir, name), "w") - try: - f.write(text) - finally: - f.close() - - def test_version_int(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - - # http://bugs.python.org/issue4524 - # - # On linux-g++-32 with command line `./configure --enable-ipv6 - # --with-suffix=3`, python is compiled okay but the build scripts - # failed when writing the name of the executable - old = sysconfig.get_config_vars().get('VERSION') - sysconfig._config_vars['VERSION'] = 4 - try: - cmd.run() - finally: - if old is not None: - sysconfig._config_vars['VERSION'] = old - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_check.py b/Lib/distutils/tests/test_check.py deleted file mode 100644 index 91bcdceb43bc69..00000000000000 --- a/Lib/distutils/tests/test_check.py +++ /dev/null @@ -1,163 +0,0 @@ -"""Tests for distutils.command.check.""" -import os -import textwrap -import unittest -from test.support import run_unittest - -from distutils.command.check import check, HAS_DOCUTILS -from distutils.tests import support -from distutils.errors import DistutilsSetupError - -try: - import pygments -except ImportError: - pygments = None - - -HERE = os.path.dirname(__file__) - - -class CheckTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - - def _run(self, metadata=None, cwd=None, **options): - if metadata is None: - metadata = {} - if cwd is not None: - old_dir = os.getcwd() - os.chdir(cwd) - pkg_info, dist = self.create_dist(**metadata) - cmd = check(dist) - cmd.initialize_options() - for name, value in options.items(): - setattr(cmd, name, value) - cmd.ensure_finalized() - cmd.run() - if cwd is not None: - os.chdir(old_dir) - return cmd - - def test_check_metadata(self): - # let's run the command with no metadata at all - # by default, check is checking the metadata - # should have some warnings - cmd = self._run() - self.assertEqual(cmd._warnings, 2) - - # now let's add the required fields - # and run it again, to make sure we don't get - # any warning anymore - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} - cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) - - # now with the strict mode, we should - # get an error if there are missing metadata - self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1}) - - # and of course, no error when all metadata are present - cmd = self._run(metadata, strict=1) - self.assertEqual(cmd._warnings, 0) - - # now a test with non-ASCII characters - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things 
about esszet \u00df'} - cmd = self._run(metadata) - self.assertEqual(cmd._warnings, 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_document(self): - pkg_info, dist = self.create_dist() - cmd = check(dist) - - # let's see if it detects broken rest - broken_rest = 'title\n===\n\ntest' - msgs = cmd._check_rst_data(broken_rest) - self.assertEqual(len(msgs), 1) - - # and non-broken rest - rest = 'title\n=====\n\ntest' - msgs = cmd._check_rst_data(rest) - self.assertEqual(len(msgs), 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_restructuredtext(self): - # let's see if it detects broken rest in long_description - broken_rest = 'title\n===\n\ntest' - pkg_info, dist = self.create_dist(long_description=broken_rest) - cmd = check(dist) - cmd.check_restructuredtext() - self.assertEqual(cmd._warnings, 1) - - # let's see if we have an error with strict=1 - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': broken_rest} - self.assertRaises(DistutilsSetupError, self._run, metadata, - **{'strict': 1, 'restructuredtext': 1}) - - # and non-broken rest, including a non-ASCII character to test #12114 - metadata['long_description'] = 'title\n=====\n\ntest \u00df' - cmd = self._run(metadata, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) - - # check that includes work to test #31292 - metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst' - cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1) - self.assertEqual(cmd._warnings, 0) - - @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") - def test_check_restructuredtext_with_syntax_highlight(self): - # Don't fail if there is a `code` or `code-block` directive - - example_rst_docs = [] - example_rst_docs.append(textwrap.dedent("""\ - Here's some code: - - .. code:: python - - def foo(): - pass - """)) - example_rst_docs.append(textwrap.dedent("""\ - Here's some code: - - .. code-block:: python - - def foo(): - pass - """)) - - for rest_with_code in example_rst_docs: - pkg_info, dist = self.create_dist(long_description=rest_with_code) - cmd = check(dist) - cmd.check_restructuredtext() - msgs = cmd._check_rst_data(rest_with_code) - if pygments is not None: - self.assertEqual(len(msgs), 0) - else: - self.assertEqual(len(msgs), 1) - self.assertEqual( - str(msgs[0][1]), - 'Cannot analyze code. Pygments package not found.' 
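The broken and valid reST samples in the deleted check tests above differ only in the length of the title underline, which is the property docutils warns about. The following is a simplified stand-alone illustration of that rule (it only looks at '=' underlines); it is not the docutils implementation that the real check command delegates to.

def underline_too_short(rst):
    """Return True if an '='-style section underline is shorter than its title."""
    lines = rst.splitlines()
    for title, underline in zip(lines, lines[1:]):
        if underline and set(underline) == {"="} and len(underline) < len(title):
            return True
    return False

# The exact samples used by the removed tests:
assert underline_too_short("title\n===\n\ntest")        # flagged: one warning expected
assert not underline_too_short("title\n=====\n\ntest")  # well-formed: no warnings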
- ) - - def test_check_all(self): - - metadata = {'url': 'xxx', 'author': 'xxx'} - self.assertRaises(DistutilsSetupError, self._run, - {}, **{'strict': 1, - 'restructuredtext': 1}) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_clean.py b/Lib/distutils/tests/test_clean.py deleted file mode 100644 index 92367499cefc04..00000000000000 --- a/Lib/distutils/tests/test_clean.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Tests for distutils.command.clean.""" -import os -import unittest - -from distutils.command.clean import clean -from distutils.tests import support -from test.support import run_unittest - -class cleanTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = clean(dist) - - # let's add some elements clean should remove - dirs = [(d, os.path.join(pkg_dir, d)) - for d in ('build_temp', 'build_lib', 'bdist_base', - 'build_scripts', 'build_base')] - - for name, path in dirs: - os.mkdir(path) - setattr(cmd, name, path) - if name == 'build_base': - continue - for f in ('one', 'two', 'three'): - self.write_file(os.path.join(path, f)) - - # let's run the command - cmd.all = 1 - cmd.ensure_finalized() - cmd.run() - - # make sure the files where removed - for name, path in dirs: - self.assertFalse(os.path.exists(path), - '%s was not removed' % path) - - # let's run the command again (should spit warnings but succeed) - cmd.all = 1 - cmd.ensure_finalized() - cmd.run() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_cmd.py b/Lib/distutils/tests/test_cmd.py deleted file mode 100644 index 2319214a9e332b..00000000000000 --- a/Lib/distutils/tests/test_cmd.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Tests for distutils.cmd.""" -import unittest -import os -from test.support import captured_stdout, run_unittest - -from distutils.cmd import Command -from distutils.dist import Distribution -from distutils.errors import DistutilsOptionError -from distutils import debug - -class MyCmd(Command): - def initialize_options(self): - pass - -class CommandTestCase(unittest.TestCase): - - def setUp(self): - dist = Distribution() - self.cmd = MyCmd(dist) - - def test_ensure_string_list(self): - - cmd = self.cmd - cmd.not_string_list = ['one', 2, 'three'] - cmd.yes_string_list = ['one', 'two', 'three'] - cmd.not_string_list2 = object() - cmd.yes_string_list2 = 'ok' - cmd.ensure_string_list('yes_string_list') - cmd.ensure_string_list('yes_string_list2') - - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list') - - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list2') - - cmd.option1 = 'ok,dok' - cmd.ensure_string_list('option1') - self.assertEqual(cmd.option1, ['ok', 'dok']) - - cmd.option2 = ['xxx', 'www'] - cmd.ensure_string_list('option2') - - cmd.option3 = ['ok', 2] - self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, - 'option3') - - - def test_make_file(self): - - cmd = self.cmd - - # making sure it raises when infiles is not a string or a list/tuple - self.assertRaises(TypeError, cmd.make_file, - infiles=1, outfile='', func='func', args=()) - - # making sure execute gets called properly - def _execute(func, args, exec_msg, level): - self.assertEqual(exec_msg, 'generating out 
from in') - cmd.force = True - cmd.execute = _execute - cmd.make_file(infiles='in', outfile='out', func='func', args=()) - - def test_dump_options(self): - - msgs = [] - def _announce(msg, level): - msgs.append(msg) - cmd = self.cmd - cmd.announce = _announce - cmd.option1 = 1 - cmd.option2 = 1 - cmd.user_options = [('option1', '', ''), ('option2', '', '')] - cmd.dump_options() - - wanted = ["command options for 'MyCmd':", ' option1 = 1', - ' option2 = 1'] - self.assertEqual(msgs, wanted) - - def test_ensure_string(self): - cmd = self.cmd - cmd.option1 = 'ok' - cmd.ensure_string('option1') - - cmd.option2 = None - cmd.ensure_string('option2', 'xxx') - self.assertTrue(hasattr(cmd, 'option2')) - - cmd.option3 = 1 - self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3') - - def test_ensure_filename(self): - cmd = self.cmd - cmd.option1 = __file__ - cmd.ensure_filename('option1') - cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2') - - def test_ensure_dirname(self): - cmd = self.cmd - cmd.option1 = os.path.dirname(__file__) or os.curdir - cmd.ensure_dirname('option1') - cmd.option2 = 'xxx' - self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2') - - def test_debug_print(self): - cmd = self.cmd - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), '') - - debug.DEBUG = True - try: - with captured_stdout() as stdout: - cmd.debug_print('xxx') - stdout.seek(0) - self.assertEqual(stdout.read(), 'xxx\n') - finally: - debug.DEBUG = False - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_config.py b/Lib/distutils/tests/test_config.py deleted file mode 100644 index 8ab70efb161cbd..00000000000000 --- a/Lib/distutils/tests/test_config.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Tests for distutils.pypirc.pypirc.""" -import os -import unittest - -from distutils.core import PyPIRCCommand -from distutils.core import Distribution -from distutils.log import set_threshold -from distutils.log import WARN - -from distutils.tests import support -from test.support import run_unittest - -PYPIRC = """\ -[distutils] - -index-servers = - server1 - server2 - server3 - -[server1] -username:me -password:secret - -[server2] -username:meagain -password: secret -realm:acme -repository:http://another.pypi/ - -[server3] -username:cbiggles -password:yh^%#rest-of-my-password -""" - -PYPIRC_OLD = """\ -[server-login] -username:tarek -password:secret -""" - -WANTED = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:tarek -password:xxx -""" - - -class BasePyPIRCCommandTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - """Patches the environment.""" - super(BasePyPIRCCommandTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - os.environ['HOME'] = self.tmp_dir - os.environ['USERPROFILE'] = self.tmp_dir - self.rc = os.path.join(self.tmp_dir, '.pypirc') - self.dist = Distribution() - - class command(PyPIRCCommand): - def __init__(self, dist): - PyPIRCCommand.__init__(self, dist) - def initialize_options(self): - pass - finalize_options = initialize_options - - self._cmd = command - self.old_threshold = set_threshold(WARN) - - def tearDown(self): - """Removes the patch.""" - set_threshold(self.old_threshold) - super(BasePyPIRCCommandTestCase, self).tearDown() - - -class 
PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): - - def test_server_registration(self): - # This test makes sure PyPIRCCommand knows how to: - # 1. handle several sections in .pypirc - # 2. handle the old format - - # new format - self.write_file(self.rc, PYPIRC) - cmd = self._cmd(self.dist) - config = cmd._read_pypirc() - - config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server1'), ('username', 'me')] - self.assertEqual(config, waited) - - # old format - self.write_file(self.rc, PYPIRC_OLD) - config = cmd._read_pypirc() - config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server-login'), ('username', 'tarek')] - self.assertEqual(config, waited) - - def test_server_empty_registration(self): - cmd = self._cmd(self.dist) - rc = cmd._get_rc_file() - self.assertFalse(os.path.exists(rc)) - cmd._store_pypirc('tarek', 'xxx') - self.assertTrue(os.path.exists(rc)) - f = open(rc) - try: - content = f.read() - self.assertEqual(content, WANTED) - finally: - f.close() - - def test_config_interpolation(self): - # using the % character in .pypirc should not raise an error (#20120) - self.write_file(self.rc, PYPIRC) - cmd = self._cmd(self.dist) - cmd.repository = 'server3' - config = cmd._read_pypirc() - - config = list(sorted(config.items())) - waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server3'), ('username', 'cbiggles')] - self.assertEqual(config, waited) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_config_cmd.py b/Lib/distutils/tests/test_config_cmd.py deleted file mode 100644 index c79db68aae115d..00000000000000 --- a/Lib/distutils/tests/test_config_cmd.py +++ /dev/null @@ -1,103 +0,0 @@ -"""Tests for distutils.command.config.""" -import unittest -import os -import sys -import sysconfig -from test.support import ( - run_unittest, missing_compiler_executable, requires_subprocess -) - -from distutils.command.config import dump_file, config -from distutils.tests import support -from distutils import log - -class ConfigTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - - def _info(self, msg, *args): - for line in msg.splitlines(): - self._logs.append(line) - - def setUp(self): - super(ConfigTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._info - self.old_config_vars = dict(sysconfig._CONFIG_VARS) - - def tearDown(self): - log.info = self.old_log - sysconfig._CONFIG_VARS.clear() - sysconfig._CONFIG_VARS.update(self.old_config_vars) - super(ConfigTestCase, self).tearDown() - - def test_dump_file(self): - this_file = os.path.splitext(__file__)[0] + '.py' - f = open(this_file) - try: - numlines = len(f.readlines()) - finally: - f.close() - - dump_file(this_file, 'I am the header') - self.assertEqual(len(self._logs), numlines+1) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - @requires_subprocess() - def test_search_cpp(self): - cmd = missing_compiler_executable(['preprocessor']) - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd._check_compiler() - compiler = 
cmd.compiler - if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower(): - self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options') - - # simple pattern searches - match = cmd.search_cpp(pattern='xxx', body='/* xxx */') - self.assertEqual(match, 0) - - match = cmd.search_cpp(pattern='_configtest', body='/* xxx */') - self.assertEqual(match, 1) - - def test_finalize_options(self): - # finalize_options does a bit of transformation - # on options - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd.include_dirs = 'one%stwo' % os.pathsep - cmd.libraries = 'one' - cmd.library_dirs = 'three%sfour' % os.pathsep - cmd.ensure_finalized() - - self.assertEqual(cmd.include_dirs, ['one', 'two']) - self.assertEqual(cmd.libraries, ['one']) - self.assertEqual(cmd.library_dirs, ['three', 'four']) - - def test_clean(self): - # _clean removes files - tmp_dir = self.mkdtemp() - f1 = os.path.join(tmp_dir, 'one') - f2 = os.path.join(tmp_dir, 'two') - - self.write_file(f1, 'xxx') - self.write_file(f2, 'xxx') - - for f in (f1, f2): - self.assertTrue(os.path.exists(f)) - - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd._clean(f1, f2) - - for f in (f1, f2): - self.assertFalse(os.path.exists(f)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_core.py b/Lib/distutils/tests/test_core.py deleted file mode 100644 index 700a22da045d45..00000000000000 --- a/Lib/distutils/tests/test_core.py +++ /dev/null @@ -1,140 +0,0 @@ -"""Tests for distutils.core.""" - -import io -import distutils.core -import os -import shutil -import sys -from test.support import captured_stdout, run_unittest -from test.support import os_helper -import unittest -from distutils.tests import support -from distutils import log - -# setup script that uses __file__ -setup_using___file__ = """\ - -__file__ - -from distutils.core import setup -setup() -""" - -setup_prints_cwd = """\ - -import os -print(os.getcwd()) - -from distutils.core import setup -setup() -""" - -setup_does_nothing = """\ -from distutils.core import setup -setup() -""" - - -setup_defines_subclass = """\ -from distutils.core import setup -from distutils.command.install import install as _install - -class install(_install): - sub_commands = _install.sub_commands + ['cmd'] - -setup(cmdclass={'install': install}) -""" - -class CoreTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(CoreTestCase, self).setUp() - self.old_stdout = sys.stdout - self.cleanup_testfn() - self.old_argv = sys.argv, sys.argv[:] - self.addCleanup(log.set_threshold, log._global_log.threshold) - - def tearDown(self): - sys.stdout = self.old_stdout - self.cleanup_testfn() - sys.argv = self.old_argv[0] - sys.argv[:] = self.old_argv[1] - super(CoreTestCase, self).tearDown() - - def cleanup_testfn(self): - path = os_helper.TESTFN - if os.path.isfile(path): - os.remove(path) - elif os.path.isdir(path): - shutil.rmtree(path) - - def write_setup(self, text, path=os_helper.TESTFN): - f = open(path, "w") - try: - f.write(text) - finally: - f.close() - return path - - def test_run_setup_provides_file(self): - # Make sure the script can use __file__; if that's missing, the test - # setup.py script will raise NameError. 
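The fixture above only works because run_setup() executes the setup script with a real __file__ in its namespace. Below is a minimal, hedged illustration of that idea using plain exec(); it is not the distutils implementation, and the temporary script content is made up for the example.

import os
import tempfile

script_src = "print('running with __file__ =', __file__)\n"

with tempfile.TemporaryDirectory() as tmp:
    script = os.path.join(tmp, "setup.py")
    with open(script, "w") as f:
        f.write(script_src)

    # Execute the script in a fresh namespace with __file__ defined; this is
    # why the deleted fixture's bare `__file__` reference does not raise
    # NameError when driven through run_setup().
    namespace = {"__file__": script, "__name__": "__main__"}
    with open(script) as f:
        exec(compile(f.read(), script, "exec"), namespace)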
- distutils.core.run_setup( - self.write_setup(setup_using___file__)) - - def test_run_setup_preserves_sys_argv(self): - # Make sure run_setup does not clobber sys.argv - argv_copy = sys.argv.copy() - distutils.core.run_setup( - self.write_setup(setup_does_nothing)) - self.assertEqual(sys.argv, argv_copy) - - def test_run_setup_defines_subclass(self): - # Make sure the script can use __file__; if that's missing, the test - # setup.py script will raise NameError. - dist = distutils.core.run_setup( - self.write_setup(setup_defines_subclass)) - install = dist.get_command_obj('install') - self.assertIn('cmd', install.sub_commands) - - def test_run_setup_uses_current_dir(self): - # This tests that the setup script is run with the current directory - # as its own current directory; this was temporarily broken by a - # previous patch when TESTFN did not use the current directory. - sys.stdout = io.StringIO() - cwd = os.getcwd() - - # Create a directory and write the setup.py file there: - os.mkdir(os_helper.TESTFN) - setup_py = os.path.join(os_helper.TESTFN, "setup.py") - distutils.core.run_setup( - self.write_setup(setup_prints_cwd, path=setup_py)) - - output = sys.stdout.getvalue() - if output.endswith("\n"): - output = output[:-1] - self.assertEqual(cwd, output) - - def test_debug_mode(self): - # this covers the code called when DEBUG is set - sys.argv = ['setup.py', '--name'] - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - stdout.seek(0) - self.assertEqual(stdout.read(), 'bar\n') - - distutils.core.DEBUG = True - try: - with captured_stdout() as stdout: - distutils.core.setup(name='bar') - finally: - distutils.core.DEBUG = False - stdout.seek(0) - wanted = "options (after parsing config files):\n" - self.assertEqual(stdout.readlines()[0], wanted) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_cygwinccompiler.py b/Lib/distutils/tests/test_cygwinccompiler.py deleted file mode 100644 index 0912ffd15c8ee9..00000000000000 --- a/Lib/distutils/tests/test_cygwinccompiler.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Tests for distutils.cygwinccompiler.""" -import unittest -import sys -import os -from io import BytesIO -from test.support import run_unittest - -from distutils import cygwinccompiler -from distutils.cygwinccompiler import (check_config_h, - CONFIG_H_OK, CONFIG_H_NOTOK, - CONFIG_H_UNCERTAIN, get_versions, - get_msvcr) -from distutils.tests import support - -class FakePopen(object): - test_class = None - - def __init__(self, cmd, shell, stdout): - self.cmd = cmd.split()[0] - exes = self.test_class._exes - if self.cmd in exes: - # issue #6438 in Python 3.x, Popen returns bytes - self.stdout = BytesIO(exes[self.cmd]) - else: - self.stdout = os.popen(cmd, 'r') - - -class CygwinCCompilerTestCase(support.TempdirManager, - unittest.TestCase): - - def setUp(self): - super(CygwinCCompilerTestCase, self).setUp() - self.version = sys.version - self.python_h = os.path.join(self.mkdtemp(), 'python.h') - from distutils import sysconfig - self.old_get_config_h_filename = sysconfig.get_config_h_filename - sysconfig.get_config_h_filename = self._get_config_h_filename - self.old_find_executable = cygwinccompiler.find_executable - cygwinccompiler.find_executable = self._find_executable - self._exes = {} - self.old_popen = cygwinccompiler.Popen - FakePopen.test_class = self - cygwinccompiler.Popen = FakePopen - - def tearDown(self): - sys.version = 
self.version - from distutils import sysconfig - sysconfig.get_config_h_filename = self.old_get_config_h_filename - cygwinccompiler.find_executable = self.old_find_executable - cygwinccompiler.Popen = self.old_popen - super(CygwinCCompilerTestCase, self).tearDown() - - def _get_config_h_filename(self): - return self.python_h - - def _find_executable(self, name): - if name in self._exes: - return name - return None - - def test_check_config_h(self): - - # check_config_h looks for "GCC" in sys.version first - # returns CONFIG_H_OK if found - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' - '4.0.1 (Apple Computer, Inc. build 5370)]') - - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - # then it tries to see if it can find "__GNUC__" in pyconfig.h - sys.version = 'something without the *CC word' - - # if the file doesn't exist it returns CONFIG_H_UNCERTAIN - self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN) - - # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK - self.write_file(self.python_h, 'xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK) - - # and CONFIG_H_OK if __GNUC__ is found - self.write_file(self.python_h, 'xxx __GNUC__ xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - def test_get_versions(self): - - # get_versions calls distutils.spawn.find_executable on - # 'gcc', 'ld' and 'dllwrap' - self.assertEqual(get_versions(), (None, None, None)) - - # Let's fake we have 'gcc' and it returns '3.4.5' - self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF' - res = get_versions() - self.assertEqual(str(res[0]), '3.4.5') - - # and let's see what happens when the version - # doesn't match the regular expression - # (\d+\.\d+(\.\d+)*) - self._exes['gcc'] = b'very strange output' - res = get_versions() - self.assertEqual(res[0], None) - - # same thing for ld - self._exes['ld'] = b'GNU ld version 2.17.50 20060824' - res = get_versions() - self.assertEqual(str(res[1]), '2.17.50') - self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77' - res = get_versions() - self.assertEqual(res[1], None) - - # and dllwrap - self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF' - res = get_versions() - self.assertEqual(str(res[2]), '2.17.50') - self._exes['dllwrap'] = b'Cheese Wrap' - res = get_versions() - self.assertEqual(res[2], None) - - def test_get_msvcr(self): - - # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]') - self.assertEqual(get_msvcr(), None) - - # MSVC 7.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1300 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr70']) - - # MSVC 7.1 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1310 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr71']) - - # VS2005 / MSVC 8.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1400 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr80']) - - # VS2008 / MSVC 9.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1500 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr90']) - - # unknown - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1999 32 bits (Intel)]') - self.assertRaises(ValueError, get_msvcr) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dep_util.py b/Lib/distutils/tests/test_dep_util.py deleted file mode 100644 index 0d52740a9edda3..00000000000000 --- a/Lib/distutils/tests/test_dep_util.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Tests for distutils.dep_util.""" -import unittest -import os - -from distutils.dep_util import newer, newer_pairwise, newer_group -from distutils.errors import DistutilsFileError -from distutils.tests import support -from test.support import run_unittest - -class DepUtilTestCase(support.TempdirManager, unittest.TestCase): - - def test_newer(self): - - tmpdir = self.mkdtemp() - new_file = os.path.join(tmpdir, 'new') - old_file = os.path.abspath(__file__) - - # Raise DistutilsFileError if 'new_file' does not exist. - self.assertRaises(DistutilsFileError, newer, new_file, old_file) - - # Return true if 'new_file' exists and is more recently modified than - # 'old_file', or if 'new_file' exists and 'old_file' doesn't. - self.write_file(new_file) - self.assertTrue(newer(new_file, 'I_dont_exist')) - self.assertTrue(newer(new_file, old_file)) - - # Return false if both exist and 'old_file' is the same age or younger - # than 'new_file'. - self.assertFalse(newer(old_file, new_file)) - - def test_newer_pairwise(self): - tmpdir = self.mkdtemp() - sources = os.path.join(tmpdir, 'sources') - targets = os.path.join(tmpdir, 'targets') - os.mkdir(sources) - os.mkdir(targets) - one = os.path.join(sources, 'one') - two = os.path.join(sources, 'two') - three = os.path.abspath(__file__) # I am the old file - four = os.path.join(targets, 'four') - self.write_file(one) - self.write_file(two) - self.write_file(four) - - self.assertEqual(newer_pairwise([one, two], [three, four]), - ([one],[three])) - - def test_newer_group(self): - tmpdir = self.mkdtemp() - sources = os.path.join(tmpdir, 'sources') - os.mkdir(sources) - one = os.path.join(sources, 'one') - two = os.path.join(sources, 'two') - three = os.path.join(sources, 'three') - old_file = os.path.abspath(__file__) - - # return true if 'old_file' is out-of-date with respect to any file - # listed in 'sources'. 
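For readers without distutils at hand, the notion of "newer" exercised in these dep_util tests boils down to a modification-time comparison. A minimal sketch of that comparison follows; it is illustrative only and not the distutils.dep_util source (which raises DistutilsFileError rather than OSError for a missing source).

import os

def is_newer(source, target):
    """True if `source` exists and is more recently modified than `target`,
    or if `source` exists and `target` does not."""
    if not os.path.exists(source):
        raise OSError(f"file {source!r} does not exist")
    if not os.path.exists(target):
        return True
    return os.stat(source).st_mtime > os.stat(target).st_mtime

# e.g. a file written just now is newer than this module:
#   is_newer(freshly_written_file, __file__)  ->  True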
- self.write_file(one) - self.write_file(two) - self.write_file(three) - self.assertTrue(newer_group([one, two, three], old_file)) - self.assertFalse(newer_group([one, two, old_file], three)) - - # missing handling - os.remove(one) - self.assertRaises(OSError, newer_group, [one, two, old_file], three) - - self.assertFalse(newer_group([one, two, old_file], three, - missing='ignore')) - - self.assertTrue(newer_group([one, two, old_file], three, - missing='newer')) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dir_util.py b/Lib/distutils/tests/test_dir_util.py deleted file mode 100644 index ebd89f320dca89..00000000000000 --- a/Lib/distutils/tests/test_dir_util.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Tests for distutils.dir_util.""" -import unittest -import os -import stat -import sys -from unittest.mock import patch - -from distutils import dir_util, errors -from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree, - ensure_relative) - -from distutils import log -from distutils.tests import support -from test.support import run_unittest, is_emscripten, is_wasi - - -class DirUtilTestCase(support.TempdirManager, unittest.TestCase): - - def _log(self, msg, *args): - if len(args) > 0: - self._logs.append(msg % args) - else: - self._logs.append(msg) - - def setUp(self): - super(DirUtilTestCase, self).setUp() - self._logs = [] - tmp_dir = self.mkdtemp() - self.root_target = os.path.join(tmp_dir, 'deep') - self.target = os.path.join(self.root_target, 'here') - self.target2 = os.path.join(tmp_dir, 'deep2') - self.old_log = log.info - log.info = self._log - - def tearDown(self): - log.info = self.old_log - super(DirUtilTestCase, self).tearDown() - - def test_mkpath_remove_tree_verbosity(self): - - mkpath(self.target, verbose=0) - wanted = [] - self.assertEqual(self._logs, wanted) - remove_tree(self.root_target, verbose=0) - - mkpath(self.target, verbose=1) - wanted = ['creating %s' % self.root_target, - 'creating %s' % self.target] - self.assertEqual(self._logs, wanted) - self._logs = [] - - remove_tree(self.root_target, verbose=1) - wanted = ["removing '%s' (and everything under it)" % self.root_target] - self.assertEqual(self._logs, wanted) - - @unittest.skipIf(sys.platform.startswith('win'), - "This test is only appropriate for POSIX-like systems.") - @unittest.skipIf( - is_emscripten or is_wasi, - "Emscripten's/WASI's umask is a stub." - ) - def test_mkpath_with_custom_mode(self): - # Get and set the current umask value for testing mode bits. 
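The mode assertions in this test depend on the process umask: on POSIX, a directory created with an explicit mode ends up with mode & ~umask. Here is a small stand-alone demonstration of that arithmetic (POSIX-only; the temporary path and the 0o700 mode are just example values).

import os
import stat
import tempfile

# Read the current umask without changing it, by setting and restoring it,
# the same trick the deleted test uses.
umask = os.umask(0o022)
os.umask(umask)

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "made-with-0o700")
    os.mkdir(target, 0o700)
    actual = stat.S_IMODE(os.stat(target).st_mode)
    assert actual == 0o700 & ~umask, (oct(actual), oct(umask))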
- umask = os.umask(0o002) - os.umask(umask) - mkpath(self.target, 0o700) - self.assertEqual( - stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask) - mkpath(self.target2, 0o555) - self.assertEqual( - stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) - - def test_create_tree_verbosity(self): - - create_tree(self.root_target, ['one', 'two', 'three'], verbose=0) - self.assertEqual(self._logs, []) - remove_tree(self.root_target, verbose=0) - - wanted = ['creating %s' % self.root_target] - create_tree(self.root_target, ['one', 'two', 'three'], verbose=1) - self.assertEqual(self._logs, wanted) - - remove_tree(self.root_target, verbose=0) - - def test_copy_tree_verbosity(self): - - mkpath(self.target, verbose=0) - - copy_tree(self.target, self.target2, verbose=0) - self.assertEqual(self._logs, []) - - remove_tree(self.root_target, verbose=0) - - mkpath(self.target, verbose=0) - a_file = os.path.join(self.target, 'ok.txt') - with open(a_file, 'w') as f: - f.write('some content') - - wanted = ['copying %s -> %s' % (a_file, self.target2)] - copy_tree(self.target, self.target2, verbose=1) - self.assertEqual(self._logs, wanted) - - remove_tree(self.root_target, verbose=0) - remove_tree(self.target2, verbose=0) - - def test_copy_tree_skips_nfs_temp_files(self): - mkpath(self.target, verbose=0) - - a_file = os.path.join(self.target, 'ok.txt') - nfs_file = os.path.join(self.target, '.nfs123abc') - for f in a_file, nfs_file: - with open(f, 'w') as fh: - fh.write('some content') - - copy_tree(self.target, self.target2) - self.assertEqual(os.listdir(self.target2), ['ok.txt']) - - remove_tree(self.root_target, verbose=0) - remove_tree(self.target2, verbose=0) - - def test_ensure_relative(self): - if os.sep == '/': - self.assertEqual(ensure_relative('/home/foo'), 'home/foo') - self.assertEqual(ensure_relative('some/path'), 'some/path') - else: # \\ - self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo') - self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') - - def test_copy_tree_exception_in_listdir(self): - """ - An exception in listdir should raise a DistutilsFileError - """ - with patch("os.listdir", side_effect=OSError()), \ - self.assertRaises(errors.DistutilsFileError): - src = self.tempdirs[-1] - dir_util.copy_tree(src, None) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_dist.py b/Lib/distutils/tests/test_dist.py deleted file mode 100644 index 2ef70d987f36bb..00000000000000 --- a/Lib/distutils/tests/test_dist.py +++ /dev/null @@ -1,529 +0,0 @@ -"""Tests for distutils.dist.""" -import os -import io -import sys -import unittest -import warnings -import textwrap - -from unittest import mock - -from distutils.dist import Distribution, fix_help_options -from distutils.cmd import Command - -from test.support import ( - captured_stdout, captured_stderr, run_unittest -) -from test.support.os_helper import TESTFN -from distutils.tests import support -from distutils import log - - -class test_dist(Command): - """Sample distutils extension command.""" - - user_options = [ - ("sample-option=", "S", "help text"), - ] - - def initialize_options(self): - self.sample_option = None - - -class TestDistribution(Distribution): - """Distribution subclasses that avoids the default search for - configuration files. - - The ._config_files attribute must be set before - .parse_config_files() is called. 
- """ - - def find_config_files(self): - return self._config_files - - -class DistributionTestCase(support.LoggingSilencer, - support.TempdirManager, - support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(DistributionTestCase, self).setUp() - self.argv = sys.argv, sys.argv[:] - del sys.argv[1:] - - def tearDown(self): - sys.argv = self.argv[0] - sys.argv[:] = self.argv[1] - super(DistributionTestCase, self).tearDown() - - def create_distribution(self, configfiles=()): - d = TestDistribution() - d._config_files = configfiles - d.parse_config_files() - d.parse_command_line() - return d - - def test_command_packages_unspecified(self): - sys.argv.append("build") - d = self.create_distribution() - self.assertEqual(d.get_command_packages(), ["distutils.command"]) - - def test_command_packages_cmdline(self): - from distutils.tests.test_dist import test_dist - sys.argv.extend(["--command-packages", - "foo.bar,distutils.tests", - "test_dist", - "-Ssometext", - ]) - d = self.create_distribution() - # let's actually try to load our test command: - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "distutils.tests"]) - cmd = d.get_command_obj("test_dist") - self.assertIsInstance(cmd, test_dist) - self.assertEqual(cmd.sample_option, "sometext") - - def test_venv_install_options(self): - sys.argv.append("install") - self.addCleanup(os.unlink, TESTFN) - - fakepath = '/somedir' - - with open(TESTFN, "w") as f: - print(("[install]\n" - "install-base = {0}\n" - "install-platbase = {0}\n" - "install-lib = {0}\n" - "install-platlib = {0}\n" - "install-purelib = {0}\n" - "install-headers = {0}\n" - "install-scripts = {0}\n" - "install-data = {0}\n" - "prefix = {0}\n" - "exec-prefix = {0}\n" - "home = {0}\n" - "user = {0}\n" - "root = {0}").format(fakepath), file=f) - - # Base case: Not in a Virtual Environment - with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values: - d = self.create_distribution([TESTFN]) - - option_tuple = (TESTFN, fakepath) - - result_dict = { - 'install_base': option_tuple, - 'install_platbase': option_tuple, - 'install_lib': option_tuple, - 'install_platlib': option_tuple, - 'install_purelib': option_tuple, - 'install_headers': option_tuple, - 'install_scripts': option_tuple, - 'install_data': option_tuple, - 'prefix': option_tuple, - 'exec_prefix': option_tuple, - 'home': option_tuple, - 'user': option_tuple, - 'root': option_tuple, - } - - self.assertEqual( - sorted(d.command_options.get('install').keys()), - sorted(result_dict.keys())) - - for (key, value) in d.command_options.get('install').items(): - self.assertEqual(value, result_dict[key]) - - # Test case: In a Virtual Environment - with mock.patch.multiple(sys, prefix='/a', base_prefix='/b') as values: - d = self.create_distribution([TESTFN]) - - for key in result_dict.keys(): - self.assertNotIn(key, d.command_options.get('install', {})) - - def test_command_packages_configfile(self): - sys.argv.append("build") - self.addCleanup(os.unlink, TESTFN) - f = open(TESTFN, "w") - try: - print("[global]", file=f) - print("command_packages = foo.bar, splat", file=f) - finally: - f.close() - - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "splat"]) - - # ensure command line overrides config: - sys.argv[1:] = ["--command-packages", "spork", "build"] - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "spork"]) - - # Setting --command-packages to '' 
should cause the default to - # be used even if a config file specified something else: - sys.argv[1:] = ["--command-packages", "", "build"] - d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), ["distutils.command"]) - - def test_empty_options(self): - # an empty options dictionary should not stay in the - # list of attributes - - # catching warnings - warns = [] - - def _warn(msg): - warns.append(msg) - - self.addCleanup(setattr, warnings, 'warn', warnings.warn) - warnings.warn = _warn - dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx', - 'version': 'xxx', 'url': 'xxxx', - 'options': {}}) - - self.assertEqual(len(warns), 0) - self.assertNotIn('options', dir(dist)) - - def test_finalize_options(self): - attrs = {'keywords': 'one,two', - 'platforms': 'one,two'} - - dist = Distribution(attrs=attrs) - dist.finalize_options() - - # finalize_option splits platforms and keywords - self.assertEqual(dist.metadata.platforms, ['one', 'two']) - self.assertEqual(dist.metadata.keywords, ['one', 'two']) - - attrs = {'keywords': 'foo bar', - 'platforms': 'foo bar'} - dist = Distribution(attrs=attrs) - dist.finalize_options() - self.assertEqual(dist.metadata.platforms, ['foo bar']) - self.assertEqual(dist.metadata.keywords, ['foo bar']) - - def test_get_command_packages(self): - dist = Distribution() - self.assertEqual(dist.command_packages, None) - cmds = dist.get_command_packages() - self.assertEqual(cmds, ['distutils.command']) - self.assertEqual(dist.command_packages, - ['distutils.command']) - - dist.command_packages = 'one,two' - cmds = dist.get_command_packages() - self.assertEqual(cmds, ['distutils.command', 'one', 'two']) - - def test_announce(self): - # make sure the level is known - dist = Distribution() - args = ('ok',) - kwargs = {'level': 'ok2'} - self.assertRaises(ValueError, dist.announce, args, kwargs) - - - def test_find_config_files_disable(self): - # Ticket #1180: Allow user to disable their home config file. 
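The behaviour this test pins down is that the per-user configuration file is looked up in the home directory under an OS-dependent name, and that --no-user-cfg drops it from the search. A small sketch of that lookup follows; it is illustrative only (the real logic, including the system-wide distutils.cfg, lives in Distribution.find_config_files()).

import os

def user_config_file():
    # ".pydistutils.cfg" on POSIX, "pydistutils.cfg" elsewhere, as the
    # removed test encodes.
    name = ".pydistutils.cfg" if os.name == "posix" else "pydistutils.cfg"
    return os.path.join(os.path.expanduser("~"), name)

def config_files(no_user_cfg=False):
    files = ["setup.cfg"]  # project-level config, when present
    user_cfg = user_config_file()
    if not no_user_cfg and os.path.isfile(user_cfg):
        files.insert(0, user_cfg)
    return files

# With --no-user-cfg the user file is simply omitted, which is why the
# deleted test sees exactly one fewer config file.
print(config_files())
print(config_files(no_user_cfg=True))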
- temp_home = self.mkdtemp() - if os.name == 'posix': - user_filename = os.path.join(temp_home, ".pydistutils.cfg") - else: - user_filename = os.path.join(temp_home, "pydistutils.cfg") - - with open(user_filename, 'w') as f: - f.write('[distutils]\n') - - def _expander(path): - return temp_home - - old_expander = os.path.expanduser - os.path.expanduser = _expander - try: - d = Distribution() - all_files = d.find_config_files() - - d = Distribution(attrs={'script_args': ['--no-user-cfg']}) - files = d.find_config_files() - finally: - os.path.expanduser = old_expander - - # make sure --no-user-cfg disables the user cfg file - self.assertEqual(len(all_files)-1, len(files)) - -class MetadataTestCase(support.TempdirManager, support.EnvironGuard, - unittest.TestCase): - - def setUp(self): - super(MetadataTestCase, self).setUp() - self.argv = sys.argv, sys.argv[:] - - def tearDown(self): - sys.argv = self.argv[0] - sys.argv[:] = self.argv[1] - super(MetadataTestCase, self).tearDown() - - def format_metadata(self, dist): - sio = io.StringIO() - dist.metadata.write_pkg_file(sio) - return sio.getvalue() - - def test_simple_metadata(self): - attrs = {"name": "package", - "version": "1.0"} - dist = Distribution(attrs) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.0", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertNotIn("requires:", meta.lower()) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_provides(self): - attrs = {"name": "package", - "version": "1.0", - "provides": ["package", "package.sub"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_provides(), - ["package", "package.sub"]) - self.assertEqual(dist.get_provides(), - ["package", "package.sub"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("requires:", meta.lower()) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_provides_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "provides": ["my.pkg (splat)"]}) - - def test_requires(self): - attrs = {"name": "package", - "version": "1.0", - "requires": ["other", "another (==1.0)"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_requires(), - ["other", "another (==1.0)"]) - self.assertEqual(dist.get_requires(), - ["other", "another (==1.0)"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertIn("Requires: other", meta) - self.assertIn("Requires: another (==1.0)", meta) - self.assertNotIn("obsoletes:", meta.lower()) - - def test_requires_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "requires": ["my.pkg (splat)"]}) - - def test_requires_to_list(self): - attrs = {"name": "package", - "requires": iter(["other"])} - dist = Distribution(attrs) - self.assertIsInstance(dist.metadata.requires, list) - - - def test_obsoletes(self): - attrs = {"name": "package", - "version": "1.0", - "obsoletes": ["other", "another (<1.0)"]} - dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_obsoletes(), - ["other", "another (<1.0)"]) - self.assertEqual(dist.get_obsoletes(), - ["other", "another (<1.0)"]) - meta = self.format_metadata(dist) - self.assertIn("Metadata-Version: 1.1", meta) - self.assertNotIn("provides:", meta.lower()) - self.assertNotIn("requires:", meta.lower()) - self.assertIn("Obsoletes: other", meta) - self.assertIn("Obsoletes: 
another (<1.0)", meta) - - def test_obsoletes_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "obsoletes": ["my.pkg (splat)"]}) - - def test_obsoletes_to_list(self): - attrs = {"name": "package", - "obsoletes": iter(["other"])} - dist = Distribution(attrs) - self.assertIsInstance(dist.metadata.obsoletes, list) - - def test_classifier(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ['Programming Language :: Python :: 3']} - dist = Distribution(attrs) - self.assertEqual(dist.get_classifiers(), - ['Programming Language :: Python :: 3']) - meta = self.format_metadata(dist) - self.assertIn('Metadata-Version: 1.1', meta) - - def test_classifier_invalid_type(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ('Programming Language :: Python :: 3',)} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.classifiers, list) - self.assertEqual(d.metadata.classifiers, - list(attrs['classifiers'])) - - def test_keywords(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ['spam', 'eggs', 'life of brian']} - dist = Distribution(attrs) - self.assertEqual(dist.get_keywords(), - ['spam', 'eggs', 'life of brian']) - - def test_keywords_invalid_type(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ('spam', 'eggs', 'life of brian')} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.keywords, list) - self.assertEqual(d.metadata.keywords, list(attrs['keywords'])) - - def test_platforms(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ['GNU/Linux', 'Some Evil Platform']} - dist = Distribution(attrs) - self.assertEqual(dist.get_platforms(), - ['GNU/Linux', 'Some Evil Platform']) - - def test_platforms_invalid_types(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ('GNU/Linux', 'Some Evil Platform')} - with captured_stderr() as error: - d = Distribution(attrs) - # should have warning about passing a non-list - self.assertIn('should be a list', error.getvalue()) - # should be converted to a list - self.assertIsInstance(d.metadata.platforms, list) - self.assertEqual(d.metadata.platforms, list(attrs['platforms'])) - - def test_download_url(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'download_url': 'http://example.org/boa'} - dist = Distribution(attrs) - meta = self.format_metadata(dist) - self.assertIn('Metadata-Version: 1.1', meta) - - def test_long_description(self): - long_desc = textwrap.dedent("""\ - example:: - We start here - and continue here - and end here.""") - attrs = {"name": "package", - "version": "1.0", - "long_description": long_desc} - - dist = Distribution(attrs) - meta = self.format_metadata(dist) - meta = meta.replace('\n' + 8 * ' ', '\n') - self.assertIn(long_desc, meta) - - def test_custom_pydistutils(self): - # fixes #2166 - # make sure pydistutils.cfg is found - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - temp_dir = self.mkdtemp() - user_filename = os.path.join(temp_dir, user_filename) - f = open(user_filename, 'w') - try: - f.write('.') - finally: - f.close() - - try: - dist = Distribution() - - # 
linux-style - if sys.platform in ('linux', 'darwin'): - os.environ['HOME'] = temp_dir - files = dist.find_config_files() - self.assertIn(user_filename, files) - - # win32-style - if sys.platform == 'win32': - # home drive should be found - os.environ['USERPROFILE'] = temp_dir - files = dist.find_config_files() - self.assertIn(user_filename, files, - '%r not found in %r' % (user_filename, files)) - finally: - os.remove(user_filename) - - def test_fix_help_options(self): - help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)] - fancy_options = fix_help_options(help_tuples) - self.assertEqual(fancy_options[0], ('a', 'b', 'c')) - self.assertEqual(fancy_options[1], (1, 2, 3)) - - def test_show_help(self): - # smoke test, just makes sure some help is displayed - self.addCleanup(log.set_threshold, log._global_log.threshold) - dist = Distribution() - sys.argv = [] - dist.help = 1 - dist.script_name = 'setup.py' - with captured_stdout() as s: - dist.parse_command_line() - - output = [line for line in s.getvalue().split('\n') - if line.strip() != ''] - self.assertTrue(output) - - - def test_read_metadata(self): - attrs = {"name": "package", - "version": "1.0", - "long_description": "desc", - "description": "xxx", - "download_url": "http://example.com", - "keywords": ['one', 'two'], - "requires": ['foo']} - - dist = Distribution(attrs) - metadata = dist.metadata - - # write it then reloads it - PKG_INFO = io.StringIO() - metadata.write_pkg_file(PKG_INFO) - PKG_INFO.seek(0) - metadata.read_pkg_file(PKG_INFO) - - self.assertEqual(metadata.name, "package") - self.assertEqual(metadata.version, "1.0") - self.assertEqual(metadata.description, "xxx") - self.assertEqual(metadata.download_url, 'http://example.com') - self.assertEqual(metadata.keywords, ['one', 'two']) - self.assertEqual(metadata.platforms, ['UNKNOWN']) - self.assertEqual(metadata.obsoletes, None) - self.assertEqual(metadata.requires, ['foo']) - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase)) - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase)) - return suite - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_extension.py b/Lib/distutils/tests/test_extension.py deleted file mode 100644 index 2b08930eafb10a..00000000000000 --- a/Lib/distutils/tests/test_extension.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Tests for distutils.extension.""" -import unittest -import os -import warnings - -from test.support import run_unittest -from test.support.warnings_helper import check_warnings -from distutils.extension import read_setup_file, Extension - -class ExtensionTestCase(unittest.TestCase): - - def test_read_setup_file(self): - # trying to read a Setup file - # (sample extracted from the PyGame project) - setup = os.path.join(os.path.dirname(__file__), 'Setup.sample') - - exts = read_setup_file(setup) - names = [ext.name for ext in exts] - names.sort() - - # here are the extensions read_setup_file should have created - # out of the file - wanted = ['_arraysurfarray', '_camera', '_numericsndarray', - '_numericsurfarray', 'base', 'bufferproxy', 'cdrom', - 'color', 'constants', 'display', 'draw', 'event', - 'fastevent', 'font', 'gfxdraw', 'image', 'imageext', - 'joystick', 'key', 'mask', 'mixer', 'mixer_music', - 'mouse', 'movie', 'overlay', 'pixelarray', 'pypm', - 'rect', 'rwobject', 'scrap', 'surface', 'surflock', - 'time', 'transform'] - - self.assertEqual(names, wanted) - - def test_extension_init(self): 
- # the first argument, which is the name, must be a string - self.assertRaises(AssertionError, Extension, 1, []) - ext = Extension('name', []) - self.assertEqual(ext.name, 'name') - - # the second argument, which is the list of files, must - # be a list of strings - self.assertRaises(AssertionError, Extension, 'name', 'file') - self.assertRaises(AssertionError, Extension, 'name', ['file', 1]) - ext = Extension('name', ['file1', 'file2']) - self.assertEqual(ext.sources, ['file1', 'file2']) - - # others arguments have defaults - for attr in ('include_dirs', 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'export_symbols', 'swig_opts', 'depends'): - self.assertEqual(getattr(ext, attr), []) - - self.assertEqual(ext.language, None) - self.assertEqual(ext.optional, None) - - # if there are unknown keyword options, warn about them - with check_warnings() as w: - warnings.simplefilter('always') - ext = Extension('name', ['file1', 'file2'], chic=True) - - self.assertEqual(len(w.warnings), 1) - self.assertEqual(str(w.warnings[0].message), - "Unknown Extension options: 'chic'") - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_file_util.py b/Lib/distutils/tests/test_file_util.py deleted file mode 100644 index 551151b0143661..00000000000000 --- a/Lib/distutils/tests/test_file_util.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Tests for distutils.file_util.""" -import unittest -import os -import errno -from unittest.mock import patch - -from distutils.file_util import move_file, copy_file -from distutils import log -from distutils.tests import support -from distutils.errors import DistutilsFileError -from test.support import run_unittest -from test.support.os_helper import unlink - - -class FileUtilTestCase(support.TempdirManager, unittest.TestCase): - - def _log(self, msg, *args): - if len(args) > 0: - self._logs.append(msg % args) - else: - self._logs.append(msg) - - def setUp(self): - super(FileUtilTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._log - tmp_dir = self.mkdtemp() - self.source = os.path.join(tmp_dir, 'f1') - self.target = os.path.join(tmp_dir, 'f2') - self.target_dir = os.path.join(tmp_dir, 'd1') - - def tearDown(self): - log.info = self.old_log - super(FileUtilTestCase, self).tearDown() - - def test_move_file_verbosity(self): - f = open(self.source, 'w') - try: - f.write('some content') - finally: - f.close() - - move_file(self.source, self.target, verbose=0) - wanted = [] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - move_file(self.source, self.target, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target)] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - self._logs = [] - # now the target is a dir - os.mkdir(self.target_dir) - move_file(self.source, self.target_dir, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target_dir)] - self.assertEqual(self._logs, wanted) - - def test_move_file_exception_unpacking_rename(self): - # see issue 22182 - with patch("os.rename", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') - move_file(self.source, self.target, 
verbose=0) - - def test_move_file_exception_unpacking_unlink(self): - # see issue 22182 - with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \ - patch("os.unlink", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') - move_file(self.source, self.target, verbose=0) - - @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') - def test_copy_file_hard_link(self): - with open(self.source, 'w') as f: - f.write('some content') - # Check first that copy_file() will not fall back on copying the file - # instead of creating the hard link. - try: - os.link(self.source, self.target) - except OSError as e: - self.skipTest('os.link: %s' % e) - else: - unlink(self.target) - st = os.stat(self.source) - copy_file(self.source, self.target, link='hard') - st2 = os.stat(self.source) - st3 = os.stat(self.target) - self.assertTrue(os.path.samestat(st, st2), (st, st2)) - self.assertTrue(os.path.samestat(st2, st3), (st2, st3)) - with open(self.source, 'r') as f: - self.assertEqual(f.read(), 'some content') - - @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') - def test_copy_file_hard_link_failure(self): - # If hard linking fails, copy_file() falls back on copying file - # (some special filesystems don't support hard linking even under - # Unix, see issue #8876). - with open(self.source, 'w') as f: - f.write('some content') - st = os.stat(self.source) - with patch("os.link", side_effect=OSError(0, "linking unsupported")): - copy_file(self.source, self.target, link='hard') - st2 = os.stat(self.source) - st3 = os.stat(self.target) - self.assertTrue(os.path.samestat(st, st2), (st, st2)) - self.assertFalse(os.path.samestat(st2, st3), (st2, st3)) - for fn in (self.source, self.target): - with open(fn, 'r') as f: - self.assertEqual(f.read(), 'some content') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_filelist.py b/Lib/distutils/tests/test_filelist.py deleted file mode 100644 index 98c97e49f80db5..00000000000000 --- a/Lib/distutils/tests/test_filelist.py +++ /dev/null @@ -1,340 +0,0 @@ -"""Tests for distutils.filelist.""" -import os -import re -import unittest -from distutils import debug -from distutils.log import WARN -from distutils.errors import DistutilsTemplateError -from distutils.filelist import glob_to_re, translate_pattern, FileList -from distutils import filelist - -from test.support import os_helper -from test.support import captured_stdout, run_unittest -from distutils.tests import support - -MANIFEST_IN = """\ -include ok -include xo -exclude xo -include foo.tmp -include buildout.cfg -global-include *.x -global-include *.txt -global-exclude *.tmp -recursive-include f *.oo -recursive-exclude global *.x -graft dir -prune dir3 -""" - - -def make_local_path(s): - """Converts '/' in a string to os.sep""" - return s.replace('/', os.sep) - - -class FileListTestCase(support.LoggingSilencer, - unittest.TestCase): - - def assertNoWarnings(self): - self.assertEqual(self.get_logs(WARN), []) - self.clear_logs() - - def assertWarnings(self): - self.assertGreater(len(self.get_logs(WARN)), 0) - self.clear_logs() - - def test_glob_to_re(self): - sep = os.sep - if os.sep == '\\': - sep = re.escape(os.sep) - - for glob, regex in ( - # simple cases - ('foo*', r'(?s:foo[^%(sep)s]*)\Z'), - ('foo?', r'(?s:foo[^%(sep)s])\Z'), - ('foo??', 
r'(?s:foo[^%(sep)s][^%(sep)s])\Z'), - # special cases - (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'), - (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'), - ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'), - (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')): - regex = regex % {'sep': sep} - self.assertEqual(glob_to_re(glob), regex) - - def test_process_template_line(self): - # testing all MANIFEST.in template patterns - file_list = FileList() - l = make_local_path - - # simulated file list - file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt', - 'buildout.cfg', - # filelist does not filter out VCS directories, - # it's sdist that does - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('global/files.x'), - l('global/here.tmp'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - l('dir3/ok'), - l('dir3/sub/ok.txt'), - ] - - for line in MANIFEST_IN.split('\n'): - if line.strip() == '': - continue - file_list.process_template_line(line) - - wanted = ['ok', - 'buildout.cfg', - 'four.txt', - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - ] - - self.assertEqual(file_list.files, wanted) - - def test_debug_print(self): - file_list = FileList() - with captured_stdout() as stdout: - file_list.debug_print('xxx') - self.assertEqual(stdout.getvalue(), '') - - debug.DEBUG = True - try: - with captured_stdout() as stdout: - file_list.debug_print('xxx') - self.assertEqual(stdout.getvalue(), 'xxx\n') - finally: - debug.DEBUG = False - - def test_set_allfiles(self): - file_list = FileList() - files = ['a', 'b', 'c'] - file_list.set_allfiles(files) - self.assertEqual(file_list.allfiles, files) - - def test_remove_duplicates(self): - file_list = FileList() - file_list.files = ['a', 'b', 'a', 'g', 'c', 'g'] - # files must be sorted beforehand (sdist does it) - file_list.sort() - file_list.remove_duplicates() - self.assertEqual(file_list.files, ['a', 'b', 'c', 'g']) - - def test_translate_pattern(self): - # not regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=False), - 'search')) - - # is a regex - regex = re.compile('a') - self.assertEqual( - translate_pattern(regex, anchor=True, is_regex=True), - regex) - - # plain string flagged as regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=True), - 'search')) - - # glob support - self.assertTrue(translate_pattern( - '*.py', anchor=True, is_regex=False).search('filelist.py')) - - def test_exclude_pattern(self): - # return False if no match - file_list = FileList() - self.assertFalse(file_list.exclude_pattern('*.py')) - - # return True if files match - file_list = FileList() - file_list.files = ['a.py', 'b.py'] - self.assertTrue(file_list.exclude_pattern('*.py')) - - # test excludes - file_list = FileList() - file_list.files = ['a.py', 'a.txt'] - file_list.exclude_pattern('*.py') - self.assertEqual(file_list.files, ['a.txt']) - - def test_include_pattern(self): - # return False if no match - file_list = FileList() - file_list.set_allfiles([]) - self.assertFalse(file_list.include_pattern('*.py')) - - # return True if files match - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt']) - self.assertTrue(file_list.include_pattern('*.py')) - - # test * matches all files - file_list = FileList() - self.assertIsNone(file_list.allfiles) - file_list.set_allfiles(['a.py', 'b.txt']) - file_list.include_pattern('*') - self.assertEqual(file_list.allfiles, 
['a.py', 'b.txt']) - - def test_process_template(self): - l = make_local_path - # invalid lines - file_list = FileList() - for action in ('include', 'exclude', 'global-include', - 'global-exclude', 'recursive-include', - 'recursive-exclude', 'graft', 'prune', 'blarg'): - self.assertRaises(DistutilsTemplateError, - file_list.process_template_line, action) - - # include - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) - - file_list.process_template_line('include *.py') - self.assertEqual(file_list.files, ['a.py']) - self.assertNoWarnings() - - file_list.process_template_line('include *.rb') - self.assertEqual(file_list.files, ['a.py']) - self.assertWarnings() - - # exclude - file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] - - file_list.process_template_line('exclude *.py') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) - self.assertNoWarnings() - - file_list.process_template_line('exclude *.rb') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) - self.assertWarnings() - - # global-include - file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) - - file_list.process_template_line('global-include *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) - self.assertNoWarnings() - - file_list.process_template_line('global-include *.rb') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) - self.assertWarnings() - - # global-exclude - file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] - - file_list.process_template_line('global-exclude *.py') - self.assertEqual(file_list.files, ['b.txt']) - self.assertNoWarnings() - - file_list.process_template_line('global-exclude *.rb') - self.assertEqual(file_list.files, ['b.txt']) - self.assertWarnings() - - # recursive-include - file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'), - l('d/d/e.py')]) - - file_list.process_template_line('recursive-include d *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertNoWarnings() - - file_list.process_template_line('recursive-include e *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertWarnings() - - # recursive-exclude - file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')] - - file_list.process_template_line('recursive-exclude d *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) - self.assertNoWarnings() - - file_list.process_template_line('recursive-exclude e *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) - self.assertWarnings() - - # graft - file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'), - l('f/f.py')]) - - file_list.process_template_line('graft d') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertNoWarnings() - - file_list.process_template_line('graft e') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) - self.assertWarnings() - - # prune - file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')] - - file_list.process_template_line('prune d') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) - self.assertNoWarnings() - - file_list.process_template_line('prune e') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) - self.assertWarnings() - - -class FindAllTestCase(unittest.TestCase): - @os_helper.skip_unless_symlink - def test_missing_symlink(self): - with 
os_helper.temp_cwd(): - os.symlink('foo', 'bar') - self.assertEqual(filelist.findall(), []) - - def test_basic_discovery(self): - """ - When findall is called with no parameters or with - '.' as the parameter, the dot should be omitted from - the results. - """ - with os_helper.temp_cwd(): - os.mkdir('foo') - file1 = os.path.join('foo', 'file1.txt') - os_helper.create_empty_file(file1) - os.mkdir('bar') - file2 = os.path.join('bar', 'file2.txt') - os_helper.create_empty_file(file2) - expected = [file2, file1] - self.assertEqual(sorted(filelist.findall()), expected) - - def test_non_local_discovery(self): - """ - When findall is called with another path, the full - path name should be returned. - """ - with os_helper.temp_dir() as temp_dir: - file1 = os.path.join(temp_dir, 'file1.txt') - os_helper.create_empty_file(file1) - expected = [file1] - self.assertEqual(filelist.findall(temp_dir), expected) - - -def test_suite(): - return unittest.TestSuite([ - unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), - unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), - ]) - - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py deleted file mode 100644 index c38f98b8b2c294..00000000000000 --- a/Lib/distutils/tests/test_install.py +++ /dev/null @@ -1,261 +0,0 @@ -"""Tests for distutils.command.install.""" - -import os -import sys -import unittest -import site - -from test.support import captured_stdout, run_unittest, requires_subprocess - -from distutils import sysconfig -from distutils.command.install import install, HAS_USER_SITE -from distutils.command import install as install_module -from distutils.command.build_ext import build_ext -from distutils.command.install import INSTALL_SCHEMES -from distutils.core import Distribution -from distutils.errors import DistutilsOptionError -from distutils.extension import Extension - -from distutils.tests import support -from test import support as test_support - - -def _make_ext_name(modname): - return modname + sysconfig.get_config_var('EXT_SUFFIX') - - -class InstallTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - - def setUp(self): - super().setUp() - self._backup_config_vars = dict(sysconfig._config_vars) - - def tearDown(self): - super().tearDown() - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._backup_config_vars) - - def test_home_installation_scheme(self): - # This ensure two things: - # - that --home generates the desired set of directory names - # - test --home is supported on all platforms - builddir = self.mkdtemp() - destination = os.path.join(builddir, "installation") - - dist = Distribution({"name": "foopkg"}) - # script_name need not exist, it just need to be initialized - dist.script_name = os.path.join(builddir, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - build_base=builddir, - build_lib=os.path.join(builddir, "lib"), - ) - - cmd = install(dist) - cmd.home = destination - cmd.ensure_finalized() - - self.assertEqual(cmd.install_base, destination) - self.assertEqual(cmd.install_platbase, destination) - - def check_path(got, expected): - got = os.path.normpath(got) - expected = os.path.normpath(expected) - self.assertEqual(got, expected) - - libdir = os.path.join(destination, "lib", "python") - check_path(cmd.install_lib, libdir) - platlibdir = os.path.join(destination, sys.platlibdir, "python") - check_path(cmd.install_platlib, 
platlibdir) - check_path(cmd.install_purelib, libdir) - check_path(cmd.install_headers, - os.path.join(destination, "include", "python", "foopkg")) - check_path(cmd.install_scripts, os.path.join(destination, "bin")) - check_path(cmd.install_data, destination) - - @unittest.skipUnless(HAS_USER_SITE, 'need user site') - def test_user_site(self): - # test install with --user - # preparing the environment for the test - self.old_user_base = site.USER_BASE - self.old_user_site = site.USER_SITE - self.tmpdir = self.mkdtemp() - self.user_base = os.path.join(self.tmpdir, 'B') - self.user_site = os.path.join(self.tmpdir, 'S') - site.USER_BASE = self.user_base - site.USER_SITE = self.user_site - install_module.USER_BASE = self.user_base - install_module.USER_SITE = self.user_site - - def _expanduser(path): - return self.tmpdir - self.old_expand = os.path.expanduser - os.path.expanduser = _expanduser - - def cleanup(): - site.USER_BASE = self.old_user_base - site.USER_SITE = self.old_user_site - install_module.USER_BASE = self.old_user_base - install_module.USER_SITE = self.old_user_site - os.path.expanduser = self.old_expand - - self.addCleanup(cleanup) - - if HAS_USER_SITE: - for key in ('nt_user', 'unix_user'): - self.assertIn(key, INSTALL_SCHEMES) - - dist = Distribution({'name': 'xx'}) - cmd = install(dist) - - # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = 1 - - # user base and site shouldn't be created yet - self.assertFalse(os.path.exists(self.user_base)) - self.assertFalse(os.path.exists(self.user_site)) - - # let's run finalize - cmd.ensure_finalized() - - # now they should - self.assertTrue(os.path.exists(self.user_base)) - self.assertTrue(os.path.exists(self.user_site)) - - self.assertIn('userbase', cmd.config_vars) - self.assertIn('usersite', cmd.config_vars) - - def test_handle_extra_path(self): - dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) - cmd = install(dist) - - # two elements - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path', 'dirs']) - self.assertEqual(cmd.extra_dirs, 'dirs') - self.assertEqual(cmd.path_file, 'path') - - # one element - cmd.extra_path = ['path'] - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path']) - self.assertEqual(cmd.extra_dirs, 'path') - self.assertEqual(cmd.path_file, 'path') - - # none - dist.extra_path = cmd.extra_path = None - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, None) - self.assertEqual(cmd.extra_dirs, '') - self.assertEqual(cmd.path_file, None) - - # three elements (no way !) 
- cmd.extra_path = 'path,dirs,again' - self.assertRaises(DistutilsOptionError, cmd.handle_extra_path) - - def test_finalize_options(self): - dist = Distribution({'name': 'xx'}) - cmd = install(dist) - - # must supply either prefix/exec-prefix/home or - # install-base/install-platbase -- not both - cmd.prefix = 'prefix' - cmd.install_base = 'base' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # must supply either home or prefix/exec-prefix -- not both - cmd.install_base = None - cmd.home = 'home' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # can't combine user with prefix/exec_prefix/home or - # install_(plat)base - cmd.prefix = None - cmd.user = 'user' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - def test_record(self): - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(py_modules=['hello'], - scripts=['sayhi']) - os.chdir(project_dir) - self.write_file('hello.py', "def main(): print('o hai')") - self.write_file('sayhi', 'from hello import main; main()') - - cmd = install(dist) - dist.command_obj['install'] = cmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, - 'sayhi', - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) - - @requires_subprocess() - def test_record_extensions(self): - cmd = test_support.missing_compiler_executable() - if cmd is not None: - self.skipTest('The %r command is not found' % cmd) - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(ext_modules=[ - Extension('xx', ['xxmodule.c'])]) - os.chdir(project_dir) - support.copy_xxmodule_c(project_dir) - - buildextcmd = build_ext(dist) - support.fixup_build_ext(buildextcmd) - buildextcmd.ensure_finalized() - - cmd = install(dist) - dist.command_obj['install'] = cmd - dist.command_obj['build_ext'] = buildextcmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] - self.assertEqual(found, expected) - - def test_debug_mode(self): - # this covers the code called when DEBUG is set - old_logs_len = len(self.logs) - install_module.DEBUG = True - try: - with captured_stdout(): - self.test_record() - finally: - install_module.DEBUG = False - self.assertGreater(len(self.logs), old_logs_len) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_data.py b/Lib/distutils/tests/test_install_data.py deleted file mode 100644 index 6191d2fa6eefab..00000000000000 --- a/Lib/distutils/tests/test_install_data.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Tests for distutils.command.install_data.""" -import os -import unittest - -from distutils.command.install_data import install_data -from distutils.tests import support -from test.support import run_unittest - -class InstallDataTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - 
def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = install_data(dist) - cmd.install_dir = inst = os.path.join(pkg_dir, 'inst') - - # data_files can contain - # - simple files - # - a tuple with a path, and a list of file - one = os.path.join(pkg_dir, 'one') - self.write_file(one, 'xxx') - inst2 = os.path.join(pkg_dir, 'inst2') - two = os.path.join(pkg_dir, 'two') - self.write_file(two, 'xxx') - - cmd.data_files = [one, (inst2, [two])] - self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])]) - - # let's run the command - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - rtwo = os.path.split(two)[-1] - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - rone = os.path.split(one)[-1] - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # let's try with warn_dir one - cmd.warn_dir = 1 - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # now using root and empty dir - cmd.root = os.path.join(pkg_dir, 'root') - inst3 = os.path.join(cmd.install_dir, 'inst3') - inst4 = os.path.join(pkg_dir, 'inst4') - three = os.path.join(cmd.install_dir, 'three') - self.write_file(three, 'xx') - cmd.data_files = [one, (inst2, [two]), - ('inst3', [three]), - (inst4, [])] - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 4) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_headers.py b/Lib/distutils/tests/test_install_headers.py deleted file mode 100644 index 1aa4d09cdef731..00000000000000 --- a/Lib/distutils/tests/test_install_headers.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Tests for distutils.command.install_headers.""" -import os -import unittest - -from distutils.command.install_headers import install_headers -from distutils.tests import support -from test.support import run_unittest - -class InstallHeadersTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def test_simple_run(self): - # we have two headers - header_list = self.mkdtemp() - header1 = os.path.join(header_list, 'header1') - header2 = os.path.join(header_list, 'header2') - self.write_file(header1) - self.write_file(header2) - headers = [header1, header2] - - pkg_dir, dist = self.create_dist(headers=headers) - cmd = install_headers(dist) - self.assertEqual(cmd.get_inputs(), headers) - - # let's run the command - cmd.install_dir = os.path.join(pkg_dir, 'inst') - cmd.ensure_finalized() - cmd.run() - - # let's check the results - self.assertEqual(len(cmd.get_outputs()), 2) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_lib.py b/Lib/distutils/tests/test_install_lib.py deleted file mode 100644 index f840d1a94665ed..00000000000000 --- a/Lib/distutils/tests/test_install_lib.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tests for distutils.command.install_data.""" -import sys -import os -import 
importlib.util -import unittest - -from distutils.command.install_lib import install_lib -from distutils.extension import Extension -from distutils.tests import support -from distutils.errors import DistutilsOptionError -from test.support import run_unittest, requires_subprocess - - -class InstallLibTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - - def test_finalize_options(self): - dist = self.create_dist()[1] - cmd = install_lib(dist) - - cmd.finalize_options() - self.assertEqual(cmd.compile, 1) - self.assertEqual(cmd.optimize, 0) - - # optimize must be 0, 1, or 2 - cmd.optimize = 'foo' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - cmd.optimize = '4' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - cmd.optimize = '2' - cmd.finalize_options() - self.assertEqual(cmd.optimize, 2) - - @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') - @requires_subprocess() - def test_byte_compile(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - cmd = install_lib(dist) - cmd.compile = cmd.optimize = 1 - - f = os.path.join(project_dir, 'foo.py') - self.write_file(f, '# python file') - cmd.byte_compile([f]) - pyc_file = importlib.util.cache_from_source('foo.py', optimization='') - pyc_opt_file = importlib.util.cache_from_source('foo.py', - optimization=cmd.optimize) - self.assertTrue(os.path.exists(pyc_file)) - self.assertTrue(os.path.exists(pyc_opt_file)) - - def test_get_outputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - cmd.distribution.script_name = 'setup.py' - - # get_outputs should return 4 elements: spam/__init__.py and .pyc, - # foo.import-tag-abiflags.so / foo.pyd - outputs = cmd.get_outputs() - self.assertEqual(len(outputs), 4, outputs) - - def test_get_inputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - cmd.distribution.script_name = 'setup.py' - - # get_inputs should return 2 elements: spam/__init__.py and - # foo.import-tag-abiflags.so / foo.pyd - inputs = cmd.get_inputs() - self.assertEqual(len(inputs), 2, inputs) - - @requires_subprocess() - def test_dont_write_bytecode(self): - # makes sure byte_compile is not used - dist = self.create_dist()[1] - cmd = install_lib(dist) - cmd.compile = 1 - cmd.optimize = 1 - - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - cmd.byte_compile([]) - finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_install_scripts.py 
b/Lib/distutils/tests/test_install_scripts.py deleted file mode 100644 index 648db3b11da745..00000000000000 --- a/Lib/distutils/tests/test_install_scripts.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Tests for distutils.command.install_scripts.""" - -import os -import unittest - -from distutils.command.install_scripts import install_scripts -from distutils.core import Distribution - -from distutils.tests import support -from test.support import run_unittest - - -class InstallScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - def test_default_settings(self): - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand( - build_scripts="/foo/bar") - dist.command_obj["install"] = support.DummyCommand( - install_scripts="/splat/funk", - force=1, - skip_build=1, - ) - cmd = install_scripts(dist) - self.assertFalse(cmd.force) - self.assertFalse(cmd.skip_build) - self.assertIsNone(cmd.build_dir) - self.assertIsNone(cmd.install_dir) - - cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertTrue(cmd.skip_build) - self.assertEqual(cmd.build_dir, "/foo/bar") - self.assertEqual(cmd.install_dir, "/splat/funk") - - def test_installation(self): - source = self.mkdtemp() - expected = [] - - def write_script(name, text): - expected.append(name) - f = open(os.path.join(source, name), "w") - try: - f.write(text) - finally: - f.close() - - write_script("script1.py", ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("script2.py", ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("shell.sh", ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - - target = self.mkdtemp() - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand(build_scripts=source) - dist.command_obj["install"] = support.DummyCommand( - install_scripts=target, - force=1, - skip_build=1, - ) - cmd = install_scripts(dist) - cmd.finalize_options() - cmd.run() - - installed = os.listdir(target) - for name in expected: - self.assertIn(name, installed) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_log.py b/Lib/distutils/tests/test_log.py deleted file mode 100644 index ec2ae028de8777..00000000000000 --- a/Lib/distutils/tests/test_log.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Tests for distutils.log""" - -import io -import sys -import unittest -from test.support import swap_attr, run_unittest - -from distutils import log - -class TestLog(unittest.TestCase): - def test_non_ascii(self): - # Issues #8663, #34421: test that non-encodable text is escaped with - # backslashreplace error handler and encodable non-ASCII text is - # output as is. 
- for errors in ('strict', 'backslashreplace', 'surrogateescape', - 'replace', 'ignore'): - with self.subTest(errors=errors): - stdout = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - stderr = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - old_threshold = log.set_threshold(log.DEBUG) - try: - with swap_attr(sys, 'stdout', stdout), \ - swap_attr(sys, 'stderr', stderr): - log.debug('Dεbug\tMėssãge') - log.fatal('Fαtal\tÈrrōr') - finally: - log.set_threshold(old_threshold) - - stdout.seek(0) - self.assertEqual(stdout.read().rstrip(), - 'Dεbug\tM?ss?ge' if errors == 'replace' else - 'Dεbug\tMssge' if errors == 'ignore' else - 'Dεbug\tM\\u0117ss\\xe3ge') - stderr.seek(0) - self.assertEqual(stderr.read().rstrip(), - 'Fαtal\t?rr?r' if errors == 'replace' else - 'Fαtal\trrr' if errors == 'ignore' else - 'Fαtal\t\\xc8rr\\u014dr') - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TestLog) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_msvc9compiler.py b/Lib/distutils/tests/test_msvc9compiler.py deleted file mode 100644 index 6235405e31201b..00000000000000 --- a/Lib/distutils/tests/test_msvc9compiler.py +++ /dev/null @@ -1,184 +0,0 @@ -"""Tests for distutils.msvc9compiler.""" -import sys -import unittest -import os - -from distutils.errors import DistutilsPlatformError -from distutils.tests import support -from test.support import run_unittest - -# A manifest with the only assembly reference being the msvcrt assembly, so -# should have the assembly completely stripped. Note that although the -# assembly has a reference the assembly is removed - that is -# currently a "feature", not a bug :) -_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\ - - - - - - - - - - - - - - - - - -""" - -# A manifest with references to assemblies other than msvcrt. When processed, -# this assembly should be returned with just the msvcrt part removed. -_MANIFEST_WITH_MULTIPLE_REFERENCES = """\ - - - - - - - - - - - - - - - - - - - - - - -""" - -_CLEANED_MANIFEST = """\ - - - - - - - - - - - - - - - - - - -""" - -if sys.platform=="win32": - from distutils.msvccompiler import get_build_version - if get_build_version()>=8.0: - SKIP_MESSAGE = None - else: - SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" -else: - SKIP_MESSAGE = "These tests are only for win32" - -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvc9compilerTestCase(support.TempdirManager, - unittest.TestCase): - - def test_no_compiler(self): - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - from distutils.msvc9compiler import query_vcvarsall - def _find_vcvarsall(version): - return None - - from distutils import msvc9compiler - old_find_vcvarsall = msvc9compiler.find_vcvarsall - msvc9compiler.find_vcvarsall = _find_vcvarsall - try: - self.assertRaises(DistutilsPlatformError, query_vcvarsall, - 'wont find this version') - finally: - msvc9compiler.find_vcvarsall = old_find_vcvarsall - - def test_reg_class(self): - from distutils.msvc9compiler import Reg - self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') - - # looking for values that should exist on all - # windows registry versions. 
- path = r'Control Panel\Desktop' - v = Reg.get_value(path, 'dragfullwindows') - self.assertIn(v, ('0', '1', '2')) - - import winreg - HKCU = winreg.HKEY_CURRENT_USER - keys = Reg.read_keys(HKCU, 'xxxx') - self.assertEqual(keys, None) - - keys = Reg.read_keys(HKCU, r'Control Panel') - self.assertIn('Desktop', keys) - - def test_remove_visual_c_ref(self): - from distutils.msvc9compiler import MSVCCompiler - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES) - finally: - f.close() - - compiler = MSVCCompiler() - compiler._remove_visual_c_ref(manifest) - - # see what we got - f = open(manifest) - try: - # removing trailing spaces - content = '\n'.join([line.rstrip() for line in f.readlines()]) - finally: - f.close() - - # makes sure the manifest was properly cleaned - self.assertEqual(content, _CLEANED_MANIFEST) - - def test_remove_entire_manifest(self): - from distutils.msvc9compiler import MSVCCompiler - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE) - finally: - f.close() - - compiler = MSVCCompiler() - got = compiler._remove_visual_c_ref(manifest) - self.assertIsNone(got) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_msvccompiler.py b/Lib/distutils/tests/test_msvccompiler.py deleted file mode 100644 index dd67c3eb6d519d..00000000000000 --- a/Lib/distutils/tests/test_msvccompiler.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Tests for distutils._msvccompiler.""" -import sys -import unittest -import os - -from distutils.errors import DistutilsPlatformError -from distutils.tests import support -from test.support import run_unittest - - -SKIP_MESSAGE = (None if sys.platform == "win32" else - "These tests are only for win32") - -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvccompilerTestCase(support.TempdirManager, - unittest.TestCase): - - def test_no_compiler(self): - import distutils._msvccompiler as _msvccompiler - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - def _find_vcvarsall(plat_spec): - return None, None - - old_find_vcvarsall = _msvccompiler._find_vcvarsall - _msvccompiler._find_vcvarsall = _find_vcvarsall - try: - self.assertRaises(DistutilsPlatformError, - _msvccompiler._get_vc_env, - 'wont find this version') - finally: - _msvccompiler._find_vcvarsall = old_find_vcvarsall - - def test_get_vc_env_unicode(self): - import distutils._msvccompiler as _msvccompiler - - test_var = 'ṰḖṤṪ┅ṼẨṜ' - test_value = '₃⁴₅' - - # Ensure we don't early exit from _get_vc_env - old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None) - os.environ[test_var] = test_value - try: - env = _msvccompiler._get_vc_env('x86') - self.assertIn(test_var.lower(), env) - self.assertEqual(test_value, env[test_var.lower()]) - finally: - os.environ.pop(test_var) - if old_distutils_use_sdk: - os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk - - def test_get_vc2017(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2017 - # and mark it skipped if we do not. 
- version, path = _msvccompiler._find_vc2017() - if version: - self.assertGreaterEqual(version, 15) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2017 is not installed") - - def test_get_vc2015(self): - import distutils._msvccompiler as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2015 - # and mark it skipped if we do not. - version, path = _msvccompiler._find_vc2015() - if version: - self.assertGreaterEqual(version, 14) - self.assertTrue(os.path.isdir(path)) - else: - raise unittest.SkipTest("VS 2015 is not installed") - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_register.py b/Lib/distutils/tests/test_register.py deleted file mode 100644 index 7805c6d3c9f7b9..00000000000000 --- a/Lib/distutils/tests/test_register.py +++ /dev/null @@ -1,324 +0,0 @@ -"""Tests for distutils.command.register.""" -import os -import unittest -import getpass -import urllib -import warnings - -from test.support import run_unittest -from test.support.warnings_helper import check_warnings - -from distutils.command import register as register_module -from distutils.command.register import register -from distutils.errors import DistutilsSetupError -from distutils.log import INFO - -from distutils.tests.test_config import BasePyPIRCCommandTestCase - -try: - import docutils -except ImportError: - docutils = None - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -WANTED_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:tarek -password:password -""" - -class Inputs(object): - """Fakes user inputs.""" - def __init__(self, *answers): - self.answers = answers - self.index = 0 - - def __call__(self, prompt=''): - try: - return self.answers[self.index] - finally: - self.index += 1 - -class FakeOpener(object): - """Fakes a PyPI server""" - def __init__(self): - self.reqs = [] - - def __call__(self, *args): - return self - - def open(self, req, data=None, timeout=None): - self.reqs.append(req) - return self - - def read(self): - return b'xxx' - - def getheader(self, name, default=None): - return { - 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) - - -class RegisterTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - super(RegisterTestCase, self).setUp() - # patching the password prompt - self._old_getpass = getpass.getpass - def _getpass(prompt): - return 'password' - getpass.getpass = _getpass - urllib.request._opener = None - self.old_opener = urllib.request.build_opener - self.conn = urllib.request.build_opener = FakeOpener() - - def tearDown(self): - getpass.getpass = self._old_getpass - urllib.request._opener = None - urllib.request.build_opener = self.old_opener - super(RegisterTestCase, self).tearDown() - - def _get_cmd(self, metadata=None): - if metadata is None: - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} - pkg_info, dist = self.create_dist(**metadata) - return register(dist) - - def test_create_pypirc(self): - # this test makes sure a .pypirc file - # is created when requested. 
- - # let's create a register instance - cmd = self._get_cmd() - - # we shouldn't have a .pypirc file yet - self.assertFalse(os.path.exists(self.rc)) - - # patching input and getpass.getpass - # so register gets happy - # - # Here's what we are faking : - # use your existing login (choice 1.) - # Username : 'tarek' - # Password : 'password' - # Save your login (y/N)? : 'y' - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # we should have a brand new .pypirc file - self.assertTrue(os.path.exists(self.rc)) - - # with the content similar to WANTED_PYPIRC - f = open(self.rc) - try: - content = f.read() - self.assertEqual(content, WANTED_PYPIRC) - finally: - f.close() - - # now let's make sure the .pypirc file generated - # really works : we shouldn't be asked anything - # if we run the command again - def _no_way(prompt=''): - raise AssertionError(prompt) - register_module.input = _no_way - - cmd.show_response = 1 - cmd.run() - - # let's see what the server received : we should - # have 2 similar requests - self.assertEqual(len(self.conn.reqs), 2) - req1 = dict(self.conn.reqs[0].headers) - req2 = dict(self.conn.reqs[1].headers) - - self.assertEqual(req1['Content-length'], '1374') - self.assertEqual(req2['Content-length'], '1374') - self.assertIn(b'xxx', self.conn.reqs[1].data) - - def test_password_not_in_file(self): - - self.write_file(self.rc, PYPIRC_NOPASSWORD) - cmd = self._get_cmd() - cmd._set_config() - cmd.finalize_options() - cmd.send_metadata() - - # dist.password should be set - # therefore used afterwards by other commands - self.assertEqual(cmd.distribution.password, 'password') - - def test_registering(self): - # this test runs choice 2 - cmd = self._get_cmd() - inputs = Inputs('2', 'tarek', 'tarek@ziade.org') - register_module.input = inputs.__call__ - try: - # let's run the command - cmd.run() - finally: - del register_module.input - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '608') - self.assertIn(b'tarek', req.data) - - def test_password_reset(self): - # this test runs choice 3 - cmd = self._get_cmd() - inputs = Inputs('3', 'tarek@ziade.org') - register_module.input = inputs.__call__ - try: - # let's run the command - cmd.run() - finally: - del register_module.input - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '290') - self.assertIn(b'tarek', req.data) - - @unittest.skipUnless(docutils is not None, 'needs docutils') - def test_strict(self): - # testing the script option - # when on, the register command stops if - # the metadata is incomplete or if - # long_description is not reSt compliant - - # empty metadata - cmd = self._get_cmd({}) - cmd.ensure_finalized() - cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) - - # metadata are OK but long_description is broken - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'éxéxé', - 'name': 'xxx', 'version': 'xxx', - 'long_description': 'title\n==\n\ntext'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = 1 - self.assertRaises(DistutilsSetupError, cmd.run) - - # now something that works - metadata['long_description'] = 'title\n=====\n\ntext' - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - 
cmd.strict = 1 - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # strict is not by default - cmd = self._get_cmd() - cmd.ensure_finalized() - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - # and finally a Unicode test (bug #12114) - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = 1 - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - # let's run the command - try: - cmd.run() - finally: - del register_module.input - - @unittest.skipUnless(docutils is not None, 'needs docutils') - def test_register_invalid_long_description(self): - description = ':funkie:`str`' # mimic Sphinx-specific markup - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': description} - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = True - inputs = Inputs('2', 'tarek', 'tarek@ziade.org') - register_module.input = inputs - self.addCleanup(delattr, register_module, 'input') - - self.assertRaises(DistutilsSetupError, cmd.run) - - def test_check_metadata_deprecated(self): - # makes sure make_metadata is deprecated - cmd = self._get_cmd() - with check_warnings() as w: - warnings.simplefilter("always") - cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) - - def test_list_classifiers(self): - cmd = self._get_cmd() - cmd.list_classifiers = 1 - cmd.run() - results = self.get_logs(INFO) - self.assertEqual(results, ['running check', 'xxx']) - - def test_show_response(self): - # test that the --show-response option return a well formatted response - cmd = self._get_cmd() - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs.__call__ - cmd.show_response = 1 - try: - cmd.run() - finally: - del register_module.input - - results = self.get_logs(INFO) - self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py deleted file mode 100644 index 46b3a13e470c4e..00000000000000 --- a/Lib/distutils/tests/test_sdist.py +++ /dev/null @@ -1,493 +0,0 @@ -"""Tests for distutils.command.sdist.""" -import os -import tarfile -import unittest -import warnings -import zipfile -from os.path import join -from textwrap import dedent -from test.support import captured_stdout, run_unittest -from test.support.warnings_helper import check_warnings - -try: - import zlib - ZLIB_SUPPORT = True -except ImportError: - ZLIB_SUPPORT = False - -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - -from distutils.command.sdist import sdist, show_formats -from distutils.core import Distribution -from distutils.tests.test_config import BasePyPIRCCommandTestCase -from distutils.errors import DistutilsOptionError -from distutils.spawn import find_executable -from distutils.log import WARN -from distutils.filelist import FileList -from distutils.archive_util import 
ARCHIVE_FORMATS - -SETUP_PY = """ -from distutils.core import setup -import somecode - -setup(name='fake') -""" - -MANIFEST = """\ -# file GENERATED by distutils, do NOT edit -README -buildout.cfg -inroot.txt -setup.py -data%(sep)sdata.dt -scripts%(sep)sscript.py -some%(sep)sfile.txt -some%(sep)sother_file.txt -somecode%(sep)s__init__.py -somecode%(sep)sdoc.dat -somecode%(sep)sdoc.txt -""" - -class SDistTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - # PyPIRCCommandTestCase creates a temp dir already - # and put it in self.tmp_dir - super(SDistTestCase, self).setUp() - # setting up an environment - self.old_path = os.getcwd() - os.mkdir(join(self.tmp_dir, 'somecode')) - os.mkdir(join(self.tmp_dir, 'dist')) - # a package, and a README - self.write_file((self.tmp_dir, 'README'), 'xxx') - self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#') - self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY) - os.chdir(self.tmp_dir) - - def tearDown(self): - # back to normal - os.chdir(self.old_path) - super(SDistTestCase, self).tearDown() - - def get_cmd(self, metadata=None): - """Returns a cmd""" - if metadata is None: - metadata = {'name': 'fake', 'version': '1.0', - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'} - dist = Distribution(metadata) - dist.script_name = 'setup.py' - dist.packages = ['somecode'] - dist.include_package_data = True - cmd = sdist(dist) - cmd.dist_dir = 'dist' - return dist, cmd - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_prune_file_list(self): - # this test creates a project with some VCS dirs and an NFS rename - # file, then launches sdist to check they get pruned on all systems - - # creating VCS directories with some files in them - os.mkdir(join(self.tmp_dir, 'somecode', '.svn')) - self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) - self.write_file((self.tmp_dir, 'somecode', '.hg', - 'ok'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.git')) - self.write_file((self.tmp_dir, 'somecode', '.git', - 'ok'), 'xxx') - - self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx') - - # now building a sdist - dist, cmd = self.get_cmd() - - # zip is available universally - # (tar might not be installed under win32) - cmd.formats = ['zip'] - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) - try: - content = zip_file.namelist() - finally: - zip_file.close() - - # making sure everything has been pruned correctly - expected = ['', 'PKG-INFO', 'README', 'setup.py', - 'somecode/', 'somecode/__init__.py'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") - def test_make_distribution(self): - # now building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar then a tar - cmd.formats = ['gztar', 'tar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have two files - dist_folder = join(self.tmp_dir, 'dist') - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - os.remove(join(dist_folder, 'fake-1.0.tar')) 
- os.remove(join(dist_folder, 'fake-1.0.tar.gz')) - - # now trying a tar then a gztar - cmd.formats = ['tar', 'gztar'] - - cmd.ensure_finalized() - cmd.run() - - result = os.listdir(dist_folder) - result.sort() - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_add_defaults(self): - - # http://bugs.python.org/issue2279 - - # add_default should also include - # data_files and package_data - dist, cmd = self.get_cmd() - - # filling data_files by pointing files - # in package_data - dist.package_data = {'': ['*.cfg', '*.dat'], - 'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') - - # adding some data in data_files - data_dir = join(self.tmp_dir, 'data') - os.mkdir(data_dir) - self.write_file((data_dir, 'data.dt'), '#') - some_dir = join(self.tmp_dir, 'some') - os.mkdir(some_dir) - # make sure VCS directories are pruned (#14004) - hg_dir = join(self.tmp_dir, '.hg') - os.mkdir(hg_dir) - self.write_file((hg_dir, 'last-message.txt'), '#') - # a buggy regex used to prevent this from working on windows (#6884) - self.write_file((self.tmp_dir, 'buildout.cfg'), '#') - self.write_file((self.tmp_dir, 'inroot.txt'), '#') - self.write_file((some_dir, 'file.txt'), '#') - self.write_file((some_dir, 'other_file.txt'), '#') - - dist.data_files = [('data', ['data/data.dt', - 'buildout.cfg', - 'inroot.txt', - 'notexisting']), - 'some/file.txt', - 'some/other_file.txt'] - - # adding a script - script_dir = join(self.tmp_dir, 'scripts') - os.mkdir(script_dir) - self.write_file((script_dir, 'script.py'), '#') - dist.scripts = [join('scripts', 'script.py')] - - cmd.formats = ['zip'] - cmd.use_defaults = True - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) - try: - content = zip_file.namelist() - finally: - zip_file.close() - - # making sure everything was added - expected = ['', 'PKG-INFO', 'README', 'buildout.cfg', - 'data/', 'data/data.dt', 'inroot.txt', - 'scripts/', 'scripts/script.py', 'setup.py', - 'some/', 'some/file.txt', 'some/other_file.txt', - 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat', - 'somecode/doc.txt'] - self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) - - # checking the MANIFEST - f = open(join(self.tmp_dir, 'MANIFEST')) - try: - manifest = f.read() - finally: - f.close() - self.assertEqual(manifest, MANIFEST % {'sep': os.sep}) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_metadata_check_option(self): - # testing the `medata-check` option - dist, cmd = self.get_cmd(metadata={}) - - # this should raise some warnings ! 
- # with the `check` subcommand - cmd.ensure_finalized() - cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 2) - - # trying with a complete set of metadata - self.clear_logs() - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.metadata_check = 0 - cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] - self.assertEqual(len(warnings), 0) - - def test_check_metadata_deprecated(self): - # makes sure make_metadata is deprecated - dist, cmd = self.get_cmd() - with check_warnings() as w: - warnings.simplefilter("always") - cmd.check_metadata() - self.assertEqual(len(w.warnings), 1) - - def test_show_formats(self): - with captured_stdout() as stdout: - show_formats() - - # the output should be a header line + one line per format - num_formats = len(ARCHIVE_FORMATS.keys()) - output = [line for line in stdout.getvalue().split('\n') - if line.strip().startswith('--formats=')] - self.assertEqual(len(output), num_formats) - - def test_finalize_options(self): - dist, cmd = self.get_cmd() - cmd.finalize_options() - - # default options set by finalize - self.assertEqual(cmd.manifest, 'MANIFEST') - self.assertEqual(cmd.template, 'MANIFEST.in') - self.assertEqual(cmd.dist_dir, 'dist') - - # formats has to be a string splitable on (' ', ',') or - # a stringlist - cmd.formats = 1 - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - cmd.formats = ['zip'] - cmd.finalize_options() - - # formats has to be known - cmd.formats = 'supazipa' - self.assertRaises(DistutilsOptionError, cmd.finalize_options) - - # the following tests make sure there is a nice error message instead - # of a traceback when parsing an invalid manifest template - - def _check_template(self, content): - dist, cmd = self.get_cmd() - os.chdir(self.tmp_dir) - self.write_file('MANIFEST.in', content) - cmd.ensure_finalized() - cmd.filelist = FileList() - cmd.read_template() - warnings = self.get_logs(WARN) - self.assertEqual(len(warnings), 1) - - def test_invalid_template_unknown_command(self): - self._check_template('taunt knights *') - - def test_invalid_template_wrong_arguments(self): - # this manifest command takes one argument - self._check_template('prune') - - @unittest.skipIf(os.name != 'nt', 'test relevant for Windows only') - def test_invalid_template_wrong_path(self): - # on Windows, trailing slashes are not allowed - # this used to crash instead of raising a warning: #8286 - self._check_template('include examples/') - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_get_file_list(self): - # make sure MANIFEST is recalculated - dist, cmd = self.get_cmd() - - # filling data_files by pointing files in package_data - dist.package_data = {'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - cmd.formats = ['gztar'] - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(len(manifest), 5) - - # adding a file - self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') - - # make sure build_py is reinitialized, like a fresh run - build_py = dist.get_command_obj('build_py') - build_py.finalized = False - build_py.ensure_finalized() - - cmd.run() - - f = open(cmd.manifest) - try: - manifest2 = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - # 
do we have the new file in MANIFEST ? - self.assertEqual(len(manifest2), 6) - self.assertIn('doc2.txt', manifest2[-1]) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_manifest_marker(self): - # check that autogenerated MANIFESTs have a marker - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.run() - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(manifest[0], - '# file GENERATED by distutils, do NOT edit') - - @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") - def test_manifest_comments(self): - # make sure comments don't cause exceptions or wrong includes - contents = dedent("""\ - # bad.py - #bad.py - good.py - """) - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - self.write_file((self.tmp_dir, cmd.manifest), contents) - self.write_file((self.tmp_dir, 'good.py'), '# pick me!') - self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") - self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") - cmd.run() - self.assertEqual(cmd.filelist.files, ['good.py']) - - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_manual_manifest(self): - # check that a MANIFEST without a marker is left alone - dist, cmd = self.get_cmd() - cmd.formats = ['gztar'] - cmd.ensure_finalized() - self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') - self.write_file((self.tmp_dir, 'README.manual'), - 'This project maintains its MANIFEST file itself.') - cmd.run() - self.assertEqual(cmd.filelist.files, ['README.manual']) - - f = open(cmd.manifest) - try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - finally: - f.close() - - self.assertEqual(manifest, ['README.manual']) - - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - try: - filenames = [tarinfo.name for tarinfo in archive] - finally: - archive.close() - self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', - 'fake-1.0/README.manual']) - - @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") - def test_make_distribution_owner_group(self): - # now building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar and specifying the owner+group - cmd.formats = ['gztar'] - cmd.owner = pwd.getpwuid(0)[0] - cmd.group = grp.getgrgid(0)[0] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) - finally: - archive.close() - - # building a sdist again - dist, cmd = self.get_cmd() - - # creating a gztar - cmd.formats = ['gztar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - archive = tarfile.open(archive_name) - - # note that we are not testing the group ownership here - # because, depending on the platforms and the container - # rights (see #7408) - try: - for member in archive.getmembers(): - self.assertEqual(member.uid, os.getuid()) - finally: - archive.close() - -def 
test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py deleted file mode 100644 index a0a1145da5df8e..00000000000000 --- a/Lib/distutils/tests/test_spawn.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Tests for distutils.spawn.""" -import os -import stat -import sys -import unittest.mock -from test.support import run_unittest, unix_shell, requires_subprocess -from test.support import os_helper - -from distutils.spawn import find_executable -from distutils.spawn import spawn -from distutils.errors import DistutilsExecError -from distutils.tests import support - - -@requires_subprocess() -class SpawnTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - - @unittest.skipUnless(os.name in ('nt', 'posix'), - 'Runs only under posix or nt') - def test_spawn(self): - tmpdir = self.mkdtemp() - - # creating something executable - # through the shell that returns 1 - if sys.platform != 'win32': - exe = os.path.join(tmpdir, 'foo.sh') - self.write_file(exe, '#!%s\nexit 1' % unix_shell) - else: - exe = os.path.join(tmpdir, 'foo.bat') - self.write_file(exe, 'exit 1') - - os.chmod(exe, 0o777) - self.assertRaises(DistutilsExecError, spawn, [exe]) - - # now something that works - if sys.platform != 'win32': - exe = os.path.join(tmpdir, 'foo.sh') - self.write_file(exe, '#!%s\nexit 0' % unix_shell) - else: - exe = os.path.join(tmpdir, 'foo.bat') - self.write_file(exe, 'exit 0') - - os.chmod(exe, 0o777) - spawn([exe]) # should work without any error - - def test_find_executable(self): - with os_helper.temp_dir() as tmp_dir: - # use TESTFN to get a pseudo-unique filename - program_noeext = os_helper.TESTFN - # Give the temporary program an ".exe" suffix for all. - # It's needed on Windows and not harmful on other platforms. 
- program = program_noeext + ".exe" - - filename = os.path.join(tmp_dir, program) - with open(filename, "wb"): - pass - os.chmod(filename, stat.S_IXUSR) - - # test path parameter - rv = find_executable(program, path=tmp_dir) - self.assertEqual(rv, filename) - - if sys.platform == 'win32': - # test without ".exe" extension - rv = find_executable(program_noeext, path=tmp_dir) - self.assertEqual(rv, filename) - - # test find in the current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # test non-existent program - dont_exist_program = "dontexist_" + program - rv = find_executable(dont_exist_program , path=tmp_dir) - self.assertIsNone(rv) - - # PATH='': no match, except in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = '' - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # PATH=':': explicitly looks in the current directory - with os_helper.EnvironmentVarGuard() as env: - env['PATH'] = os.pathsep - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value='', create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): - rv = find_executable(program) - self.assertIsNone(rv) - - # look in current directory - with os_helper.change_cwd(tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, program) - - # missing PATH: test os.confstr("CS_PATH") and os.defpath - with os_helper.EnvironmentVarGuard() as env: - env.pop('PATH', None) - - # without confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - side_effect=ValueError, - create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): - rv = find_executable(program) - self.assertEqual(rv, filename) - - # with confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): - rv = find_executable(program) - self.assertEqual(rv, filename) - - def test_spawn_missing_exe(self): - with self.assertRaises(DistutilsExecError) as ctx: - spawn(['does-not-exist']) - self.assertIn("command 'does-not-exist' failed", str(ctx.exception)) - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py deleted file mode 100644 index ae0eca897bc7bd..00000000000000 --- a/Lib/distutils/tests/test_sysconfig.py +++ /dev/null @@ -1,263 +0,0 @@ -"""Tests for distutils.sysconfig.""" -import contextlib -import os -import shutil -import subprocess -import sys -import textwrap -import unittest - -from distutils import sysconfig -from distutils.ccompiler import get_default_compiler -from distutils.tests import support -from test.support import run_unittest, swap_item, requires_subprocess, is_wasi -from test.support.os_helper import TESTFN - - -class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): - def setUp(self): - super(SysconfigTestCase, self).setUp() - self.makefile = None - - def tearDown(self): - if self.makefile is not None: - os.unlink(self.makefile) - self.cleanup_testfn() - super(SysconfigTestCase, self).tearDown() - 
- def cleanup_testfn(self): - if os.path.isfile(TESTFN): - os.remove(TESTFN) - elif os.path.isdir(TESTFN): - shutil.rmtree(TESTFN) - - @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") - def test_get_config_h_filename(self): - config_h = sysconfig.get_config_h_filename() - self.assertTrue(os.path.isfile(config_h), config_h) - - def test_get_python_lib(self): - # XXX doesn't work on Linux when Python was never installed before - #self.assertTrue(os.path.isdir(lib_dir), lib_dir) - # test for pythonxx.lib? - self.assertNotEqual(sysconfig.get_python_lib(), - sysconfig.get_python_lib(prefix=TESTFN)) - - def test_get_config_vars(self): - cvars = sysconfig.get_config_vars() - self.assertIsInstance(cvars, dict) - self.assertTrue(cvars) - - @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") - def test_srcdir(self): - # See Issues #15322, #15364. - srcdir = sysconfig.get_config_var('srcdir') - - self.assertTrue(os.path.isabs(srcdir), srcdir) - self.assertTrue(os.path.isdir(srcdir), srcdir) - - if sysconfig.python_build: - # The python executable has not been installed so srcdir - # should be a full source checkout. - Python_h = os.path.join(srcdir, 'Include', 'Python.h') - self.assertTrue(os.path.exists(Python_h), Python_h) - # /PC/pyconfig.h always exists even if unused on POSIX. - pyconfig_h = os.path.join(srcdir, 'PC', 'pyconfig.h') - self.assertTrue(os.path.exists(pyconfig_h), pyconfig_h) - pyconfig_h_in = os.path.join(srcdir, 'pyconfig.h.in') - self.assertTrue(os.path.exists(pyconfig_h_in), pyconfig_h_in) - elif os.name == 'posix': - self.assertEqual( - os.path.dirname(sysconfig.get_makefile_filename()), - srcdir) - - def test_srcdir_independent_of_cwd(self): - # srcdir should be independent of the current working directory - # See Issues #15322, #15364. 
- srcdir = sysconfig.get_config_var('srcdir') - cwd = os.getcwd() - try: - os.chdir('..') - srcdir2 = sysconfig.get_config_var('srcdir') - finally: - os.chdir(cwd) - self.assertEqual(srcdir, srcdir2) - - def customize_compiler(self): - # make sure AR gets caught - class compiler: - compiler_type = 'unix' - - def set_executables(self, **kw): - self.exes = kw - - sysconfig_vars = { - 'AR': 'sc_ar', - 'CC': 'sc_cc', - 'CXX': 'sc_cxx', - 'ARFLAGS': '--sc-arflags', - 'CFLAGS': '--sc-cflags', - 'CCSHARED': '--sc-ccshared', - 'LDSHARED': 'sc_ldshared', - 'SHLIB_SUFFIX': 'sc_shutil_suffix', - - # On macOS, disable _osx_support.customize_compiler() - 'CUSTOMIZED_OSX_COMPILER': 'True', - } - - comp = compiler() - with contextlib.ExitStack() as cm: - for key, value in sysconfig_vars.items(): - cm.enter_context(swap_item(sysconfig._config_vars, key, value)) - sysconfig.customize_compiler(comp) - - return comp - - @unittest.skipUnless(get_default_compiler() == 'unix', - 'not testing if default compiler is not unix') - def test_customize_compiler(self): - # Make sure that sysconfig._config_vars is initialized - sysconfig.get_config_vars() - - os.environ['AR'] = 'env_ar' - os.environ['CC'] = 'env_cc' - os.environ['CPP'] = 'env_cpp' - os.environ['CXX'] = 'env_cxx --env-cxx-flags' - os.environ['LDSHARED'] = 'env_ldshared' - os.environ['LDFLAGS'] = '--env-ldflags' - os.environ['ARFLAGS'] = '--env-arflags' - os.environ['CFLAGS'] = '--env-cflags' - os.environ['CPPFLAGS'] = '--env-cppflags' - - comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'env_ar --env-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'env_cpp --env-cppflags') - self.assertEqual(comp.exes['compiler'], - 'env_cc --sc-cflags --env-cflags --env-cppflags') - self.assertEqual(comp.exes['compiler_so'], - ('env_cc --sc-cflags ' - '--env-cflags ''--env-cppflags --sc-ccshared')) - self.assertEqual(comp.exes['compiler_cxx'], - 'env_cxx --env-cxx-flags') - self.assertEqual(comp.exes['linker_exe'], - 'env_cc') - self.assertEqual(comp.exes['linker_so'], - ('env_ldshared --env-ldflags --env-cflags' - ' --env-cppflags')) - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') - - del os.environ['AR'] - del os.environ['CC'] - del os.environ['CPP'] - del os.environ['CXX'] - del os.environ['LDSHARED'] - del os.environ['LDFLAGS'] - del os.environ['ARFLAGS'] - del os.environ['CFLAGS'] - del os.environ['CPPFLAGS'] - - comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'sc_ar --sc-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'sc_cc -E') - self.assertEqual(comp.exes['compiler'], - 'sc_cc --sc-cflags') - self.assertEqual(comp.exes['compiler_so'], - 'sc_cc --sc-cflags --sc-ccshared') - self.assertEqual(comp.exes['compiler_cxx'], - 'sc_cxx') - self.assertEqual(comp.exes['linker_exe'], - 'sc_cc') - self.assertEqual(comp.exes['linker_so'], - 'sc_ldshared') - self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') - - def test_parse_makefile_base(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", - 'OTHER': 'foo'}) - - def test_parse_makefile_literal_dollar(self): - self.makefile = TESTFN - fd = open(self.makefile, 'w') - try: - fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n') - fd.write('VAR=$OTHER\nOTHER=foo') - 
finally: - fd.close() - d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", - 'OTHER': 'foo'}) - - - def test_sysconfig_module(self): - import sysconfig as global_sysconfig - self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), - sysconfig.get_config_var('CFLAGS')) - self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), - sysconfig.get_config_var('LDFLAGS')) - - @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), - 'compiler flags customized') - def test_sysconfig_compiler_vars(self): - # On OS X, binary installers support extension module building on - # various levels of the operating system with differing Xcode - # configurations. This requires customization of some of the - # compiler configuration directives to suit the environment on - # the installed machine. Some of these customizations may require - # running external programs and, so, are deferred until needed by - # the first extension module build. With Python 3.3, only - # the Distutils version of sysconfig is used for extension module - # builds, which happens earlier in the Distutils tests. This may - # cause the following tests to fail since no tests have caused - # the global version of sysconfig to call the customization yet. - # The solution for now is to simply skip this test in this case. - # The longer-term solution is to only have one version of sysconfig. - - import sysconfig as global_sysconfig - if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'): - self.skipTest('compiler flags customized') - self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), - sysconfig.get_config_var('LDSHARED')) - self.assertEqual(global_sysconfig.get_config_var('CC'), - sysconfig.get_config_var('CC')) - - @requires_subprocess() - def test_customize_compiler_before_get_config_vars(self): - # Issue #21923: test that a Distribution compiler - # instance can be called without an explicit call to - # get_config_vars(). - with open(TESTFN, 'w') as f: - f.writelines(textwrap.dedent('''\ - from distutils.core import Distribution - config = Distribution().get_command_obj('config') - # try_compile may pass or it may fail if no compiler - # is found but it should not raise an exception. 
- rc = config.try_compile('int x;') - ''')) - p = subprocess.Popen([str(sys.executable), TESTFN], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True) - outs, errs = p.communicate() - self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) - - -def test_suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase)) - return suite - - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_text_file.py b/Lib/distutils/tests/test_text_file.py deleted file mode 100644 index ebac3d52f9097c..00000000000000 --- a/Lib/distutils/tests/test_text_file.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Tests for distutils.text_file.""" -import os -import unittest -from distutils.text_file import TextFile -from distutils.tests import support -from test.support import run_unittest - -TEST_DATA = """# test file - -line 3 \\ -# intervening comment - continues on next line -""" - -class TextFileTestCase(support.TempdirManager, unittest.TestCase): - - def test_class(self): - # old tests moved from text_file.__main__ - # so they are really called by the buildbots - - # result 1: no fancy options - result1 = ['# test file\n', '\n', 'line 3 \\\n', - '# intervening comment\n', - ' continues on next line\n'] - - # result 2: just strip comments - result2 = ["\n", - "line 3 \\\n", - " continues on next line\n"] - - # result 3: just strip blank lines - result3 = ["# test file\n", - "line 3 \\\n", - "# intervening comment\n", - " continues on next line\n"] - - # result 4: default, strip comments, blank lines, - # and trailing whitespace - result4 = ["line 3 \\", - " continues on next line"] - - # result 5: strip comments and blanks, plus join lines (but don't - # "collapse" joined lines - result5 = ["line 3 continues on next line"] - - # result 6: strip comments and blanks, plus join lines (and - # "collapse" joined lines - result6 = ["line 3 continues on next line"] - - def test_input(count, description, file, expected_result): - result = file.readlines() - self.assertEqual(result, expected_result) - - tmpdir = self.mkdtemp() - filename = os.path.join(tmpdir, "test.txt") - out_file = open(filename, "w") - try: - out_file.write(TEST_DATA) - finally: - out_file.close() - - in_file = TextFile(filename, strip_comments=0, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(1, "no processing", in_file, result1) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(2, "strip comments", in_file, result2) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=0, skip_blanks=1, - lstrip_ws=0, rstrip_ws=0) - try: - test_input(3, "strip blanks", in_file, result3) - finally: - in_file.close() - - in_file = TextFile(filename) - try: - test_input(4, "default processing", in_file, result4) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1) - try: - test_input(5, "join lines without collapsing", in_file, result5) - finally: - in_file.close() - - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1, collapse_join=1) - try: - test_input(6, "join lines with collapsing", in_file, result6) - finally: - in_file.close() - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git 
a/Lib/distutils/tests/test_unixccompiler.py b/Lib/distutils/tests/test_unixccompiler.py deleted file mode 100644 index a3484d4f94cd92..00000000000000 --- a/Lib/distutils/tests/test_unixccompiler.py +++ /dev/null @@ -1,145 +0,0 @@ -"""Tests for distutils.unixccompiler.""" -import sys -import unittest -from test.support import run_unittest -from test.support.os_helper import EnvironmentVarGuard - -from distutils import sysconfig -from distutils.unixccompiler import UnixCCompiler - -class UnixCCompilerTestCase(unittest.TestCase): - - def setUp(self): - self._backup_platform = sys.platform - self._backup_get_config_var = sysconfig.get_config_var - self._backup_config_vars = dict(sysconfig._config_vars) - class CompilerWrapper(UnixCCompiler): - def rpath_foo(self): - return self.runtime_library_dir_option('/foo') - self.cc = CompilerWrapper() - - def tearDown(self): - sys.platform = self._backup_platform - sysconfig.get_config_var = self._backup_get_config_var - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._backup_config_vars) - - @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") - def test_runtime_libdir_option(self): - # Issue#5900 - # - # Ensure RUNPATH is added to extension modules with RPATH if - # GNU ld is used - - # darwin - sys.platform = 'darwin' - self.assertEqual(self.cc.rpath_foo(), '-L/foo') - - # hp-ux - sys.platform = 'hp-ux' - old_gcv = sysconfig.get_config_var - def gcv(v): - return 'xxx' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo']) - - def gcv(v): - return 'gcc' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) - - def gcv(v): - return 'g++' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) - - sysconfig.get_config_var = old_gcv - - # GCC GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'gcc' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') - - # GCC non-GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'gcc' - elif v == 'GNULD': - return 'no' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') - - # GCC GNULD with fully qualified configuration prefix - # see #7617 - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'x86_64-pc-linux-gnu-gcc-4.4.2' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') - - # non-GCC GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'cc' - elif v == 'GNULD': - return 'yes' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-R/foo') - - # non-GCC non-GNULD - sys.platform = 'bar' - def gcv(v): - if v == 'CC': - return 'cc' - elif v == 'GNULD': - return 'no' - sysconfig.get_config_var = gcv - self.assertEqual(self.cc.rpath_foo(), '-R/foo') - - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') - def test_osx_cc_overrides_ldshared(self): - # Issue #18080: - # ensure that setting CC env variable also changes default linker - def gcv(v): - if v == 'LDSHARED': - return 'gcc-4.2 -bundle -undefined dynamic_lookup ' - return 'gcc-4.2' - sysconfig.get_config_var = gcv - with EnvironmentVarGuard() as env: - env['CC'] = 'my_cc' - del env['LDSHARED'] - sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_cc') - - 
@unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') - def test_osx_explicit_ldshared(self): - # Issue #18080: - # ensure that setting CC env variable does not change - # explicit LDSHARED setting for linker - def gcv(v): - if v == 'LDSHARED': - return 'gcc-4.2 -bundle -undefined dynamic_lookup ' - return 'gcc-4.2' - sysconfig.get_config_var = gcv - with EnvironmentVarGuard() as env: - env['CC'] = 'my_cc' - env['LDSHARED'] = 'my_ld -bundle -dynamic' - sysconfig.customize_compiler(self.cc) - self.assertEqual(self.cc.linker_so[0], 'my_ld') - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py deleted file mode 100644 index d6797414883060..00000000000000 --- a/Lib/distutils/tests/test_upload.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Tests for distutils.command.upload.""" -import os -import unittest -import unittest.mock as mock -from urllib.error import HTTPError - -from test.support import run_unittest - -from distutils.command import upload as upload_mod -from distutils.command.upload import upload -from distutils.core import Distribution -from distutils.errors import DistutilsError -from distutils.log import ERROR, INFO - -from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase - -PYPIRC_LONG_PASSWORD = """\ -[distutils] - -index-servers = - server1 - server2 - -[server1] -username:me -password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - -[server2] -username:meagain -password: secret -realm:acme -repository:http://another.pypi/ -""" - - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -class FakeOpen(object): - - def __init__(self, url, msg=None, code=None): - self.url = url - if not isinstance(url, str): - self.req = url - else: - self.req = None - self.msg = msg or 'OK' - self.code = code or 200 - - def getheader(self, name, default=None): - return { - 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) - - def read(self): - return b'xyzzy' - - def getcode(self): - return self.code - - -class uploadTestCase(BasePyPIRCCommandTestCase): - - def setUp(self): - super(uploadTestCase, self).setUp() - self.old_open = upload_mod.urlopen - upload_mod.urlopen = self._urlopen - self.last_open = None - self.next_msg = None - self.next_code = None - - def tearDown(self): - upload_mod.urlopen = self.old_open - super(uploadTestCase, self).tearDown() - - def _urlopen(self, url): - self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code) - return self.last_open - - def test_finalize_options(self): - - # new format - self.write_file(self.rc, PYPIRC) - dist = Distribution() - cmd = upload(dist) - cmd.finalize_options() - for attr, waited in (('username', 'me'), ('password', 'secret'), - ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/')): - self.assertEqual(getattr(cmd, attr), waited) - - def test_saved_password(self): - # file with no password - self.write_file(self.rc, PYPIRC_NOPASSWORD) - - # make sure it passes - dist = Distribution() - cmd = upload(dist) - cmd.finalize_options() - self.assertEqual(cmd.password, None) - - # make sure we get it as well, if another command - # initialized it at the dist level - dist.password = 'xxx' - cmd = upload(dist) - cmd.finalize_options() - self.assertEqual(cmd.password, 'xxx') - - def test_upload(self): - 
tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path) - command, pyversion, filename = 'xxx', '2.6', path - dist_files = [(command, pyversion, filename)] - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - # lets run it - pkg_dir, dist = self.create_dist(dist_files=dist_files) - cmd = upload(dist) - cmd.show_response = 1 - cmd.ensure_finalized() - cmd.run() - - # what did we send ? - headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2162) - content_type = headers['Content-type'] - self.assertTrue(content_type.startswith('multipart/form-data')) - self.assertEqual(self.last_open.req.get_method(), 'POST') - expected_url = 'https://upload.pypi.org/legacy/' - self.assertEqual(self.last_open.req.get_full_url(), expected_url) - data = self.last_open.req.data - self.assertIn(b'xxx',data) - self.assertIn(b'protocol_version', data) - self.assertIn(b'sha256_digest', data) - self.assertIn( - b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' - b'6860', - data - ) - if b'md5_digest' in data: - self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) - if b'blake2_256_digest' in data: - self.assertIn( - b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' - b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' - b'ce443f1534330a', - data - ) - - # The PyPI response body was echoed - results = self.get_logs(INFO) - self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-') - - # bpo-32304: archives whose last byte was b'\r' were corrupted due to - # normalization intended for Mac OS 9. - def test_upload_correct_cr(self): - # content that ends with \r should not be modified. - tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path, content='yy\r') - command, pyversion, filename = 'xxx', '2.6', path - dist_files = [(command, pyversion, filename)] - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - # other fields that ended with \r used to be modified, now are - # preserved. 
- pkg_dir, dist = self.create_dist( - dist_files=dist_files, - description='long description\r' - ) - cmd = upload(dist) - cmd.show_response = 1 - cmd.ensure_finalized() - cmd.run() - - headers = dict(self.last_open.req.headers) - self.assertGreaterEqual(int(headers['Content-length']), 2172) - self.assertIn(b'long description\r', self.last_open.req.data) - - def test_upload_fails(self): - self.next_msg = "Not Found" - self.next_code = 404 - self.assertRaises(DistutilsError, self.test_upload) - - def test_wrong_exception_order(self): - tmp = self.mkdtemp() - path = os.path.join(tmp, 'xxx') - self.write_file(path) - dist_files = [('xxx', '2.6', path)] # command, pyversion, filename - self.write_file(self.rc, PYPIRC_LONG_PASSWORD) - - pkg_dir, dist = self.create_dist(dist_files=dist_files) - tests = [ - (OSError('oserror'), 'oserror', OSError), - (HTTPError('url', 400, 'httperror', {}, None), - 'Upload failed (400): httperror', DistutilsError), - ] - for exception, expected, raised_exception in tests: - with self.subTest(exception=type(exception).__name__): - with mock.patch('distutils.command.upload.urlopen', - new=mock.Mock(side_effect=exception)): - with self.assertRaises(raised_exception): - cmd = upload(dist) - cmd.ensure_finalized() - cmd.run() - results = self.get_logs(ERROR) - self.assertIn(expected, results[-1]) - self.clear_logs() - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py deleted file mode 100644 index f9c223f06ec68d..00000000000000 --- a/Lib/distutils/tests/test_util.py +++ /dev/null @@ -1,313 +0,0 @@ -"""Tests for distutils.util.""" -import os -import sys -import unittest -from copy import copy -from test.support import run_unittest -from unittest import mock - -from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError -from distutils.util import (get_platform, convert_path, change_root, - check_environ, split_quoted, strtobool, - rfc822_escape, byte_compile, - grok_environment_error) -from distutils import util # used to patch _environ_checked -from distutils.sysconfig import get_config_vars -from distutils import sysconfig -from distutils.tests import support -import _osx_support - -class UtilTestCase(support.EnvironGuard, unittest.TestCase): - - def setUp(self): - super(UtilTestCase, self).setUp() - # saving the environment - self.name = os.name - self.platform = sys.platform - self.version = sys.version - self.sep = os.sep - self.join = os.path.join - self.isabs = os.path.isabs - self.splitdrive = os.path.splitdrive - self._config_vars = copy(sysconfig._config_vars) - - # patching os.uname - if hasattr(os, 'uname'): - self.uname = os.uname - self._uname = os.uname() - else: - self.uname = None - self._uname = None - - os.uname = self._get_uname - - def tearDown(self): - # getting back the environment - os.name = self.name - sys.platform = self.platform - sys.version = self.version - os.sep = self.sep - os.path.join = self.join - os.path.isabs = self.isabs - os.path.splitdrive = self.splitdrive - if self.uname is not None: - os.uname = self.uname - else: - del os.uname - sysconfig._config_vars.clear() - sysconfig._config_vars.update(self._config_vars) - super(UtilTestCase, self).tearDown() - - def _set_uname(self, uname): - self._uname = uname - - def _get_uname(self): - return self._uname - - def test_get_platform(self): - - # windows XP, 32bits - os.name = 'nt' - 
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Intel)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win32') - - # windows XP, amd64 - os.name = 'nt' - sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Amd64)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win-amd64') - - # macbook - os.name = 'posix' - sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]') - sys.platform = 'darwin' - self._set_uname(('Darwin', 'macziade', '8.11.1', - ('Darwin Kernel Version 8.11.1: ' - 'Wed Oct 10 18:23:28 PDT 2007; ' - 'root:xnu-792.25.20~1/RELEASE_I386'), 'i386')) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' - - get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' - '-fwrapv -O3 -Wall -Wstrict-prototypes') - - cursize = sys.maxsize - sys.maxsize = (2 ** 31)-1 - try: - self.assertEqual(get_platform(), 'macosx-10.3-i386') - finally: - sys.maxsize = cursize - - # macbook with fat binaries (fat, universal or fat64) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4' - get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat') - - _osx_support._remove_original_values(get_config_vars()) - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - self.assertEqual(get_platform(), 'macosx-10.4-fat') - - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-intel') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-fat3') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-universal') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat64') - - for arch in ('ppc', 'i386', 'x86_64', 'ppc64'): - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch %s -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3'%(arch,)) - - self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,)) - - - # linux debian sarge - os.name = 'posix' - sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) ' - '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]') - sys.platform = 'linux2' - self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7', - '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686')) - - self.assertEqual(get_platform(), 'linux-i686') - - # XXX more 
platforms to tests here - - def test_convert_path(self): - # linux/mac - os.sep = '/' - def _join(path): - return '/'.join(path) - os.path.join = _join - - self.assertEqual(convert_path('/home/to/my/stuff'), - '/home/to/my/stuff') - - # win - os.sep = '\\' - def _join(*path): - return '\\'.join(path) - os.path.join = _join - - self.assertRaises(ValueError, convert_path, '/home/to/my/stuff') - self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/') - - self.assertEqual(convert_path('home/to/my/stuff'), - 'home\\to\\my\\stuff') - self.assertEqual(convert_path('.'), - os.curdir) - - def test_change_root(self): - # linux/mac - os.name = 'posix' - def _isabs(path): - return path[0] == '/' - os.path.isabs = _isabs - def _join(*path): - return '/'.join(path) - os.path.join = _join - - self.assertEqual(change_root('/root', '/old/its/here'), - '/root/old/its/here') - self.assertEqual(change_root('/root', 'its/here'), - '/root/its/here') - - # windows - os.name = 'nt' - def _isabs(path): - return path.startswith('c:\\') - os.path.isabs = _isabs - def _splitdrive(path): - if path.startswith('c:'): - return ('', path.replace('c:', '')) - return ('', path) - os.path.splitdrive = _splitdrive - def _join(*path): - return '\\'.join(path) - os.path.join = _join - - self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'), - 'c:\\root\\old\\its\\here') - self.assertEqual(change_root('c:\\root', 'its\\here'), - 'c:\\root\\its\\here') - - # BugsBunny os (it's a great os) - os.name = 'BugsBunny' - self.assertRaises(DistutilsPlatformError, - change_root, 'c:\\root', 'its\\here') - - # XXX platforms to be covered: mac - - def test_check_environ(self): - util._environ_checked = 0 - os.environ.pop('HOME', None) - - check_environ() - - self.assertEqual(os.environ['PLAT'], get_platform()) - self.assertEqual(util._environ_checked, 1) - - @unittest.skipUnless(os.name == 'posix', 'specific to posix') - def test_check_environ_getpwuid(self): - util._environ_checked = 0 - os.environ.pop('HOME', None) - - try: - import pwd - except ImportError: - raise unittest.SkipTest("Test requires pwd module.") - - # only set pw_dir field, other fields are not used - result = pwd.struct_passwd((None, None, None, None, None, - '/home/distutils', None)) - with mock.patch.object(pwd, 'getpwuid', return_value=result): - check_environ() - self.assertEqual(os.environ['HOME'], '/home/distutils') - - util._environ_checked = 0 - os.environ.pop('HOME', None) - - # bpo-10496: Catch pwd.getpwuid() error - with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError): - check_environ() - self.assertNotIn('HOME', os.environ) - - def test_split_quoted(self): - self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'), - ['one', 'two', 'three', 'four']) - - def test_strtobool(self): - yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1') - no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N') - - for y in yes: - self.assertTrue(strtobool(y)) - - for n in no: - self.assertFalse(strtobool(n)) - - def test_rfc822_escape(self): - header = 'I am a\npoor\nlonesome\nheader\n' - res = rfc822_escape(header) - wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' - 'header%(8s)s') % {'8s': '\n'+8*' '} - self.assertEqual(res, wanted) - - def test_dont_write_bytecode(self): - # makes sure byte_compile raise a DistutilsError - # if sys.dont_write_bytecode is True - old_dont_write_bytecode = sys.dont_write_bytecode - sys.dont_write_bytecode = True - try: - self.assertRaises(DistutilsByteCompileError, byte_compile, []) - 
finally: - sys.dont_write_bytecode = old_dont_write_bytecode - - def test_grok_environment_error(self): - # test obsolete function to ensure backward compat (#4931) - exc = IOError("Unable to find batch file") - msg = grok_environment_error(exc) - self.assertEqual(msg, "error: Unable to find batch file") - - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_version.py b/Lib/distutils/tests/test_version.py deleted file mode 100644 index 1563e0227b60dd..00000000000000 --- a/Lib/distutils/tests/test_version.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Tests for distutils.version.""" -import unittest -from distutils.version import LooseVersion -from distutils.version import StrictVersion -from test.support import run_unittest - -class VersionTestCase(unittest.TestCase): - - def test_prerelease(self): - version = StrictVersion('1.2.3a1') - self.assertEqual(version.version, (1, 2, 3)) - self.assertEqual(version.prerelease, ('a', 1)) - self.assertEqual(str(version), '1.2.3a1') - - version = StrictVersion('1.2.0') - self.assertEqual(str(version), '1.2') - - def test_cmp_strict(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', ValueError), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', ValueError), - ('3.2.pl0', '3.1.1.6', ValueError), - ('2g6', '11g', ValueError), - ('0.9', '2.2', -1), - ('1.2.1', '1.2', 1), - ('1.1', '1.2.2', -1), - ('1.2', '1.1', 1), - ('1.2.1', '1.2.2', -1), - ('1.2.2', '1.2', 1), - ('1.2', '1.2.2', -1), - ('0.4.0', '0.4', 0), - ('1.13++', '5.5.kw', ValueError)) - - for v1, v2, wanted in versions: - try: - res = StrictVersion(v1)._cmp(StrictVersion(v2)) - except ValueError: - if wanted is ValueError: - continue - else: - raise AssertionError(("cmp(%s, %s) " - "shouldn't raise ValueError") - % (v1, v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = StrictVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = StrictVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - - - def test_cmp(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', 1), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', -1), - ('3.2.pl0', '3.1.1.6', 1), - ('2g6', '11g', -1), - ('0.960923', '2.2beta29', -1), - ('1.13++', '5.5.kw', -1)) - - - for v1, v2, wanted in versions: - res = LooseVersion(v1)._cmp(LooseVersion(v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = LooseVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) - res = LooseVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - -def test_suite(): - return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Lib/distutils/tests/test_versionpredicate.py b/Lib/distutils/tests/test_versionpredicate.py deleted file mode 100644 index 28ae09dc2058da..00000000000000 --- a/Lib/distutils/tests/test_versionpredicate.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests harness for distutils.versionpredicate. 
- -""" - -import distutils.versionpredicate -import doctest -from test.support import run_unittest - -def test_suite(): - return doctest.DocTestSuite(distutils.versionpredicate) - -if __name__ == '__main__': - run_unittest(test_suite()) diff --git a/Lib/distutils/text_file.py b/Lib/distutils/text_file.py deleted file mode 100644 index 93abad38f434d7..00000000000000 --- a/Lib/distutils/text_file.py +++ /dev/null @@ -1,286 +0,0 @@ -"""text_file - -provides the TextFile class, which gives an interface to text files -that (optionally) takes care of stripping comments, ignoring blank -lines, and joining lines with backslashes.""" - -import sys, io - - -class TextFile: - """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! 
In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not.""" - - default_options = { 'strip_comments': 1, - 'skip_blanks': 1, - 'lstrip_ws': 0, - 'rstrip_ws': 1, - 'join_lines': 0, - 'collapse_join': 0, - 'errors': 'strict', - } - - def __init__(self, filename=None, file=None, **options): - """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'.""" - if filename is None and file is None: - raise RuntimeError("you must supply either or both of 'filename' and 'file'") - - # set values for all options -- either from client option hash - # or fallback to default_options - for opt in self.default_options.keys(): - if opt in options: - setattr(self, opt, options[opt]) - else: - setattr(self, opt, self.default_options[opt]) - - # sanity check client option hash - for opt in options.keys(): - if opt not in self.default_options: - raise KeyError("invalid TextFile option '%s'" % opt) - - if file is None: - self.open(filename) - else: - self.filename = filename - self.file = file - self.current_line = 0 # assuming that file is at BOF! - - # 'linebuf' is a stack of lines that will be emptied before we - # actually read from the file; it's only populated by an - # 'unreadline()' operation - self.linebuf = [] - - def open(self, filename): - """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor.""" - self.filename = filename - self.file = io.open(self.filename, 'r', errors=self.errors) - self.current_line = 0 - - def close(self): - """Close the current file and forget everything we know about it - (filename, current line number).""" - file = self.file - self.file = None - self.filename = None - self.current_line = None - file.close() - - def gen_error(self, msg, line=None): - outmsg = [] - if line is None: - line = self.current_line - outmsg.append(self.filename + ", ") - if isinstance(line, (list, tuple)): - outmsg.append("lines %d-%d: " % tuple(line)) - else: - outmsg.append("line %d: " % line) - outmsg.append(str(msg)) - return "".join(outmsg) - - def error(self, msg, line=None): - raise ValueError("error: " + self.gen_error(msg, line)) - - def warn(self, msg, line=None): - """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line.""" - sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") - - def readline(self): - """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not.""" - # If any "unread" lines waiting in 'linebuf', return the top - # one. 
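Before the readline() implementation below, the documented behaviour is easier to see in miniature. This is a deliberately simplified stand-in, not the removed class: it ignores escaped "#", collapse_join and the lstrip/rstrip options, keeping only comment stripping, blank skipping and backslash continuation:

import io

def read_logical_lines(text):
    """Yield logical lines: '#' comments stripped, blanks skipped,
    backslash-continued lines joined (simplified TextFile behaviour)."""
    buildup = ''
    for raw in io.StringIO(text):
        line = raw.split('#', 1)[0].rstrip()
        if not line:
            continue
        if line.endswith('\\'):
            buildup += line[:-1]
            continue
        yield buildup + line
        buildup = ''

print(list(read_logical_lines("include *.py  # comment\n\nREADME \\\n CHANGES\n")))
# ['include *.py', 'README  CHANGES']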
(We don't actually buffer read-ahead data -- lines only - # get put in 'linebuf' if the client explicitly does an - # 'unreadline()'. - if self.linebuf: - line = self.linebuf[-1] - del self.linebuf[-1] - return line - - buildup_line = '' - - while True: - # read the line, make it None if EOF - line = self.file.readline() - if line == '': - line = None - - if self.strip_comments and line: - - # Look for the first "#" in the line. If none, never - # mind. If we find one and it's the first character, or - # is not preceded by "\", then it starts a comment -- - # strip the comment, strip whitespace before it, and - # carry on. Otherwise, it's just an escaped "#", so - # unescape it (and any other escaped "#"'s that might be - # lurking in there) and otherwise leave the line alone. - - pos = line.find("#") - if pos == -1: # no "#" -- no comments - pass - - # It's definitely a comment -- either "#" is the first - # character, or it's elsewhere and unescaped. - elif pos == 0 or line[pos-1] != "\\": - # Have to preserve the trailing newline, because it's - # the job of a later step (rstrip_ws) to remove it -- - # and if rstrip_ws is false, we'd better preserve it! - # (NB. this means that if the final line is all comment - # and has no trailing newline, we will think that it's - # EOF; I think that's OK.) - eol = (line[-1] == '\n') and '\n' or '' - line = line[0:pos] + eol - - # If all that's left is whitespace, then skip line - # *now*, before we try to join it to 'buildup_line' -- - # that way constructs like - # hello \\ - # # comment that should be ignored - # there - # result in "hello there". - if line.strip() == "": - continue - else: # it's an escaped "#" - line = line.replace("\\#", "#") - - # did previous line end with a backslash? then accumulate - if self.join_lines and buildup_line: - # oops: end of file - if line is None: - self.warn("continuation line immediately precedes " - "end-of-file") - return buildup_line - - if self.collapse_join: - line = line.lstrip() - line = buildup_line + line - - # careful: pay attention to line number when incrementing it - if isinstance(self.current_line, list): - self.current_line[1] = self.current_line[1] + 1 - else: - self.current_line = [self.current_line, - self.current_line + 1] - # just an ordinary line, read it as usual - else: - if line is None: # eof - return None - - # still have to be careful about incrementing the line number! - if isinstance(self.current_line, list): - self.current_line = self.current_line[1] + 1 - else: - self.current_line = self.current_line + 1 - - # strip whitespace however the client wants (leading and - # trailing, or one or the other, or neither) - if self.lstrip_ws and self.rstrip_ws: - line = line.strip() - elif self.lstrip_ws: - line = line.lstrip() - elif self.rstrip_ws: - line = line.rstrip() - - # blank line (whether we rstrip'ed or not)? 
skip to next line - # if appropriate - if (line == '' or line == '\n') and self.skip_blanks: - continue - - if self.join_lines: - if line[-1] == '\\': - buildup_line = line[:-1] - continue - - if line[-2:] == '\\\n': - buildup_line = line[0:-2] + '\n' - continue - - # well, I guess there's some actual content there: return it - return line - - def readlines(self): - """Read and return the list of all logical lines remaining in the - current file.""" - lines = [] - while True: - line = self.readline() - if line is None: - return lines - lines.append(line) - - def unreadline(self, line): - """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead.""" - self.linebuf.append(line) diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py deleted file mode 100644 index d00c48981eb6d6..00000000000000 --- a/Lib/distutils/unixccompiler.py +++ /dev/null @@ -1,329 +0,0 @@ -"""distutils.unixccompiler - -Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - -import os, sys, re - -from distutils import sysconfig -from distutils.dep_util import newer -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options, gen_lib_options -from distutils.errors import \ - DistutilsExecError, CompileError, LibError, LinkError -from distutils import log - -if sys.platform == 'darwin': - import _osx_support - -# XXX Things not currently handled: -# * optimization/debug/warning flags; we just use whatever's in Python's -# Makefile and live with it. Is this adequate? If not, we might -# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, -# SunCCompiler, and I suspect down that road lies madness. -# * even if we don't know a warning flag from an optimization flag, -# we need some way for outsiders to feed preprocessor/compiler/linker -# flags in to us -- eg. a sysadmin might want to mandate certain flags -# via a site config file, or a user might want to set something for -# compiling this module distribution only via the setup.py command -# line, whatever. As long as these options come from something on the -# current system, they can be as system-dependent as they like, and we -# should just happily stuff them into the preprocessor/compiler/linker -# options and carry on. - - -class UnixCCompiler(CCompiler): - - compiler_type = 'unix' - - # These are used by CCompiler in two places: the constructor sets - # instance attributes 'preprocessor', 'compiler', etc. from them, and - # 'set_executable()' allows any of these to be set. The defaults here - # are pretty generic; they will probably have to be set by an outsider - # (eg. using information discovered by the sysconfig about building - # Python extensions). 
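The executables table that follows is really just raw material for assembling command lines; conceptually the compile step combines -D/-U/-I options with the source and object paths. A rough standalone sketch of that assembly (not the distutils code itself, with illustrative file names and flags):

def build_compile_cmd(cc, src, obj, macros=(), include_dirs=(), extra_postargs=()):
    """Assemble a cc-style command: cc -DNAME[=value] -Idir -c src -o obj."""
    cmd = list(cc)
    for name, value in macros:
        cmd.append('-D%s' % name if value is None else '-D%s=%s' % (name, value))
    cmd += ['-I%s' % d for d in include_dirs]
    cmd += ['-c', src, '-o', obj]
    cmd += list(extra_postargs)
    return cmd

print(build_compile_cmd(['cc'], 'spam.c', 'spam.o',
                        macros=[('NDEBUG', None), ('VERSION', '"1.0"')],
                        include_dirs=['/usr/local/include']))
# ['cc', '-DNDEBUG', '-DVERSION="1.0"', '-I/usr/local/include', '-c', 'spam.c', '-o', 'spam.o']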
- executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } - - if sys.platform[:6] == "darwin": - executables['ranlib'] = ["ranlib"] - - # Needed for the filename generation methods provided by the base - # class, CCompiler. NB. whoever instantiates/uses a particular - # UnixCCompiler instance should set 'shared_lib_ext' -- we set a - # reasonable common default here, but it's not necessarily used on all - # Unices! - - src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".so" - dylib_lib_extension = ".dylib" - xcode_stub_lib_extension = ".tbd" - static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" - xcode_stub_lib_format = dylib_lib_format - if sys.platform == "cygwin": - exe_extension = ".exe" - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - fixed_args = self._fix_compile_args(None, macros, include_dirs) - ignore, macros, include_dirs = fixed_args - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = self.preprocessor + pp_opts - if output_file: - pp_args.extend(['-o', output_file]) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or we're - # generating output to stdout, or there's a target output file and - # the source file is newer than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - raise CompileError(msg) - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) - try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) - - # Not many Unices required ranlib anymore -- SunOS 4.x is, I - # think the only major Unix that does. Maybe we need some - # platform intelligence here to skip ranlib if it's not - # needed -- or maybe Python's configure script took care of - # it for us, hence the check for leading colon. 
- if self.ranlib: - try: - self.spawn(self.ranlib + [output_filename]) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) - if not isinstance(output_dir, (str, type(None))): - raise TypeError("'output_dir' must be a string or None") - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) - if debug: - ld_args[:0] = ['-g'] - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - self.mkpath(os.path.dirname(output_filename)) - try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] - if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) - - self.spawn(linker + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "-L" + dir - - def _is_gcc(self, compiler_name): - # clang uses same syntax for rpath as gcc - return any(name in compiler_name for name in ("gcc", "g++", "clang")) - - def runtime_library_dir_option(self, dir): - # XXX Hackish, at the very least. See Python bug #445902: - # http://sourceforge.net/tracker/index.php - # ?func=detail&aid=445902&group_id=5470&atid=105470 - # Linkers on different platforms need different options to - # specify that directories need to be added to the list of - # directories searched for dependencies when a dynamic library - # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to - # be told to pass the -R option through to the linker, whereas - # other compilers and gcc on other systems just know this. - # Other compilers may need something slightly different. At - # this time, there's no way to determine this information from - # the configuration data stored in the Python installation, so - # we use this hack. 
- compiler = os.path.basename(sysconfig.get_config_var("CC")) - if sys.platform[:6] == "darwin": - # MacOSX's linker doesn't understand the -R flag at all - return "-L" + dir - elif sys.platform[:7] == "freebsd": - return "-Wl,-rpath=" + dir - elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] - else: - if self._is_gcc(compiler): - # gcc on non-GNU systems does not need -Wl, but can - # use it anyway. Since distutils has always passed in - # -Wl whenever gcc was used in the past it is probably - # safest to keep doing so. - if sysconfig.get_config_var("GNULD") == "yes": - # GNU ld needs an extra option to get a RUNPATH - # instead of just an RPATH. - return "-Wl,--enable-new-dtags,-R" + dir - else: - return "-Wl,-R" + dir - else: - # No idea how --enable-new-dtags would be passed on to - # ld if this system was using GNU ld. Don't know if a - # system like this even exists. - return "-R" + dir - - def library_option(self, lib): - return "-l" + lib - - def find_library_file(self, dirs, lib, debug=0): - shared_f = self.library_filename(lib, lib_type='shared') - dylib_f = self.library_filename(lib, lib_type='dylib') - xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') - static_f = self.library_filename(lib, lib_type='static') - - if sys.platform == 'darwin': - # On OSX users can specify an alternate SDK using - # '-isysroot', calculate the SDK root if it is specified - # (and use it further on) - # - # Note that, as of Xcode 7, Apple SDKs may contain textual stub - # libraries with .tbd extensions rather than the normal .dylib - # shared libraries installed in /. The Apple compiler tool - # chain handles this transparently but it can cause problems - # for programs that are being built with an SDK and searching - # for specific libraries. Callers of find_library_file need to - # keep in mind that the base filename of the returned SDK library - # file might have a different extension from that of the library - # file installed on the running system, for example: - # /Applications/Xcode.app/Contents/Developer/Platforms/ - # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ - # usr/lib/libedit.tbd - # vs - # /usr/lib/libedit.dylib - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s*(\S+)', cflags) - if m is None: - sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC')) - else: - sysroot = m.group(1) - - - - for dir in dirs: - shared = os.path.join(dir, shared_f) - dylib = os.path.join(dir, dylib_f) - static = os.path.join(dir, static_f) - xcode_stub = os.path.join(dir, xcode_stub_f) - - if sys.platform == 'darwin' and ( - dir.startswith('/System/') or ( - dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): - - shared = os.path.join(sysroot, dir[1:], shared_f) - dylib = os.path.join(sysroot, dir[1:], dylib_f) - static = os.path.join(sysroot, dir[1:], static_f) - xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) - - # We're second-guessing the linker here, with not much hard - # data to go on: GCC seems to prefer the shared library, so I'm - # assuming that *all* Unix C compilers do. And of course I'm - # ignoring even GCC's "-static" option. So sue me. 
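Looping back to runtime_library_dir_option() above: the whole method exists because the rpath spelling differs by platform and toolchain. Condensed into a sketch (HP-UX omitted, and the gcc/GNU-ld detection is passed in as flags instead of being probed from sysconfig, so treat the details as illustrative):

import sys

def rpath_option(directory, is_gcc=True, gnu_ld=True):
    if sys.platform == 'darwin':
        return '-L' + directory                            # macOS ld has no -R here
    if sys.platform.startswith('freebsd'):
        return '-Wl,-rpath=' + directory
    if is_gcc and gnu_ld:
        return '-Wl,--enable-new-dtags,-R' + directory     # RUNPATH, not just RPATH
    if is_gcc:
        return '-Wl,-R' + directory
    return '-R' + directory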
- if os.path.exists(dylib): - return dylib - elif os.path.exists(xcode_stub): - return xcode_stub - elif os.path.exists(shared): - return shared - elif os.path.exists(static): - return static - - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py deleted file mode 100644 index 2ce5c5b64d62fa..00000000000000 --- a/Lib/distutils/util.py +++ /dev/null @@ -1,562 +0,0 @@ -"""distutils.util - -Miscellaneous utility functions -- anything that doesn't fit into -one of the other *util.py modules. -""" - -import os -import re -import importlib.util -import string -import sys -import distutils -from distutils.errors import DistutilsPlatformError -from distutils.dep_util import newer -from distutils.spawn import spawn -from distutils import log -from distutils.errors import DistutilsByteCompileError - -def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - - """ - if os.name == 'nt': - if 'amd64' in sys.version.lower(): - return 'win-amd64' - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - return sys.platform - - # Set for cross builds explicitly - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - - if os.name != "posix" or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - - (osname, host, release, version, machine) = os.uname() - - # Convert the OS name to lowercase, remove '/' characters, and translate - # spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_') - machine = machine.replace('/', '-') - - if osname[:5] == "linux": - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - elif osname[:5] == "sunos": - if release[0] >= "5": # SunOS 5 == Solaris 2 - osname = "solaris" - release = "%d.%s" % (int(release[0]) - 3, release[2:]) - # We can't use "platform.architecture()[0]" because a - # bootstrap problem. We use a dict to get an error - # if some suspicious happens. 
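get_host_platform() above is essentially what the standard library now exposes directly; on current interpreters the same tag can be obtained without distutils (the exact strings vary by machine, so the ones in the comment are only examples):

import sysconfig

# e.g. 'linux-x86_64', 'win-amd64' or 'macosx-13.0-arm64' depending on the host
print(sysconfig.get_platform())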
- bitness = {2147483647:"32bit", 9223372036854775807:"64bit"} - machine += ".%s" % bitness[sys.maxsize] - # fall through to standard osname-release-machine representation - elif osname[:3] == "aix": - from _aix_support import aix_platform - return aix_platform() - elif osname[:6] == "cygwin": - osname = "cygwin" - rel_re = re.compile (r'[\d.]+', re.ASCII) - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == "darwin": - import _osx_support, distutils.sysconfig - osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) - - return "%s-%s-%s" % (osname, release, machine) - -def get_platform(): - if os.name == 'nt': - TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', - } - return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() - else: - return get_host_platform() - -def convert_path (pathname): - """Return 'pathname' as a name that will work on the native filesystem, - i.e. split it on '/' and put it back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - if os.sep == '/': - return pathname - if not pathname: - return pathname - if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) - if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) - - paths = pathname.split('/') - while '.' in paths: - paths.remove('.') - if not paths: - return os.curdir - return os.path.join(*paths) - -# convert_path () - - -def change_root (new_root, pathname): - """Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. - """ - if os.name == 'posix': - if not os.path.isabs(pathname): - return os.path.join(new_root, pathname) - else: - return os.path.join(new_root, pathname[1:]) - - elif os.name == 'nt': - (drive, path) = os.path.splitdrive(pathname) - if path[0] == '\\': - path = path[1:] - return os.path.join(new_root, path) - - else: - raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) - - -_environ_checked = 0 -def check_environ (): - """Ensure that 'os.environ' has all the environment variables we - guarantee that users can use in config files, command-line options, - etc. Currently this includes: - HOME - user's home directory (Unix only) - PLAT - description of the current platform, including hardware - and OS (see 'get_platform()') - """ - global _environ_checked - if _environ_checked: - return - - if os.name == 'posix' and 'HOME' not in os.environ: - try: - import pwd - os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] - except (ImportError, KeyError): - # bpo-10496: if the current user identifier doesn't exist in the - # password database, do nothing - pass - - if 'PLAT' not in os.environ: - os.environ['PLAT'] = get_platform() - - _environ_checked = 1 - - -def subst_vars (s, local_vars): - """Perform shell/Perl-style variable substitution on 'string'. 
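convert_path() above exists because setup scripts always spell paths with '/', which then has to become the native separator. pathlib covers the same ground (without the leading/trailing-slash checks); a quick illustration:

from pathlib import PurePosixPath, PureWindowsPath

# The same setup-script-style path rendered for each convention.
print(str(PurePosixPath('home/to/my/stuff')))    # home/to/my/stuff
print(str(PureWindowsPath('home/to/my/stuff')))  # home\to\my\stuff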
Every - occurrence of '$' followed by a name is considered a variable, and - variable is substituted by the value found in the 'local_vars' - dictionary, or in 'os.environ' if it's not in 'local_vars'. - 'os.environ' is first checked/augmented to guarantee that it contains - certain values: see 'check_environ()'. Raise ValueError for any - variables not found in either 'local_vars' or 'os.environ'. - """ - check_environ() - def _subst (match, local_vars=local_vars): - var_name = match.group(1) - if var_name in local_vars: - return str(local_vars[var_name]) - else: - return os.environ[var_name] - - try: - return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) - except KeyError as var: - raise ValueError("invalid variable '$%s'" % var) - -# subst_vars () - - -def grok_environment_error (exc, prefix="error: "): - # Function kept for backward compatibility. - # Used to try clever things with EnvironmentErrors, - # but nowadays str(exception) produces good messages. - return prefix + str(exc) - - -# Needed by 'split_quoted()' -_wordchars_re = _squote_re = _dquote_re = None -def _init_regex(): - global _wordchars_re, _squote_re, _dquote_re - _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) - _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") - _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') - -def split_quoted (s): - """Split a string up according to Unix shell-like rules for quotes and - backslashes. In short: words are delimited by spaces, as long as those - spaces are not escaped by a backslash, or inside a quoted string. - Single and double quotes are equivalent, and the quote characters can - be backslash-escaped. The backslash is stripped from any two-character - escape sequence, leaving only the escaped character. The quote - characters are stripped from any quoted string. Returns a list of - words. - """ - - # This is a nice algorithm for splitting up a single string, since it - # doesn't require character-by-character examination. It was a little - # bit of a brain-bender to get it working right, though... - if _wordchars_re is None: _init_regex() - - s = s.strip() - words = [] - pos = 0 - - while s: - m = _wordchars_re.match(s, pos) - end = m.end() - if end == len(s): - words.append(s[:end]) - break - - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter - s = s[end:].lstrip() - pos = 0 - - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 - - else: - if s[end] == "'": # slurp singly-quoted string - m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string - m = _dquote_re.match(s, end) - else: - raise RuntimeError("this can't happen (bad char '%c')" % s[end]) - - if m is None: - raise ValueError("bad string (mismatched %s quotes?)" % s[end]) - - (beg, end) = m.span() - s = s[:beg] + s[beg+1:end-1] + s[end:] - pos = m.end() - 2 - - if pos >= len(s): - words.append(s) - break - - return words - -# split_quoted () - - -def execute (func, args, msg=None, verbose=0, dry_run=0): - """Perform some action that affects the outside world (eg. by - writing to the filesystem). Such actions are special because they - are disabled by the 'dry_run' flag. This method takes care of all - that bureaucracy for you; all you have to do is supply the - function to call and an argument tuple for it (to embody the - "external action" being performed), and an optional message to - print. 
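The split_quoted() helper above predates shlex in spirit; for most inputs the standard library's shlex.split() gives the same answer (the two differ on a few escaping corner cases, so this is an approximation rather than a drop-in equivalence):

import shlex

print(shlex.split('"one" \'two\' three\\ four'))
# ['one', 'two', 'three four']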
- """ - if msg is None: - msg = "%s%r" % (func.__name__, args) - if msg[-2:] == ',)': # correct for singleton tuple - msg = msg[0:-2] + ')' - - log.info(msg) - if not dry_run: - func(*args) - - -def strtobool (val): - """Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - val = val.lower() - if val in ('y', 'yes', 't', 'true', 'on', '1'): - return 1 - elif val in ('n', 'no', 'f', 'false', 'off', '0'): - return 0 - else: - raise ValueError("invalid truth value %r" % (val,)) - - -def byte_compile (py_files, - optimize=0, force=0, - prefix=None, base_dir=None, - verbose=1, dry_run=0, - direct=None): - """Byte-compile a collection of Python source files to .pyc - files in a __pycache__ subdirectory. 'py_files' is a list - of files to compile; any files that don't end in ".py" are silently - skipped. 'optimize' must be one of the following: - 0 - don't optimize - 1 - normal optimization (like "python -O") - 2 - extra optimization (like "python -OO") - If 'force' is true, all files are recompiled regardless of - timestamps. - - The source filename encoded in each bytecode file defaults to the - filenames listed in 'py_files'; you can modify these with 'prefix' and - 'basedir'. 'prefix' is a string that will be stripped off of each - source filename, and 'base_dir' is a directory name that will be - prepended (after 'prefix' is stripped). You can supply either or both - (or neither) of 'prefix' and 'base_dir', as you wish. - - If 'dry_run' is true, doesn't actually do anything that would - affect the filesystem. - - Byte-compilation is either done directly in this interpreter process - with the standard py_compile module, or indirectly by writing a - temporary script and executing it. Normally, you should let - 'byte_compile()' figure out to use direct compilation or not (see - the source for details). The 'direct' flag is used by the script - generated in indirect mode; unless you know what you're doing, leave - it set to None. - """ - - # Late import to fix a bootstrap issue: _posixsubprocess is built by - # setup.py, but setup.py uses distutils. - import subprocess - - # nothing is done if sys.dont_write_bytecode is True - if sys.dont_write_bytecode: - raise DistutilsByteCompileError('byte-compiling is disabled.') - - # First, if the caller didn't force us into direct or indirect mode, - # figure out which mode we should be in. We take a conservative - # approach: choose direct mode *only* if the current interpreter is - # in debug mode and optimize is 0. If we're not in debug mode (-O - # or -OO), we don't know which level of optimization this - # interpreter is running with, so we can't do direct - # byte-compilation and be certain that it's the right thing. Thus, - # always compile indirectly if the current interpreter is in either - # optimize mode, or if either optimization level was requested by - # the caller. - if direct is None: - direct = (__debug__ and optimize == 0) - - # "Indirect" byte-compilation: write a temporary script and then - # run it with the appropriate flags. 
- if not direct: - try: - from tempfile import mkstemp - (script_fd, script_name) = mkstemp(".py") - except ImportError: - from tempfile import mktemp - (script_fd, script_name) = None, mktemp(".py") - log.info("writing byte-compilation script '%s'", script_name) - if not dry_run: - if script_fd is not None: - script = os.fdopen(script_fd, "w") - else: - script = open(script_name, "w") - - with script: - script.write("""\ -from distutils.util import byte_compile -files = [ -""") - - # XXX would be nice to write absolute filenames, just for - # safety's sake (script should be more robust in the face of - # chdir'ing before running it). But this requires abspath'ing - # 'prefix' as well, and that breaks the hack in build_lib's - # 'byte_compile()' method that carefully tacks on a trailing - # slash (os.sep really) to make sure the prefix here is "just - # right". This whole prefix business is rather delicate -- the - # problem is that it's really a directory, but I'm treating it - # as a dumb string, so trailing slashes and so forth matter. - - #py_files = map(os.path.abspath, py_files) - #if prefix: - # prefix = os.path.abspath(prefix) - - script.write(",\n".join(map(repr, py_files)) + "]\n") - script.write(""" -byte_compile(files, optimize=%r, force=%r, - prefix=%r, base_dir=%r, - verbose=%r, dry_run=0, - direct=1) -""" % (optimize, force, prefix, base_dir, verbose)) - - msg = distutils._DEPRECATION_MESSAGE - cmd = [sys.executable] - cmd.extend(subprocess._optim_args_from_interpreter_flags()) - cmd.append(f'-Wignore:{msg}:DeprecationWarning') - cmd.append(script_name) - spawn(cmd, dry_run=dry_run) - execute(os.remove, (script_name,), "removing %s" % script_name, - dry_run=dry_run) - - # "Direct" byte-compilation: use the py_compile module to compile - # right here, right now. Note that the script generated in indirect - # mode simply calls 'byte_compile()' in direct mode, a weird sort of - # cross-process recursion. Hey, it works! - else: - from py_compile import compile - - for file in py_files: - if file[-3:] != ".py": - # This lets us be lazy and not filter filenames in - # the "install_lib" command. - continue - - # Terminology from the py_compile module: - # cfile - byte-compiled file - # dfile - purported source filename (same as 'file' by default) - if optimize >= 0: - opt = '' if optimize == 0 else optimize - cfile = importlib.util.cache_from_source( - file, optimization=opt) - else: - cfile = importlib.util.cache_from_source(file) - dfile = file - if prefix: - if file[:len(prefix)] != prefix: - raise ValueError("invalid prefix: filename %r doesn't start with %r" - % (file, prefix)) - dfile = dfile[len(prefix):] - if base_dir: - dfile = os.path.join(base_dir, dfile) - - cfile_base = os.path.basename(cfile) - if direct: - if force or newer(file, cfile): - log.info("byte-compiling %s to %s", file, cfile_base) - if not dry_run: - compile(file, cfile, dfile) - else: - log.debug("skipping byte-compilation of %s to %s", - file, cfile_base) - -# byte_compile () - -def rfc822_escape (header): - """Return a version of the string escaped for inclusion in an - RFC-822 header, by ensuring there are 8 spaces space after each newline. - """ - lines = header.split('\n') - sep = '\n' + 8 * ' ' - return sep.join(lines) - -# 2to3 support - -def run_2to3(files, fixer_names=None, options=None, explicit=None): - """Invoke 2to3 on a list of Python files. - The files should all come from the build area, as the - modification is done in-place. 
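For rfc822_escape() just above, the transformation is easiest to see on a concrete header; the removed test_util.py asserted exactly this output:

def rfc822_escape(header):
    # Indent every continuation line of an RFC 822 header by 8 spaces.
    return ('\n' + 8 * ' ').join(header.split('\n'))

print(repr(rfc822_escape('I am a\npoor\nlonesome\nheader\n')))
# 'I am a\n        poor\n        lonesome\n        header\n        '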
To reduce the build time, - only files modified since the last invocation of this - function should be passed in the files argument.""" - - if not files: - return - - # Make this class local, to delay import of 2to3 - from lib2to3.refactor import RefactoringTool, get_fixers_from_package - class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - - if fixer_names is None: - fixer_names = get_fixers_from_package('lib2to3.fixes') - r = DistutilsRefactoringTool(fixer_names, options=options) - r.refactor(files, write=True) - -def copydir_run_2to3(src, dest, template=None, fixer_names=None, - options=None, explicit=None): - """Recursively copy a directory, only copying new and changed files, - running run_2to3 over all newly copied Python modules afterward. - - If you give a template string, it's parsed like a MANIFEST.in. - """ - from distutils.dir_util import mkpath - from distutils.file_util import copy_file - from distutils.filelist import FileList - filelist = FileList() - curdir = os.getcwd() - os.chdir(src) - try: - filelist.findall() - finally: - os.chdir(curdir) - filelist.files[:] = filelist.allfiles - if template: - for line in template.splitlines(): - line = line.strip() - if not line: continue - filelist.process_template_line(line) - copied = [] - for filename in filelist.files: - outname = os.path.join(dest, filename) - mkpath(os.path.dirname(outname)) - res = copy_file(os.path.join(src, filename), outname, update=1) - if res[1]: copied.append(outname) - run_2to3([fn for fn in copied if fn.lower().endswith('.py')], - fixer_names=fixer_names, options=options, explicit=explicit) - return copied - -class Mixin2to3: - '''Mixin class for commands that run 2to3. - To configure 2to3, setup scripts may either change - the class variables, or inherit from individual commands - to override how 2to3 is invoked.''' - - # provide list of fixers to run; - # defaults to all from lib2to3.fixers - fixer_names = None - - # options dictionary - options = None - - # list of fixers to invoke even though they are marked as explicit - explicit = None - - def run_2to3(self, files): - return run_2to3(files, self.fixer_names, self.options, self.explicit) diff --git a/Lib/distutils/version.py b/Lib/distutils/version.py deleted file mode 100644 index c33bebaed26aee..00000000000000 --- a/Lib/distutils/version.py +++ /dev/null @@ -1,347 +0,0 @@ -# -# distutils/version.py -# -# Implements multiple version numbering conventions for the -# Python Module Distribution Utilities. -# -# $Id$ -# - -"""Provides classes to represent module version numbers (one class for -each style of version numbering). There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. 
one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" - -import re - -class Version: - """Abstract base class for version numbering classes. Just provides - constructor (__init__) and reproducer (__repr__), because those - seem to be the same for all version numbering classes; and route - rich comparisons to _cmp. - """ - - def __init__ (self, vstring=None): - if vstring: - self.parse(vstring) - - def __repr__ (self): - return "%s ('%s')" % (self.__class__.__name__, str(self)) - - def __eq__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c == 0 - - def __lt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c < 0 - - def __le__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c <= 0 - - def __gt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c > 0 - - def __ge__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c >= 0 - - -# Interface for version-number classes -- must be implemented -# by the following classes (the concrete ones -- Version should -# be treated as an abstract class). -# __init__ (string) - create and take same action as 'parse' -# (string parameter is optional) -# parse (string) - convert a string representation to whatever -# internal representation is appropriate for -# this style of version numbering -# __str__ (self) - convert back to a string; should be very similar -# (if not identical to) the string supplied to parse -# __repr__ (self) - generate Python code to recreate -# the instance -# _cmp (self, other) - compare two version numbers ('other' may -# be an unparsed version string, or another -# instance of your version class) - - -class StrictVersion (Version): - - """Version numbering for anal retentives and software idealists. - Implements the standard interface for version number classes as - described above. A version number consists of two or three - dot-separated numeric components, with an optional "pre-release" tag - on the end. The pre-release tag consists of the letter 'a' or 'b' - followed by a number. If the numeric components of two version - numbers are equal, then one with a pre-release tag will always - be deemed earlier (lesser) than one without. - - The following are valid version numbers (shown in the order that - would be obtained by sorting according to the supplied cmp function): - - 0.4 0.4.0 (these two are equivalent) - 0.4.1 - 0.5a1 - 0.5b3 - 0.5 - 0.9.6 - 1.0 - 1.0.4a3 - 1.0.4b1 - 1.0.4 - - The following are examples of invalid version numbers: - - 1 - 2.7.2.2 - 1.3.a4 - 1.3pl1 - 1.3c4 - - The rationale for this version numbering system will be explained - in the distutils documentation. - """ - - version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? 
([ab](\d+))?$', - re.VERBOSE | re.ASCII) - - - def parse (self, vstring): - match = self.version_re.match(vstring) - if not match: - raise ValueError("invalid version number '%s'" % vstring) - - (major, minor, patch, prerelease, prerelease_num) = \ - match.group(1, 2, 4, 5, 6) - - if patch: - self.version = tuple(map(int, [major, minor, patch])) - else: - self.version = tuple(map(int, [major, minor])) + (0,) - - if prerelease: - self.prerelease = (prerelease[0], int(prerelease_num)) - else: - self.prerelease = None - - - def __str__ (self): - - if self.version[2] == 0: - vstring = '.'.join(map(str, self.version[0:2])) - else: - vstring = '.'.join(map(str, self.version)) - - if self.prerelease: - vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) - - return vstring - - - def _cmp (self, other): - if isinstance(other, str): - other = StrictVersion(other) - elif not isinstance(other, StrictVersion): - return NotImplemented - - if self.version != other.version: - # numeric versions don't match - # prerelease stuff doesn't matter - if self.version < other.version: - return -1 - else: - return 1 - - # have to compare prerelease - # case 1: neither has prerelease; they're equal - # case 2: self has prerelease, other doesn't; other is greater - # case 3: self doesn't have prerelease, other does: self is greater - # case 4: both have prerelease: must compare them! - - if (not self.prerelease and not other.prerelease): - return 0 - elif (self.prerelease and not other.prerelease): - return -1 - elif (not self.prerelease and other.prerelease): - return 1 - elif (self.prerelease and other.prerelease): - if self.prerelease == other.prerelease: - return 0 - elif self.prerelease < other.prerelease: - return -1 - else: - return 1 - else: - assert False, "never get here" - -# end class StrictVersion - - -# The rules according to Greg Stein: -# 1) a version number has 1 or more numbers separated by a period or by -# sequences of letters. If only periods, then these are compared -# left-to-right to determine an ordering. -# 2) sequences of letters are part of the tuple for comparison and are -# compared lexicographically -# 3) recognize the numeric components may have leading zeroes -# -# The LooseVersion class below implements these rules: a version number -# string is split up into a tuple of integer and string components, and -# comparison is a simple tuple comparison. This means that version -# numbers behave in a predictable and obvious way, but a way that might -# not necessarily be how people *want* version numbers to behave. There -# wouldn't be a problem if people could stick to purely numeric version -# numbers: just split on period and compare the numbers as tuples. -# However, people insist on putting letters into their version numbers; -# the most common purpose seems to be: -# - indicating a "pre-release" version -# ('alpha', 'beta', 'a', 'b', 'pre', 'p') -# - indicating a post-release patch ('p', 'pl', 'patch') -# but of course this can't cover all version number schemes, and there's -# no way to know what a programmer means without asking him. -# -# The problem is what to do with letters (and other non-numeric -# characters) in a version number. The current implementation does the -# obvious and predictable thing: keep them as strings and compare -# lexically within a tuple comparison. This has the desired effect if -# an appended letter sequence implies something "post-release": -# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". 
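The rules sketched in this comment block can be tried out independently of the class below. A standalone key function, reusing the same component regex, makes the "0.99pl14" example concrete; it is a sketch only, and mixed letters and digits can still raise TypeError on comparison in Python 3, which is one reason this scheme was retired:

import re

_component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

def loose_key(vstring):
    """Split '1.5.2b2' into [1, 5, 2, 'b', 2] for tuple-style comparison."""
    parts = [p for p in _component_re.split(vstring) if p and p != '.']
    return [int(p) if p.isdigit() else p for p in parts]

assert loose_key('0.99') < loose_key('0.99pl14') < loose_key('1.0')
assert loose_key('1.5.1') < loose_key('1.5.2b2')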
-# -# However, if letters in a version number imply a pre-release version, -# the "obvious" thing isn't correct. Eg. you would expect that -# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison -# implemented here, this just isn't so. -# -# Two possible solutions come to mind. The first is to tie the -# comparison algorithm to a particular set of semantic rules, as has -# been done in the StrictVersion class above. This works great as long -# as everyone can go along with bondage and discipline. Hopefully a -# (large) subset of Python module programmers will agree that the -# particular flavour of bondage and discipline provided by StrictVersion -# provides enough benefit to be worth using, and will submit their -# version numbering scheme to its domination. The free-thinking -# anarchists in the lot will never give in, though, and something needs -# to be done to accommodate them. -# -# Perhaps a "moderately strict" version class could be implemented that -# lets almost anything slide (syntactically), and makes some heuristic -# assumptions about non-digits in version number strings. This could -# sink into special-case-hell, though; if I was as talented and -# idiosyncratic as Larry Wall, I'd go ahead and implement a class that -# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is -# just as happy dealing with things like "2g6" and "1.13++". I don't -# think I'm smart enough to do it right though. -# -# In any case, I've coded the test suite for this module (see -# ../test/test_version.py) specifically to fail on things like comparing -# "1.2a2" and "1.2". That's not because the *code* is doing anything -# wrong, it's because the simple, obvious design doesn't match my -# complicated, hairy expectations for real-world version numbers. It -# would be a snap to fix the test suite to say, "Yep, LooseVersion does -# the Right Thing" (ie. the code matches the conception). But I'd rather -# have a conception that matches common notions about version numbers. - -class LooseVersion (Version): - - """Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). 
- """ - - component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - - def __init__ (self, vstring=None): - if vstring: - self.parse(vstring) - - - def parse (self, vstring): - # I've given up on thinking I can reconstruct the version string - # from the parsed tuple -- so I just store the string here for - # use by __str__ - self.vstring = vstring - components = [x for x in self.component_re.split(vstring) - if x and x != '.'] - for i, obj in enumerate(components): - try: - components[i] = int(obj) - except ValueError: - pass - - self.version = components - - - def __str__ (self): - return self.vstring - - - def __repr__ (self): - return "LooseVersion ('%s')" % str(self) - - - def _cmp (self, other): - if isinstance(other, str): - other = LooseVersion(other) - elif not isinstance(other, LooseVersion): - return NotImplemented - - if self.version == other.version: - return 0 - if self.version < other.version: - return -1 - if self.version > other.version: - return 1 - - -# end class LooseVersion diff --git a/Lib/distutils/versionpredicate.py b/Lib/distutils/versionpredicate.py deleted file mode 100644 index 062c98f2489951..00000000000000 --- a/Lib/distutils/versionpredicate.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Module for parsing and testing package version predicate strings. -""" -import re -import distutils.version -import operator - - -re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", - re.ASCII) -# (package) (rest) - -re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses -re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") -# (comp) (version) - - -def splitUp(pred): - """Parse a single version comparison. - - Return (comparison string, StrictVersion) - """ - res = re_splitComparison.match(pred) - if not res: - raise ValueError("bad package restriction syntax: %r" % pred) - comp, verStr = res.groups() - return (comp, distutils.version.StrictVersion(verStr)) - -compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, - ">": operator.gt, ">=": operator.ge, "!=": operator.ne} - -class VersionPredicate: - """Parse and test package version predicates. - - >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') - - The `name` attribute provides the full dotted name that is given:: - - >>> v.name - 'pyepat.abc' - - The str() of a `VersionPredicate` provides a normalized - human-readable version of the expression:: - - >>> print(v) - pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) - - The `satisfied_by()` method can be used to determine with a given - version number is included in the set described by the version - restrictions:: - - >>> v.satisfied_by('1.1') - True - >>> v.satisfied_by('1.4') - True - >>> v.satisfied_by('1.0') - False - >>> v.satisfied_by('4444.4') - False - >>> v.satisfied_by('1555.1b3') - False - - `VersionPredicate` is flexible in accepting extra whitespace:: - - >>> v = VersionPredicate(' pat( == 0.1 ) ') - >>> v.name - 'pat' - >>> v.satisfied_by('0.1') - True - >>> v.satisfied_by('0.2') - False - - If any version numbers passed in do not conform to the - restrictions of `StrictVersion`, a `ValueError` is raised:: - - >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') - Traceback (most recent call last): - ... 
- ValueError: invalid version number '1.2zb3' - - It the module or package name given does not conform to what's - allowed as a legal module or package name, `ValueError` is - raised:: - - >>> v = VersionPredicate('foo-bar') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: '-bar' - - >>> v = VersionPredicate('foo bar (12.21)') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: 'bar (12.21)' - - """ - - def __init__(self, versionPredicateStr): - """Parse a version predicate string. - """ - # Fields: - # name: package name - # pred: list of (comparison string, StrictVersion) - - versionPredicateStr = versionPredicateStr.strip() - if not versionPredicateStr: - raise ValueError("empty package restriction") - match = re_validPackage.match(versionPredicateStr) - if not match: - raise ValueError("bad package name in %r" % versionPredicateStr) - self.name, paren = match.groups() - paren = paren.strip() - if paren: - match = re_paren.match(paren) - if not match: - raise ValueError("expected parenthesized list: %r" % paren) - str = match.groups()[0] - self.pred = [splitUp(aPred) for aPred in str.split(",")] - if not self.pred: - raise ValueError("empty parenthesized list in %r" - % versionPredicateStr) - else: - self.pred = [] - - def __str__(self): - if self.pred: - seq = [cond + " " + str(ver) for cond, ver in self.pred] - return self.name + " (" + ", ".join(seq) + ")" - else: - return self.name - - def satisfied_by(self, version): - """True if version is compatible with all the predicates in self. - The parameter version must be acceptable to the StrictVersion - constructor. It may be either a string or StrictVersion. - """ - for cond, ver in self.pred: - if not compmap[cond](version, ver): - return False - return True - - -_provision_rx = None - -def split_provision(value): - """Return the name and optional version number of a provision. - - The version number, if given, will be returned as a `StrictVersion` - instance, otherwise it will be `None`. - - >>> split_provision('mypkg') - ('mypkg', None) - >>> split_provision(' mypkg( 1.2 ) ') - ('mypkg', StrictVersion ('1.2')) - """ - global _provision_rx - if _provision_rx is None: - _provision_rx = re.compile( - r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", - re.ASCII) - value = value.strip() - m = _provision_rx.match(value) - if not m: - raise ValueError("illegal provides specification: %r" % value) - ver = m.group(2) or None - if ver: - ver = distutils.version.StrictVersion(ver) - return m.group(1), ver diff --git a/Lib/email/generator.py b/Lib/email/generator.py index c9b121624e08d5..885e6ba98540a7 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -170,7 +170,7 @@ def _write(self, msg): # parameter. # # The way we do this, so as to make the _handle_*() methods simpler, - # is to cache any subpart writes into a buffer. The we write the + # is to cache any subpart writes into a buffer. Then we write the # headers and the buffer contents. That way, subpart handlers can # Do The Right Thing, and can still modify the Content-Type: header if # necessary. diff --git a/Lib/encodings/idna.py b/Lib/encodings/idna.py index ea4058512fe366..5396047a7fb0b8 100644 --- a/Lib/encodings/idna.py +++ b/Lib/encodings/idna.py @@ -39,23 +39,21 @@ def nameprep(label): # Check bidi RandAL = [stringprep.in_table_d1(x) for x in label] - for c in RandAL: - if c: - # There is a RandAL char in the string. 
Must perform further - # tests: - # 1) The characters in section 5.8 MUST be prohibited. - # This is table C.8, which was already checked - # 2) If a string contains any RandALCat character, the string - # MUST NOT contain any LCat character. - if any(stringprep.in_table_d2(x) for x in label): - raise UnicodeError("Violation of BIDI requirement 2") - - # 3) If a string contains any RandALCat character, a - # RandALCat character MUST be the first character of the - # string, and a RandALCat character MUST be the last - # character of the string. - if not RandAL[0] or not RandAL[-1]: - raise UnicodeError("Violation of BIDI requirement 3") + if any(RandAL): + # There is a RandAL char in the string. Must perform further + # tests: + # 1) The characters in section 5.8 MUST be prohibited. + # This is table C.8, which was already checked + # 2) If a string contains any RandALCat character, the string + # MUST NOT contain any LCat character. + if any(stringprep.in_table_d2(x) for x in label): + raise UnicodeError("Violation of BIDI requirement 2") + # 3) If a string contains any RandALCat character, a + # RandALCat character MUST be the first character of the + # string, and a RandALCat character MUST be the last + # character of the string. + if not RandAL[0] or not RandAL[-1]: + raise UnicodeError("Violation of BIDI requirement 3") return label @@ -103,6 +101,16 @@ def ToASCII(label): raise UnicodeError("label empty or too long") def ToUnicode(label): + if len(label) > 1024: + # Protection from https://github.com/python/cpython/issues/98433. + # https://datatracker.ietf.org/doc/html/rfc5894#section-6 + # doesn't specify a label size limit prior to NAMEPREP. But having + # one makes practical sense. + # This leaves ample room for nameprep() to remove Nothing characters + # per https://www.rfc-editor.org/rfc/rfc3454#section-3.1 while still + # preventing us from wasting time decoding a big thing that'll just + # hit the actual <= 63 length limit in Step 6. 
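A hedged illustration of the guard being introduced here: with the 1024-byte cap in place, an oversized ACE label is rejected before any nameprep or punycode work is attempted. The byte string below is a synthetic oversized label, not data from the referenced issue.

import codecs

label = b"xn--" + b"a" * 2000        # far longer than any legal DNS label
try:
    codecs.decode(label, "idna")
except UnicodeError as exc:
    print(exc)                        # label way too long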
+ raise UnicodeError("label way too long") # Step 1: Check for ASCII if isinstance(label, bytes): pure_ascii = True diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 33c91801b0aa85..1a2f57c07ba341 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,8 +10,8 @@ __all__ = ["version", "bootstrap"] _PACKAGE_NAMES = ('setuptools', 'pip') -_SETUPTOOLS_VERSION = "63.2.0" -_PIP_VERSION = "22.2.2" +_SETUPTOOLS_VERSION = "65.5.0" +_PIP_VERSION = "22.3.1" _PROJECTS = [ ("setuptools", _SETUPTOOLS_VERSION, "py3"), ("pip", _PIP_VERSION, "py3"), diff --git a/Lib/ensurepip/_bundled/pip-22.2.2-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl similarity index 62% rename from Lib/ensurepip/_bundled/pip-22.2.2-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl index 03099718b0bcf4..c5b7753e757df2 100644 Binary files a/Lib/ensurepip/_bundled/pip-22.2.2-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/setuptools-63.2.0-py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl similarity index 61% rename from Lib/ensurepip/_bundled/setuptools-63.2.0-py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl index e3b5446eb59cb5..123a13e2c6b254 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-63.2.0-py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl differ diff --git a/Lib/enum.py b/Lib/enum.py index a66c344dbc3db2..1b683c702d59b4 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -114,9 +114,12 @@ def _break_on_call_reduce(self, proto): setattr(obj, '__module__', '') def _iter_bits_lsb(num): - # num must be an integer + # num must be a positive integer + original = num if isinstance(num, Enum): num = num.value + if num < 0: + raise ValueError('%r is not a positive integer' % original) while num: b = num & (~num + 1) yield b @@ -171,7 +174,8 @@ class auto: """ Instances are replaced with an appropriate value in Enum class suites. 
""" - value = _auto_null + def __init__(self, value=_auto_null): + self.value = value def __repr__(self): return "auto(%r)" % self.value @@ -416,7 +420,7 @@ def __setitem__(self, key, value): value = value.value elif _is_descriptor(value): pass - # TODO: uncomment next three lines in 3.12 + # TODO: uncomment next three lines in 3.13 # elif _is_internal_class(self._cls_name, value): # # do nothing, name will be a normal attribute # pass @@ -427,15 +431,31 @@ def __setitem__(self, key, value): elif isinstance(value, member): # unwrap value here -- it will become a member value = value.value + non_auto_store = True + single = False if isinstance(value, auto): - if value.value == _auto_null: - value.value = self._generate_next_value( - key, 1, len(self._member_names), self._last_values[:], - ) - self._auto_called = True - value = value.value + single = True + value = (value, ) + if isinstance(value, tuple): + auto_valued = [] + for v in value: + if isinstance(v, auto): + non_auto_store = False + if v.value == _auto_null: + v.value = self._generate_next_value( + key, 1, len(self._member_names), self._last_values[:], + ) + self._auto_called = True + v = v.value + self._last_values.append(v) + auto_valued.append(v) + if single: + value = auto_valued[0] + else: + value = tuple(auto_valued) self._member_names[key] = None - self._last_values.append(value) + if non_auto_store: + self._last_values.append(value) super().__setitem__(key, value) def update(self, members, **more_members): @@ -672,7 +692,7 @@ def __bool__(cls): """ return True - def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None): + def __call__(cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None): """ Either returns an existing member, or creates a new enum class. @@ -680,6 +700,8 @@ def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, s to an enumeration member (i.e. Color(3)) and for the functional API (i.e. Color = Enum('Color', names='RED GREEN BLUE')). + The value lookup branch is chosen if the enum is final. + When used for the functional API: `value` will be the name of the new class. @@ -697,12 +719,15 @@ def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, s `type`, if set, will be mixed in as the first base class. """ - if names is None: # simple value lookup + if cls._member_map_: + # simple value lookup if members exist + if names: + value = (value, names) + values return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type return cls._create_( - value, - names, + class_name=value, + names=names, module=module, qualname=qualname, type=type, @@ -1822,6 +1847,9 @@ def __call__(self, enumeration): if name in member_names: # not an alias continue + if alias.value < 0: + # negative numbers are not checked + continue values = list(_iter_bits_lsb(alias.value)) missed = [v for v in values if v not in member_values] if missed: diff --git a/Lib/ftplib.py b/Lib/ftplib.py index dc9a8afbd8d247..c7ca8f632e1bd4 100644 --- a/Lib/ftplib.py +++ b/Lib/ftplib.py @@ -713,28 +713,12 @@ class FTP_TLS(FTP): '221 Goodbye.' 
>>> ''' - ssl_version = ssl.PROTOCOL_TLS_CLIENT def __init__(self, host='', user='', passwd='', acct='', - keyfile=None, certfile=None, context=None, - timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *, - encoding='utf-8'): - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile + *, context=None, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None, encoding='utf-8'): if context is None: - context = ssl._create_stdlib_context(self.ssl_version, - certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.context = context self._prot_p = False super().__init__(host, user, passwd, acct, @@ -749,7 +733,7 @@ def auth(self): '''Set up secure control connection by using TLS/SSL.''' if isinstance(self.sock, ssl.SSLSocket): raise ValueError("Already using TLS") - if self.ssl_version >= ssl.PROTOCOL_TLS: + if self.context.protocol >= ssl.PROTOCOL_TLS: resp = self.voidcmd('AUTH TLS') else: resp = self.voidcmd('AUTH SSL') diff --git a/Lib/gzip.py b/Lib/gzip.py index 8edcda4493c894..75c6ddc3f2cffb 100644 --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -21,6 +21,8 @@ _COMPRESS_LEVEL_TRADEOFF = 6 _COMPRESS_LEVEL_BEST = 9 +READ_BUFFER_SIZE = 128 * 1024 + def open(filename, mode="rb", compresslevel=_COMPRESS_LEVEL_BEST, encoding=None, errors=None, newline=None): @@ -446,7 +448,7 @@ def _read_gzip_header(fp): class _GzipReader(_compression.DecompressReader): def __init__(self, fp): - super().__init__(_PaddedFile(fp), zlib.decompressobj, + super().__init__(_PaddedFile(fp), zlib._ZlibDecompressor, wbits=-zlib.MAX_WBITS) # Set flag indicating start of a new member self._new_member = True @@ -494,12 +496,13 @@ def read(self, size=-1): self._new_member = False # Read a chunk of data from the file - buf = self._fp.read(io.DEFAULT_BUFFER_SIZE) + if self._decompressor.needs_input: + buf = self._fp.read(READ_BUFFER_SIZE) + uncompress = self._decompressor.decompress(buf, size) + else: + uncompress = self._decompressor.decompress(b"", size) - uncompress = self._decompressor.decompress(buf, size) - if self._decompressor.unconsumed_tail != b"": - self._fp.prepend(self._decompressor.unconsumed_tail) - elif self._decompressor.unused_data != b"": + if self._decompressor.unused_data != b"": # Prepend the already read bytes to the fileobj so they can # be seen by _read_eof() and _read_gzip_header() self._fp.prepend(self._decompressor.unused_data) @@ -510,14 +513,11 @@ def read(self, size=-1): raise EOFError("Compressed file ended before the " "end-of-stream marker was reached") - self._add_read_data( uncompress ) + self._crc = zlib.crc32(uncompress, self._crc) + self._stream_size += len(uncompress) self._pos += len(uncompress) return uncompress - def _add_read_data(self, data): - self._crc = zlib.crc32(data, self._crc) - self._stream_size = self._stream_size + len(data) - def _read_eof(self): # We've read to the end of the file # We check that the computed CRC and size of the @@ -647,7 +647,7 @@ def main(): f = builtins.open(arg, "rb") g = open(arg + ".gz", "wb") while True: - chunk = f.read(io.DEFAULT_BUFFER_SIZE) + chunk = f.read(READ_BUFFER_SIZE) if not chunk: break 
g.write(chunk) diff --git a/Lib/html/entities.py b/Lib/html/entities.py index cc59bc314499ad..eb6dc12190586d 100644 --- a/Lib/html/entities.py +++ b/Lib/html/entities.py @@ -261,7 +261,7 @@ # HTML5 named character references -# Generated by 'Tools/scripts/parse_html5_entities.py' +# Generated by Tools/build/parse_html5_entities.py # from https://html.spec.whatwg.org/entities.json and # https://html.spec.whatwg.org/multipage/named-characters.html. # Map HTML5 named character references to the equivalent Unicode character(s). diff --git a/Lib/http/client.py b/Lib/http/client.py index 0720990f84e7ed..0a3e950c669622 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -1414,33 +1414,14 @@ class HTTPSConnection(HTTPConnection): default_port = HTTPS_PORT - # XXX Should key_file and cert_file be deprecated in favour of context? - - def __init__(self, host, port=None, key_file=None, cert_file=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, *, context=None, - check_hostname=None, blocksize=8192): + def __init__(self, host, port=None, + *, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, context=None, blocksize=8192): super(HTTPSConnection, self).__init__(host, port, timeout, source_address, blocksize=blocksize) - if (key_file is not None or cert_file is not None or - check_hostname is not None): - import warnings - warnings.warn("key_file, cert_file and check_hostname are " - "deprecated, use a custom context instead.", - DeprecationWarning, 2) - self.key_file = key_file - self.cert_file = cert_file if context is None: context = _create_https_context(self._http_vsn) - if check_hostname is not None: - context.check_hostname = check_hostname - if key_file or cert_file: - context.load_cert_chain(cert_file, key_file) - # cert and key file means the user wants to authenticate. - # enable TLS 1.3 PHA implicitly even for custom contexts. - if context.post_handshake_auth is not None: - context.post_handshake_auth = True self._context = context def connect(self): diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 7fa7facf8cf780..e64e96f75e6cfc 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -4,6 +4,11 @@ Released on 2022-10-03 ========================= +gh-97527: Fix a bug in the previous bugfix that caused IDLE to not +start when run with 3.10.8, 3.12.0a1, and at least Microsoft Python +3.10.2288.0 installed without the Lib/test package. 3.11.0 was never +affected. + gh-65802: Document handling of extensions in Save As dialogs. gh-95191: Include prompts when saving Shell (interactive input/output). diff --git a/Lib/idlelib/idle_test/test_run.py b/Lib/idlelib/idle_test/test_run.py index ec4637c5ca617a..a38e43dcb9d1c4 100644 --- a/Lib/idlelib/idle_test/test_run.py +++ b/Lib/idlelib/idle_test/test_run.py @@ -39,7 +39,8 @@ def __eq__(self, other): data = (('1/0', ZeroDivisionError, "division by zero\n"), ('abc', NameError, "name 'abc' is not defined. " - "Did you mean: 'abs'?\n"), + "Did you mean: 'abs'? " + "Or did you forget to import 'abc'?\n"), ('int.reel', AttributeError, "type object 'int' has no attribute 'reel'. " "Did you mean: 'real'?\n"), diff --git a/Lib/idlelib/macosx.py b/Lib/idlelib/macosx.py index 12399f152aea2a..2ea02ec04d661a 100644 --- a/Lib/idlelib/macosx.py +++ b/Lib/idlelib/macosx.py @@ -4,7 +4,6 @@ from os.path import expanduser import plistlib from sys import platform # Used in _init_tk_type, changed by test. 
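A hedged migration sketch for the http.client hunk above; the same pattern applies to the ftplib.FTP_TLS and imaplib.IMAP4_SSL changes elsewhere in this diff. The removed key_file/cert_file/check_hostname parameters are replaced by a caller-built ssl.SSLContext, and the certificate paths below are placeholders.

import http.client
import ssl

context = ssl.create_default_context()
context.load_cert_chain(certfile='client.pem', keyfile='client.key')  # placeholder paths

conn = http.client.HTTPSConnection('www.example.org', context=context)
conn.request('GET', '/')
print(conn.getresponse().status)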
-from test.support import requires, ResourceDenied import tkinter @@ -16,27 +15,38 @@ def _init_tk_type(): """ Initialize _tk_type for isXyzTk functions. + + This function is only called once, when _tk_type is still None. """ global _tk_type if platform == 'darwin': - try: - requires('gui') - except ResourceDenied: # Possible when testing. - _tk_type = "cocoa" # Newest and most common. - else: - root = tkinter.Tk() - ws = root.tk.call('tk', 'windowingsystem') - if 'x11' in ws: - _tk_type = "xquartz" - elif 'aqua' not in ws: - _tk_type = "other" - elif 'AppKit' in root.tk.call('winfo', 'server', '.'): + + # When running IDLE, GUI is present, test/* may not be. + # When running tests, test/* is present, GUI may not be. + # If not, guess most common. Does not matter for testing. + from idlelib.__init__ import testing + if testing: + from test.support import requires, ResourceDenied + try: + requires('gui') + except ResourceDenied: _tk_type = "cocoa" - else: - _tk_type = "carbon" - root.destroy() + return + + root = tkinter.Tk() + ws = root.tk.call('tk', 'windowingsystem') + if 'x11' in ws: + _tk_type = "xquartz" + elif 'aqua' not in ws: + _tk_type = "other" + elif 'AppKit' in root.tk.call('winfo', 'server', '.'): + _tk_type = "cocoa" + else: + _tk_type = "carbon" + root.destroy() else: _tk_type = "other" + return def isAquaTk(): """ @@ -164,9 +174,8 @@ def overrideRootMenu(root, flist): del mainmenu.menudefs[-3][1][0:2] menubar = Menu(root) root.configure(menu=menubar) - menudict = {} - menudict['window'] = menu = Menu(menubar, name='window', tearoff=0) + menu = Menu(menubar, name='window', tearoff=0) menubar.add_cascade(label='Window', menu=menu, underline=0) def postwindowsmenu(menu=menu): @@ -216,8 +225,7 @@ def help_dialog(event=None): if isCarbonTk(): # for Carbon AquaTk, replace the default Tk apple menu - menudict['application'] = menu = Menu(menubar, name='apple', - tearoff=0) + menu = Menu(menubar, name='apple', tearoff=0) menubar.add_cascade(label='IDLE', menu=menu) mainmenu.menudefs.insert(0, ('application', [ diff --git a/Lib/imaplib.py b/Lib/imaplib.py index fa4c0f8f62361a..577b4b9b03a88d 100644 --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -1285,16 +1285,12 @@ class IMAP4_SSL(IMAP4): """IMAP4 client class over SSL connection - Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context[, timeout=None]]]]]]) + Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) host - host's name (default: localhost); port - port number (default: standard IMAP4 SSL port); - keyfile - PEM formatted file that contains your private key (default: None); - certfile - PEM formatted certificate chain file (default: None); ssl_context - a SSLContext object that contains your certificate chain and private key (default: None) - Note: if ssl_context is provided, then parameters keyfile or - certfile should not be set otherwise ValueError is raised. 
timeout - socket timeout (default: None) If timeout is not given or is None, the global default socket timeout is used @@ -1302,23 +1298,10 @@ class IMAP4_SSL(IMAP4): """ - def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, - certfile=None, ssl_context=None, timeout=None): - if ssl_context is not None and keyfile is not None: - raise ValueError("ssl_context and keyfile arguments are mutually " - "exclusive") - if ssl_context is not None and certfile is not None: - raise ValueError("ssl_context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom ssl_context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile + def __init__(self, host='', port=IMAP4_SSL_PORT, + *, ssl_context=None, timeout=None): if ssl_context is None: - ssl_context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + ssl_context = ssl._create_stdlib_context() self.ssl_context = ssl_context IMAP4.__init__(self, host, port, timeout) diff --git a/Lib/imghdr.py b/Lib/imghdr.py index 6a372e66c7f261..33868883470764 100644 --- a/Lib/imghdr.py +++ b/Lib/imghdr.py @@ -14,6 +14,7 @@ #-------------------------# def what(file, h=None): + """Return the type of image contained in a file or byte stream.""" f = None try: if h is None: @@ -40,7 +41,7 @@ def what(file, h=None): tests = [] def test_jpeg(h, f): - """JPEG data with JFIF or Exif markers; and raw JPEG""" + """Test for JPEG data with JFIF or Exif markers; and raw JPEG.""" if h[6:10] in (b'JFIF', b'Exif'): return 'jpeg' elif h[:4] == b'\xff\xd8\xff\xdb': @@ -49,34 +50,35 @@ def test_jpeg(h, f): tests.append(test_jpeg) def test_png(h, f): + """Verify if the image is a PNG.""" if h.startswith(b'\211PNG\r\n\032\n'): return 'png' tests.append(test_png) def test_gif(h, f): - """GIF ('87 and '89 variants)""" + """Verify if the image is a GIF ('87 or '89 variants).""" if h[:6] in (b'GIF87a', b'GIF89a'): return 'gif' tests.append(test_gif) def test_tiff(h, f): - """TIFF (can be in Motorola or Intel byte order)""" + """Verify if the image is a TIFF (can be in Motorola or Intel byte order).""" if h[:2] in (b'MM', b'II'): return 'tiff' tests.append(test_tiff) def test_rgb(h, f): - """SGI image library""" + """test for the SGI image library.""" if h.startswith(b'\001\332'): return 'rgb' tests.append(test_rgb) def test_pbm(h, f): - """PBM (portable bitmap)""" + """Verify if the image is a PBM (portable bitmap).""" if len(h) >= 3 and \ h[0] == ord(b'P') and h[1] in b'14' and h[2] in b' \t\n\r': return 'pbm' @@ -84,7 +86,7 @@ def test_pbm(h, f): tests.append(test_pbm) def test_pgm(h, f): - """PGM (portable graymap)""" + """Verify if the image is a PGM (portable graymap).""" if len(h) >= 3 and \ h[0] == ord(b'P') and h[1] in b'25' and h[2] in b' \t\n\r': return 'pgm' @@ -92,7 +94,7 @@ def test_pgm(h, f): tests.append(test_pgm) def test_ppm(h, f): - """PPM (portable pixmap)""" + """Verify if the image is a PPM (portable pixmap).""" if len(h) >= 3 and \ h[0] == ord(b'P') and h[1] in b'36' and h[2] in b' \t\n\r': return 'ppm' @@ -100,32 +102,35 @@ def test_ppm(h, f): tests.append(test_ppm) def test_rast(h, f): - """Sun raster file""" + """test for the Sun raster file.""" if h.startswith(b'\x59\xA6\x6A\x95'): return 'rast' tests.append(test_rast) def test_xbm(h, f): - """X bitmap (X10 or X11)""" + """Verify if the image is a X bitmap (X10 or X11).""" if h.startswith(b'#define '): return 'xbm' 
tests.append(test_xbm) def test_bmp(h, f): + """Verify if the image is a BMP file.""" if h.startswith(b'BM'): return 'bmp' tests.append(test_bmp) def test_webp(h, f): + """Verify if the image is a WebP.""" if h.startswith(b'RIFF') and h[8:12] == b'WEBP': return 'webp' tests.append(test_webp) def test_exr(h, f): + """Verify if the image is an OpenEXR file.""" if h.startswith(b'\x76\x2f\x31\x01'): return 'exr' diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py index ce61883288aa35..21d9dee652b3df 100644 --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -84,13 +84,13 @@ def find_loader(name, path=None): try: loader = sys.modules[name].__loader__ if loader is None: - raise ValueError('{}.__loader__ is None'.format(name)) + raise ValueError(f'{name}.__loader__ is None') else: return loader except KeyError: pass except AttributeError: - raise ValueError('{}.__loader__ is not set'.format(name)) from None + raise ValueError(f'{name}.__loader__ is not set') from None spec = _bootstrap._find_spec(name, path) # We won't worry about malformed specs (missing attributes). @@ -98,8 +98,7 @@ def find_loader(name, path=None): return None if spec.loader is None: if spec.submodule_search_locations is None: - raise ImportError('spec for {} missing loader'.format(name), - name=name) + raise ImportError(f'spec for {name} missing loader', name=name) raise ImportError('namespace packages do not have loaders', name=name) return spec.loader @@ -116,9 +115,8 @@ def import_module(name, package=None): level = 0 if name.startswith('.'): if not package: - msg = ("the 'package' argument is required to perform a relative " - "import for {!r}") - raise TypeError(msg.format(name)) + raise TypeError("the 'package' argument is required to perform a " + f"relative import for {name!r}") for character in name: if character != '.': break @@ -144,8 +142,7 @@ def reload(module): raise TypeError("reload() argument must be a module") if sys.modules.get(name) is not module: - msg = "module {} not in sys.modules" - raise ImportError(msg.format(name), name=name) + raise ImportError(f"module {name} not in sys.modules", name=name) if name in _RELOADING: return _RELOADING[name] _RELOADING[name] = module @@ -155,8 +152,7 @@ def reload(module): try: parent = sys.modules[parent_name] except KeyError: - msg = "parent {!r} not in sys.modules" - raise ImportError(msg.format(parent_name), + raise ImportError(f"parent {parent_name!r} not in sys.modules", name=parent_name) from None else: pkgpath = parent.__path__ diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index 1c132106ce5a8f..1a1d5cf7c9c33e 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -136,7 +136,7 @@ def release(self): self.wakeup.release() def __repr__(self): - return '_ModuleLock({!r}) at {}'.format(self.name, id(self)) + return f'_ModuleLock({self.name!r}) at {id(self)}' class _DummyModuleLock: @@ -157,7 +157,7 @@ def release(self): self.count -= 1 def __repr__(self): - return '_DummyModuleLock({!r}) at {}'.format(self.name, id(self)) + return f'_DummyModuleLock({self.name!r}) at {id(self)}' class _ModuleLockManager: @@ -253,7 +253,7 @@ def _requires_builtin(fxn): """Decorator to verify the named module is built-in.""" def _requires_builtin_wrapper(self, fullname): if fullname not in sys.builtin_module_names: - raise ImportError('{!r} is not a built-in module'.format(fullname), + raise ImportError(f'{fullname!r} is not a built-in module', name=fullname) return fxn(self, fullname)
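A small hedged example for the imghdr hunks above: what() accepts raw header bytes through the h parameter, in which case the file argument is ignored, so the documented testers can be exercised without touching the filesystem.

import imghdr

png_header = b'\x89PNG\r\n\x1a\n' + bytes(24)       # what() normally reads 32 bytes
print(imghdr.what(None, h=png_header))              # 'png'
print(imghdr.what(None, h=b'GIF89a' + bytes(26)))   # 'gif'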
_wrap(_requires_builtin_wrapper, fxn) @@ -264,7 +264,7 @@ def _requires_frozen(fxn): """Decorator to verify the named module is frozen.""" def _requires_frozen_wrapper(self, fullname): if not _imp.is_frozen(fullname): - raise ImportError('{!r} is not a frozen module'.format(fullname), + raise ImportError(f'{fullname!r} is not a frozen module', name=fullname) return fxn(self, fullname) _wrap(_requires_frozen_wrapper, fxn) @@ -305,11 +305,11 @@ def _module_repr(module): filename = module.__file__ except AttributeError: if loader is None: - return ''.format(name) + return f'' else: - return ''.format(name, loader) + return f'' else: - return ''.format(name, filename) + return f'' class ModuleSpec: @@ -363,14 +363,12 @@ def __init__(self, name, loader, *, origin=None, loader_state=None, self._cached = None def __repr__(self): - args = ['name={!r}'.format(self.name), - 'loader={!r}'.format(self.loader)] + args = [f'name={self.name!r}', f'loader={self.loader!r}'] if self.origin is not None: - args.append('origin={!r}'.format(self.origin)) + args.append(f'origin={self.origin!r}') if self.submodule_search_locations is not None: - args.append('submodule_search_locations={}' - .format(self.submodule_search_locations)) - return '{}({})'.format(self.__class__.__name__, ', '.join(args)) + args.append(f'submodule_search_locations={self.submodule_search_locations}') + return f'{self.__class__.__name__}({", ".join(args)})' def __eq__(self, other): smsl = self.submodule_search_locations @@ -580,14 +578,14 @@ def _module_repr_from_spec(spec): name = '?' if spec.name is None else spec.name if spec.origin is None: if spec.loader is None: - return ''.format(name) + return f'' else: - return ''.format(name, spec.loader) + return f'' else: if spec.has_location: - return ''.format(name, spec.origin) + return f'' else: - return ''.format(spec.name, spec.origin) + return f'' # Used by importlib.reload() and _load_module_shim(). 
@@ -596,7 +594,7 @@ def _exec(spec, module): name = spec.name with _ModuleLockManager(name): if sys.modules.get(name) is not module: - msg = 'module {!r} not in sys.modules'.format(name) + msg = f'module {name!r} not in sys.modules' raise ImportError(msg, name=name) try: if spec.loader is None: @@ -756,7 +754,7 @@ def find_module(cls, fullname, path=None): def create_module(spec): """Create a built-in module""" if spec.name not in sys.builtin_module_names: - raise ImportError('{!r} is not a built-in module'.format(spec.name), + raise ImportError(f'{spec.name!r} is not a built-in module', name=spec.name) return _call_with_frames_removed(_imp.create_builtin, spec) @@ -1012,7 +1010,7 @@ def _resolve_name(name, package, level): if len(bits) < level: raise ImportError('attempted relative import beyond top-level package') base = bits[0] - return '{}.{}'.format(base, name) if name else base + return f'{base}.{name}' if name else base def _find_spec_legacy(finder, name, path): @@ -1075,7 +1073,7 @@ def _find_spec(name, path, target=None): def _sanity_check(name, package, level): """Verify arguments are "sane".""" if not isinstance(name, str): - raise TypeError('module name must be str, not {}'.format(type(name))) + raise TypeError(f'module name must be str, not {type(name)}') if level < 0: raise ValueError('level must be >= 0') if level > 0: @@ -1105,13 +1103,13 @@ def _find_and_load_unlocked(name, import_): try: path = parent_module.__path__ except AttributeError: - msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent) + msg = f'{_ERR_MSG_PREFIX} {name!r}; {parent!r} is not a package' raise ModuleNotFoundError(msg, name=name) from None parent_spec = parent_module.__spec__ child = name.rpartition('.')[2] spec = _find_spec(name, path) if spec is None: - raise ModuleNotFoundError(_ERR_MSG.format(name), name=name) + raise ModuleNotFoundError(f'{_ERR_MSG_PREFIX}{name!r}', name=name) else: if parent_spec: # Temporarily add child we are currently importing to parent's @@ -1156,8 +1154,7 @@ def _find_and_load(name, import_): _lock_unlock_module(name) if module is None: - message = ('import of {} halted; ' - 'None in sys.modules'.format(name)) + message = f'import of {name} halted; None in sys.modules' raise ModuleNotFoundError(message, name=name) return module @@ -1201,7 +1198,7 @@ def _handle_fromlist(module, fromlist, import_, *, recursive=False): _handle_fromlist(module, module.__all__, import_, recursive=True) elif not hasattr(module, x): - from_name = '{}.{}'.format(module.__name__, x) + from_name = f'{module.__name__}.{x}' try: _call_with_frames_removed(import_, from_name) except ModuleNotFoundError as exc: diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index b3c31b9659d849..71a16064b8ec0a 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -182,12 +182,22 @@ def _path_isabs(path): return path.startswith(path_separators) +def _path_abspath(path): + """Replacement for os.path.abspath.""" + if not _path_isabs(path): + for sep in path_separators: + path = path.removeprefix(f".{sep}") + return _path_join(_os.getcwd(), path) + else: + return path + + def _write_atomic(path, data, mode=0o666): """Best-effort function to write data to a path atomically. Be prepared to handle a FileExistsError if concurrent writing of the temporary file is attempted.""" # id() is used to generate a pseudo-random filename. 
- path_tmp = '{}.{}'.format(path, id(path)) + path_tmp = f'{path}.{id(path)}' fd = _os.open(path_tmp, _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666) try: @@ -413,6 +423,9 @@ def _write_atomic(path, data, mode=0o666): # Python 3.12a1 3507 (Set lineno of module's RESUME to 0) # Python 3.12a1 3508 (Add CLEANUP_THROW) # Python 3.12a1 3509 (Conditional jumps only jump forward) +# Python 3.12a1 3510 (FOR_ITER leaves iterator on the stack) +# Python 3.12a1 3511 (Add STOPITERATION_ERROR instruction) +# Python 3.12a1 3512 (Remove all unused consts from code objects) # Python 3.13 will start with 3550 @@ -425,7 +438,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. -MAGIC_NUMBER = (3509).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3512).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c @@ -482,8 +495,8 @@ def cache_from_source(path, debug_override=None, *, optimization=None): optimization = str(optimization) if optimization != '': if not optimization.isalnum(): - raise ValueError('{!r} is not alphanumeric'.format(optimization)) - almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization) + raise ValueError(f'{optimization!r} is not alphanumeric') + almost_filename = f'{almost_filename}.{_OPT}{optimization}' filename = almost_filename + BYTECODE_SUFFIXES[0] if sys.pycache_prefix is not None: # We need an absolute path to the py file to avoid the possibility of @@ -494,8 +507,7 @@ def cache_from_source(path, debug_override=None, *, optimization=None): # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`. - if not _path_isabs(head): - head = _path_join(_os.getcwd(), head) + head = _path_abspath(head) # Strip initial drive from a Windows path. We know we have an absolute # path here, so the second part of the check rules out a POSIX path that @@ -642,8 +654,8 @@ def _find_module_shim(self, fullname): # return None. loader, portions = self.find_loader(fullname) if loader is None and len(portions): - msg = 'Not importing directory {}: missing __init__' - _warnings.warn(msg.format(portions[0]), ImportWarning) + msg = f'Not importing directory {portions[0]}: missing __init__' + _warnings.warn(msg, ImportWarning) return loader @@ -741,7 +753,7 @@ def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None): _imp._fix_co_filename(code, source_path) return code else: - raise ImportError('Non-code object in {!r}'.format(bytecode_path), + raise ImportError(f'Non-code object in {bytecode_path!r}', name=name, path=bytecode_path) @@ -808,11 +820,10 @@ def spec_from_file_location(name, location=None, *, loader=None, pass else: location = _os.fspath(location) - if not _path_isabs(location): - try: - location = _path_join(_os.getcwd(), location) - except OSError: - pass + try: + location = _path_abspath(location) + except OSError: + pass # If the location is on the filesystem, but doesn't actually exist, # we could return None here, indicating that the location is not @@ -854,6 +865,54 @@ def spec_from_file_location(name, location=None, *, loader=None, return spec +def _bless_my_loader(module_globals): + """Helper function for _warnings.c + + See GH#97850 for details. 
+ """ + # 2022-10-06(warsaw): For now, this helper is only used in _warnings.c and + # that use case only has the module globals. This function could be + # extended to accept either that or a module object. However, in the + # latter case, it would be better to raise certain exceptions when looking + # at a module, which should have either a __loader__ or __spec__.loader. + # For backward compatibility, it is possible that we'll get an empty + # dictionary for the module globals, and that cannot raise an exception. + if not isinstance(module_globals, dict): + return None + + missing = object() + loader = module_globals.get('__loader__', None) + spec = module_globals.get('__spec__', missing) + + if loader is None: + if spec is missing: + # If working with a module: + # raise AttributeError('Module globals is missing a __spec__') + return None + elif spec is None: + raise ValueError('Module globals is missing a __spec__.loader') + + spec_loader = getattr(spec, 'loader', missing) + + if spec_loader in (missing, None): + if loader is None: + exc = AttributeError if spec_loader is missing else ValueError + raise exc('Module globals is missing a __spec__.loader') + _warnings.warn( + 'Module globals is missing a __spec__.loader', + DeprecationWarning) + spec_loader = loader + + assert spec_loader is not None + if loader is not None and loader != spec_loader: + _warnings.warn( + 'Module globals; __loader__ != __spec__.loader', + DeprecationWarning) + return loader + + return spec_loader + + # Loaders ##################################################################### class WindowsRegistryFinder: @@ -943,8 +1002,8 @@ def exec_module(self, module): """Execute the module.""" code = self.get_code(module.__name__) if code is None: - raise ImportError('cannot load module {!r} when get_code() ' - 'returns None'.format(module.__name__)) + raise ImportError(f'cannot load module {module.__name__!r} when ' + 'get_code() returns None') _bootstrap._call_with_frames_removed(exec, code, module.__dict__) def load_module(self, fullname): @@ -1085,7 +1144,8 @@ def get_code(self, fullname): source_mtime is not None): if hash_based: if source_hash is None: - source_hash = _imp.source_hash(source_bytes) + source_hash = _imp.source_hash(_RAW_MAGIC_NUMBER, + source_bytes) data = _code_to_hash_pyc(code_object, source_hash, check_source) else: data = _code_to_timestamp_pyc(code_object, source_mtime, @@ -1329,7 +1389,7 @@ def __len__(self): return len(self._recalculate()) def __repr__(self): - return '_NamespacePath({!r})'.format(self._path) + return f'_NamespacePath({self._path!r})' def __contains__(self, item): return item in self._recalculate() @@ -1564,10 +1624,8 @@ def __init__(self, path, *loader_details): # Base (directory) path if not path or path == '.': self.path = _os.getcwd() - elif not _path_isabs(path): - self.path = _path_join(_os.getcwd(), path) else: - self.path = path + self.path = _path_abspath(path) self._path_mtime = -1 self._path_cache = set() self._relaxed_path_cache = set() @@ -1672,7 +1730,7 @@ def _fill_cache(self): for item in contents: name, dot, suffix = item.partition('.') if dot: - new_name = '{}.{}'.format(name, suffix.lower()) + new_name = f'{name}.{suffix.lower()}' else: new_name = name lower_suffix_contents.add(new_name) @@ -1699,7 +1757,7 @@ def path_hook_for_FileFinder(path): return path_hook_for_FileFinder def __repr__(self): - return 'FileFinder({!r})'.format(self.path) + return f'FileFinder({self.path!r})' # Import setup 
############################################################### @@ -1717,6 +1775,8 @@ def _fix_up_module(ns, name, pathname, cpathname=None): loader = SourceFileLoader(name, pathname) if not spec: spec = spec_from_file_location(name, pathname, loader=loader) + if cpathname: + spec.cached = _path_abspath(cpathname) try: ns['__spec__'] = spec ns['__loader__'] = loader diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py index b01de145c33671..40ab1a1aaac328 100644 --- a/Lib/importlib/metadata/__init__.py +++ b/Lib/importlib/metadata/__init__.py @@ -24,7 +24,7 @@ from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import List, Mapping, Optional, Union +from typing import List, Mapping, Optional __all__ = [ @@ -134,6 +134,7 @@ class DeprecatedTuple: 1 """ + # Do not remove prior to 2023-05-01 or Python 3.13 _warn = functools.partial( warnings.warn, "EntryPoint tuple interface is deprecated. Access members by name.", @@ -184,6 +185,10 @@ class EntryPoint(DeprecatedTuple): following the attr, and following any extras. """ + name: str + value: str + group: str + dist: Optional['Distribution'] = None def __init__(self, name, value, group): @@ -218,17 +223,6 @@ def _for(self, dist): vars(self).update(dist=dist) return self - def __iter__(self): - """ - Supply iter so one may construct dicts of EntryPoints by name. - """ - msg = ( - "Construction of dict of EntryPoints is deprecated in " - "favor of EntryPoints." - ) - warnings.warn(msg, DeprecationWarning) - return iter((self.name, self)) - def matches(self, **params): """ EntryPoint matches the given parameters. @@ -274,77 +268,7 @@ def __hash__(self): return hash(self._key()) -class DeprecatedList(list): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. - - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - - __slots__ = () - - _warn = functools.partial( - warnings.warn, - "EntryPoints list interface is deprecated. Cast to list if needed.", - DeprecationWarning, - stacklevel=2, - ) - - def _wrap_deprecated_method(method_name: str): # type: ignore - def wrapped(self, *args, **kwargs): - self._warn() - return getattr(super(), method_name)(*args, **kwargs) - - return method_name, wrapped - - locals().update( - map( - _wrap_deprecated_method, - '__setitem__ __delitem__ append reverse extend pop remove ' - '__iadd__ insert sort'.split(), - ) - ) - - def __add__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - return self.__class__(tuple(self) + other) - - def __eq__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - - return tuple(self).__eq__(other) - - -class EntryPoints(DeprecatedList): +class EntryPoints(tuple): """ An immutable collection of selectable EntryPoint objects. """ @@ -355,14 +279,6 @@ def __getitem__(self, name): # -> EntryPoint: """ Get the EntryPoint in self matching name. """ - if isinstance(name, int): - warnings.warn( - "Accessing entry points by index is deprecated. 
" - "Cast to tuple if needed.", - DeprecationWarning, - stacklevel=2, - ) - return super().__getitem__(name) try: return next(iter(self.select(name=name))) except StopIteration: @@ -386,10 +302,6 @@ def names(self): def groups(self): """ Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() """ return {ep.group for ep in self} @@ -405,101 +317,6 @@ def _from_text(text): ) -class Deprecated: - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "SelectableGroups dict interface is deprecated. Use select.", - DeprecationWarning, - stacklevel=2, - ) - - def __getitem__(self, name): - self._warn() - return super().__getitem__(name) - - def get(self, name, default=None): - self._warn() - return super().get(name, default) - - def __iter__(self): - self._warn() - return super().__iter__() - - def __contains__(self, *args): - self._warn() - return super().__contains__(*args) - - def keys(self): - self._warn() - return super().keys() - - def values(self): - self._warn() - return super().values() - - -class SelectableGroups(Deprecated, dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. - """ - groups = super(Deprecated, self).values() - return EntryPoints(itertools.chain.from_iterable(groups)) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set() - """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - - class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" @@ -1013,27 +830,19 @@ def version(distribution_name): """ -def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: +def entry_points(**params) -> EntryPoints: """Return EntryPoint objects for all installed packages. Pass selection parameters (group or name) to filter the result to entry points matching those properties (see EntryPoints.select()). - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. + :return: EntryPoints for all installed packages. 
""" eps = itertools.chain.from_iterable( dist.entry_points for dist in _unique(distributions()) ) - return SelectableGroups.load(eps).select(**params) + return EntryPoints(eps).select(**params) def files(distribution_name): diff --git a/Lib/importlib/resources/_adapters.py b/Lib/importlib/resources/_adapters.py index ea363d86a564b5..f22f6bc509a3d7 100644 --- a/Lib/importlib/resources/_adapters.py +++ b/Lib/importlib/resources/_adapters.py @@ -35,7 +35,7 @@ def _io_wrapper(file, mode='r', *args, **kwargs): elif mode == 'rb': return file raise ValueError( - "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) + f"Invalid mode value '{mode}', only 'r' and 'rb' are supported" ) diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py index ca1fa8ab2fe6ff..8c0be7ee547e01 100644 --- a/Lib/importlib/resources/_common.py +++ b/Lib/importlib/resources/_common.py @@ -67,10 +67,14 @@ def from_package(package): @contextlib.contextmanager -def _tempfile(reader, suffix='', - # gh-93353: Keep a reference to call os.remove() in late Python - # finalization. - *, _os_remove=os.remove): +def _tempfile( + reader, + suffix='', + # gh-93353: Keep a reference to call os.remove() in late Python + # finalization. + *, + _os_remove=os.remove, +): # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' # blocks due to the need to close the temporary file to work on Windows # properly. @@ -89,13 +93,30 @@ def _tempfile(reader, suffix='', pass +def _temp_file(path): + return _tempfile(path.read_bytes, suffix=path.name) + + +def _is_present_dir(path: Traversable) -> bool: + """ + Some Traversables implement ``is_dir()`` to raise an + exception (i.e. ``FileNotFoundError``) when the + directory doesn't exist. This function wraps that call + to always return a boolean and only return True + if there's a dir and it exists. + """ + with contextlib.suppress(FileNotFoundError): + return path.is_dir() + return False + + @functools.singledispatch def as_file(path): """ Given a Traversable object, return that object as a path on the local file system in a context manager. """ - return _tempfile(path.read_bytes, suffix=path.name) + return _temp_dir(path) if _is_present_dir(path) else _temp_file(path) @as_file.register(pathlib.Path) @@ -105,3 +126,34 @@ def _(path): Degenerate behavior for pathlib.Path objects. """ yield path + + +@contextlib.contextmanager +def _temp_path(dir: tempfile.TemporaryDirectory): + """ + Wrap tempfile.TemporyDirectory to return a pathlib object. + """ + with dir as result: + yield pathlib.Path(result) + + +@contextlib.contextmanager +def _temp_dir(path): + """ + Given a traversable dir, recursively replicate the whole tree + to the file system in a context manager. 
+ """ + assert path.is_dir() + with _temp_path(tempfile.TemporaryDirectory()) as temp_dir: + yield _write_contents(temp_dir, path) + + +def _write_contents(target, source): + child = target.joinpath(source.name) + if source.is_dir(): + child.mkdir() + for item in source.iterdir(): + _write_contents(child, item) + else: + child.open('wb').write(source.read_bytes()) + return child diff --git a/Lib/importlib/resources/abc.py b/Lib/importlib/resources/abc.py index 0b7bfdc415829e..67c78c07856705 100644 --- a/Lib/importlib/resources/abc.py +++ b/Lib/importlib/resources/abc.py @@ -1,6 +1,8 @@ import abc import io +import itertools import os +import pathlib from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional from typing import runtime_checkable, Protocol from typing import Union @@ -53,6 +55,10 @@ def contents(self) -> Iterable[str]: raise FileNotFoundError +class TraversalError(Exception): + pass + + @runtime_checkable class Traversable(Protocol): """ @@ -95,7 +101,6 @@ def is_file(self) -> bool: Return True if self is a file """ - @abc.abstractmethod def joinpath(self, *descendants: StrPath) -> "Traversable": """ Return Traversable resolved with any descendants applied. @@ -104,6 +109,22 @@ def joinpath(self, *descendants: StrPath) -> "Traversable": and each may contain multiple levels separated by ``posixpath.sep`` (``/``). """ + if not descendants: + return self + names = itertools.chain.from_iterable( + path.parts for path in map(pathlib.PurePosixPath, descendants) + ) + target = next(names) + matches = ( + traversable for traversable in self.iterdir() if traversable.name == target + ) + try: + match = next(matches) + except StopIteration: + raise TraversalError( + "Target not found during traversal.", target, list(names) + ) + return match.joinpath(*names) def __truediv__(self, child: StrPath) -> "Traversable": """ diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py index b470a2062b2b3a..80cb320dd8bda0 100644 --- a/Lib/importlib/resources/readers.py +++ b/Lib/importlib/resources/readers.py @@ -82,15 +82,13 @@ def is_dir(self): def is_file(self): return False - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath + def joinpath(self, *descendants): + try: + return super().joinpath(*descendants) + except abc.TraversalError: + # One of the paths did not resolve (a directory does not exist). + # Just return something that will not exist. 
+ return self._paths[0].joinpath(*descendants) def open(self, *args, **kwargs): raise FileNotFoundError(f'{self} is not a file') diff --git a/Lib/importlib/resources/simple.py b/Lib/importlib/resources/simple.py index d0fbf237762c2f..b85e4694acedff 100644 --- a/Lib/importlib/resources/simple.py +++ b/Lib/importlib/resources/simple.py @@ -99,20 +99,6 @@ def iterdir(self): def open(self, *args, **kwargs): raise IsADirectoryError() - @staticmethod - def _flatten(compound_names): - for name in compound_names: - yield from name.split('/') - - def joinpath(self, *descendants): - if not descendants: - return self - names = self._flatten(descendants) - target = next(names) - return next( - traversable for traversable in self.iterdir() if traversable.name == target - ).joinpath(*names) - class TraversableReader(TraversableResources, SimpleReader): """ diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 7f15b029b24050..5294578cc26cf3 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -11,12 +11,9 @@ from ._bootstrap_external import source_from_cache from ._bootstrap_external import spec_from_file_location -from contextlib import contextmanager import _imp -import functools import sys import types -import warnings def source_hash(source_bytes): @@ -63,10 +60,10 @@ def _find_spec_from_path(name, path=None): try: spec = module.__spec__ except AttributeError: - raise ValueError('{}.__spec__ is not set'.format(name)) from None + raise ValueError(f'{name}.__spec__ is not set') from None else: if spec is None: - raise ValueError('{}.__spec__ is None'.format(name)) + raise ValueError(f'{name}.__spec__ is None') return spec @@ -108,97 +105,13 @@ def find_spec(name, package=None): try: spec = module.__spec__ except AttributeError: - raise ValueError('{}.__spec__ is not set'.format(name)) from None + raise ValueError(f'{name}.__spec__ is not set') from None else: if spec is None: - raise ValueError('{}.__spec__ is None'.format(name)) + raise ValueError(f'{name}.__spec__ is None') return spec -@contextmanager -def _module_to_load(name): - is_reload = name in sys.modules - - module = sys.modules.get(name) - if not is_reload: - # This must be done before open() is called as the 'io' module - # implicitly imports 'locale' and would otherwise trigger an - # infinite loop. - module = type(sys)(name) - # This must be done before putting the module in sys.modules - # (otherwise an optimization shortcut in import.c becomes wrong) - module.__initializing__ = True - sys.modules[name] = module - try: - yield module - except Exception: - if not is_reload: - try: - del sys.modules[name] - except KeyError: - pass - finally: - module.__initializing__ = False - - -def set_loader(fxn): - """Set __loader__ on the returned module. - - This function is deprecated. - - """ - @functools.wraps(fxn) - def set_loader_wrapper(self, *args, **kwargs): - warnings.warn('The import system now takes care of this automatically; ' - 'this decorator is slated for removal in Python 3.12', - DeprecationWarning, stacklevel=2) - module = fxn(self, *args, **kwargs) - if getattr(module, '__loader__', None) is None: - module.__loader__ = self - return module - return set_loader_wrapper - - -def module_for_loader(fxn): - """Decorator to handle selecting the proper module for loaders. - - The decorated function is passed the module to use instead of the module - name. The module passed in to the function is either from sys.modules if - it already exists or is a new module. 
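A hedged usage sketch for the importlib.resources hunks above: Traversable.joinpath() now resolves several descendants (including '/'-separated segments) through iterdir(), and as_file() can materialise a whole directory; 'mypkg' and the names under it are placeholders.

from importlib.resources import as_file, files

cfg = files('mypkg').joinpath('data', 'config.toml')   # same as files('mypkg') / 'data/config.toml'
print(cfg.read_text())

with as_file(files('mypkg') / 'data') as tmp_dir:      # directories are copied out as a temp tree
    print(sorted(p.name for p in tmp_dir.iterdir()))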
If the module is new, then __name__ - is set the first argument to the method, __loader__ is set to self, and - __package__ is set accordingly (if self.is_package() is defined) will be set - before it is passed to the decorated function (if self.is_package() does - not work for the module it will be set post-load). - - If an exception is raised and the decorator created the module it is - subsequently removed from sys.modules. - - The decorator assumes that the decorated function takes the module name as - the second argument. - - """ - warnings.warn('The import system now takes care of this automatically; ' - 'this decorator is slated for removal in Python 3.12', - DeprecationWarning, stacklevel=2) - @functools.wraps(fxn) - def module_for_loader_wrapper(self, fullname, *args, **kwargs): - with _module_to_load(fullname) as module: - module.__loader__ = self - try: - is_package = self.is_package(fullname) - except (ImportError, AttributeError): - pass - else: - if is_package: - module.__package__ = fullname - else: - module.__package__ = fullname.rpartition('.')[0] - # If __package__ was not set above, __import__() will do it later. - return fxn(self, module, *args, **kwargs) - - return module_for_loader_wrapper - - class _LazyModule(types.ModuleType): """A subclass of the module type which triggers loading upon attribute access.""" diff --git a/Lib/inspect.py b/Lib/inspect.py index 498ee7ab9eaf8a..d0015aa202044e 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -281,30 +281,15 @@ def get_annotations(obj, *, globals=None, locals=None, eval_str=False): # ----------------------------------------------------------- type-checking def ismodule(object): - """Return true if the object is a module. - - Module objects provide these attributes: - __cached__ pathname to byte compiled file - __doc__ documentation string - __file__ filename (missing for built-in modules)""" + """Return true if the object is a module.""" return isinstance(object, types.ModuleType) def isclass(object): - """Return true if the object is a class. - - Class objects provide these attributes: - __doc__ documentation string - __module__ name of module in which this class was defined""" + """Return true if the object is a class.""" return isinstance(object, type) def ismethod(object): - """Return true if the object is an instance method. - - Instance method objects provide these attributes: - __doc__ documentation string - __name__ name with which this method was defined - __func__ function object containing implementation of method - __self__ instance to which this method is bound""" + """Return true if the object is an instance method.""" return isinstance(object, types.MethodType) def ismethoddescriptor(object): @@ -1448,7 +1433,10 @@ def getargvalues(frame): def formatannotation(annotation, base_module=None): if getattr(annotation, '__module__', None) == 'typing': - return repr(annotation).replace('typing.', '') + def repl(match): + text = match.group() + return text.removeprefix('typing.') + return re.sub(r'[\w\.]+', repl, repr(annotation)) if isinstance(annotation, types.GenericAlias): return str(annotation) if isinstance(annotation, type): @@ -2454,7 +2442,10 @@ def _signature_from_callable(obj, *, # Was this function wrapped by a decorator? if follow_wrapper_chains: - obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) + # Unwrap until we find an explicit signature or a MethodType (which will be + # handled explicitly below). 
+ obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__") + or isinstance(f, types.MethodType))) if isinstance(obj, types.MethodType): # If the unwrapped object is a *method*, we might want to # skip its first parameter (self). @@ -3114,8 +3105,12 @@ def _bind(self, args, kwargs, *, partial=False): parameters_ex = (param,) break else: - msg = 'missing a required argument: {arg!r}' - msg = msg.format(arg=param.name) + if param.kind == _KEYWORD_ONLY: + argtype = ' keyword-only' + else: + argtype = '' + msg = 'missing a required{argtype} argument: {arg!r}' + msg = msg.format(arg=param.name, argtype=argtype) raise TypeError(msg) from None else: # We have a positional argument to process diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index b4c5c1304b9752..9847104446eaf6 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -891,6 +891,13 @@ def _connect_unixsocket(self, address): raise def createSocket(self): + """ + Try to create a socket and, if it's not a datagram socket, connect it + to the other end. This method is called during handler initialization, + but it's not regarded as an error if the other end isn't listening yet + --- the method will be called again when emitting an event, + if there is no socket at that point. + """ address = self.address socktype = self.socktype @@ -898,7 +905,7 @@ def createSocket(self): self.unixsocket = True # Syslog server may be unavailable during handler initialisation. # C's openlog() function also ignores connection errors. - # Moreover, we ignore these errors while logging, so it not worse + # Moreover, we ignore these errors while logging, so it's not worse # to ignore it also here. try: self._connect_unixsocket(address) diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index f6c43b3b92bc50..3224363a3f2bfb 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -427,8 +427,8 @@ def _default_mime_types(): # Make sure the entry with the preferred file extension for a particular mime type # appears before any others of the same mimetype. types_map = _types_map_default = { - '.js' : 'application/javascript', - '.mjs' : 'application/javascript', + '.js' : 'text/javascript', + '.mjs' : 'text/javascript', '.json' : 'application/json', '.webmanifest': 'application/manifest+json', '.doc' : 'application/msword', diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 65303d29f51f8f..1a8822b9db012d 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -73,11 +73,6 @@ def arbitrary_address(family): if family == 'AF_INET': return ('localhost', 0) elif family == 'AF_UNIX': - # Prefer abstract sockets if possible to avoid problems with the address - # size. When coding portable applications, some implementations have - # sun_path as short as 92 bytes in the sockaddr_un struct. 
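A small hedged example of the inspect change above: Signature.bind() now says when the missing parameter is keyword-only.

import inspect

def connect(host, *, timeout):
    pass

try:
    inspect.signature(connect).bind('localhost')
except TypeError as exc:
    print(exc)   # missing a required keyword-only argument: 'timeout'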
- if util.abstract_sockets_supported: - return f"\0listener-{os.getpid()}-{next(_mmap_counter)}" return tempfile.mktemp(prefix='listener-', dir=util.get_temp_dir()) elif family == 'AF_PIPE': return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' % @@ -733,6 +728,74 @@ def PipeClient(address): WELCOME = b'#WELCOME#' FAILURE = b'#FAILURE#' +# multiprocessing.connection Authentication Handshake Protocol Description +# (as documented for reference after reading the existing code) +# ============================================================================= +# +# On Windows: native pipes with "overlapped IO" are used to send the bytes, +# instead of the length prefix SIZE scheme described below. (ie: the OS deals +# with message sizes for us) +# +# Protocol error behaviors: +# +# On POSIX, any failure to receive the length prefix into SIZE, for SIZE greater +# than the requested maxsize to receive, or receiving fewer than SIZE bytes +# results in the connection being closed and auth to fail. +# +# On Windows, receiving too few bytes is never a low level _recv_bytes read +# error, receiving too many will trigger an error only if receive maxsize +# value was larger than 128 OR the if the data arrived in smaller pieces. +# +# Serving side Client side +# ------------------------------ --------------------------------------- +# 0. Open a connection on the pipe. +# 1. Accept connection. +# 2. New random 20 bytes -> MESSAGE +# 3. send 4 byte length (net order) +# prefix followed by: +# b'#CHALLENGE#' + MESSAGE +# 4. Receive 4 bytes, parse as network byte +# order integer. If it is -1, receive an +# additional 8 bytes, parse that as network +# byte order. The result is the length of +# the data that follows -> SIZE. +# 5. Receive min(SIZE, 256) bytes -> M1 +# 6. Assert that M1 starts with: +# b'#CHALLENGE#' +# 7. Strip that prefix from M1 into -> M2 +# 8. Compute HMAC-MD5 of AUTHKEY, M2 -> C_DIGEST +# 9. Send 4 byte length prefix (net order) +# followed by C_DIGEST bytes. +# 10. Compute HMAC-MD5 of AUTHKEY, +# MESSAGE into -> M_DIGEST. +# 11. Receive 4 or 4+8 byte length +# prefix (#4 dance) -> SIZE. +# 12. Receive min(SIZE, 256) -> C_D. +# 13. Compare M_DIGEST == C_D: +# 14a: Match? Send length prefix & +# b'#WELCOME#' +# <- RETURN +# 14b: Mismatch? Send len prefix & +# b'#FAILURE#' +# <- CLOSE & AuthenticationError +# 15. Receive 4 or 4+8 byte length prefix (net +# order) again as in #4 into -> SIZE. +# 16. Receive min(SIZE, 256) bytes -> M3. +# 17. Compare M3 == b'#WELCOME#': +# 17a. Match? <- RETURN +# 17b. Mismatch? <- CLOSE & AuthenticationError +# +# If this RETURNed, the connection remains open: it has been authenticated. +# +# Length prefixes are used consistently even though every step so far has +# always been a singular specific fixed length. This may help us evolve +# the protocol in the future without breaking backwards compatibility. +# +# Similarly the initial challenge message from the serving side has always +# been 20 bytes, but clients can accept a 100+ so using the length of the +# opening challenge message as an indicator of protocol version may work. 
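An illustrative sketch of the challenge/response core described in the comment above, not the actual multiprocessing code (the length-prefix framing and socket I/O are omitted); AUTHKEY is a placeholder for the key both ends share out of band:

import hmac
import os

AUTHKEY = b'shared secret'

# serving side, steps 2-3: generate the challenge
message = os.urandom(20)
challenge = b'#CHALLENGE#' + message

# client side, steps 6-9: strip the prefix, answer with HMAC-MD5(AUTHKEY, MESSAGE)
assert challenge.startswith(b'#CHALLENGE#')
response = hmac.new(AUTHKEY, challenge[len(b'#CHALLENGE#'):], 'md5').digest()

# serving side, steps 10-14: recompute and compare in constant time
expected = hmac.new(AUTHKEY, message, 'md5').digest()
reply = b'#WELCOME#' if hmac.compare_digest(expected, response) else b'#FAILURE#'
print(reply)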
+ + def deliver_challenge(connection, authkey): import hmac if not isinstance(authkey, bytes): diff --git a/Lib/multiprocessing/popen_spawn_win32.py b/Lib/multiprocessing/popen_spawn_win32.py index 9c4098d0fa4f1e..4d60ffc030bea6 100644 --- a/Lib/multiprocessing/popen_spawn_win32.py +++ b/Lib/multiprocessing/popen_spawn_win32.py @@ -54,19 +54,20 @@ def __init__(self, process_obj): wfd = msvcrt.open_osfhandle(whandle, 0) cmd = spawn.get_command_line(parent_pid=os.getpid(), pipe_handle=rhandle) - cmd = ' '.join('"%s"' % x for x in cmd) python_exe = spawn.get_executable() # bpo-35797: When running in a venv, we bypass the redirect # executor and launch our base Python. if WINENV and _path_eq(python_exe, sys.executable): - python_exe = sys._base_executable + cmd[0] = python_exe = sys._base_executable env = os.environ.copy() env["__PYVENV_LAUNCHER__"] = sys.executable else: env = None + cmd = ' '.join('"%s"' % x for x in cmd) + with open(wfd, 'wb', closefd=True) as to_child: # start process try: diff --git a/Lib/multiprocessing/shared_memory.py b/Lib/multiprocessing/shared_memory.py index 881f2001dd5980..9a1e5aa17b87a2 100644 --- a/Lib/multiprocessing/shared_memory.py +++ b/Lib/multiprocessing/shared_memory.py @@ -173,7 +173,10 @@ def __init__(self, name=None, create=False, size=0): ) finally: _winapi.CloseHandle(h_map) - size = _winapi.VirtualQuerySize(p_buf) + try: + size = _winapi.VirtualQuerySize(p_buf) + finally: + _winapi.UnmapViewOfFile(p_buf) self._mmap = mmap.mmap(-1, size, tagname=name) self._size = size diff --git a/Lib/ntpath.py b/Lib/ntpath.py index d9582f4087433e..873c884c3bd934 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -30,7 +30,7 @@ "ismount", "expanduser","expandvars","normpath","abspath", "curdir","pardir","sep","pathsep","defpath","altsep", "extsep","devnull","realpath","supports_unicode_filenames","relpath", - "samefile", "sameopenfile", "samestat", "commonpath"] + "samefile", "sameopenfile", "samestat", "commonpath", "isjunction"] def _get_bothseps(path): if isinstance(path, bytes): @@ -267,6 +267,24 @@ def islink(path): return False return stat.S_ISLNK(st.st_mode) + +# Is a path a junction? + +if hasattr(os.stat_result, 'st_reparse_tag'): + def isjunction(path): + """Test whether a path is a junction""" + try: + st = os.lstat(path) + except (OSError, ValueError, AttributeError): + return False + return bool(st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT) +else: + def isjunction(path): + """Test whether a path is a junction""" + os.fspath(path) + return False + + # Being true for dangling symbolic links is also useful. 
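A hypothetical usage sketch of the new isjunction() helper, assuming a Windows build that includes this patch; the example paths are only illustrative, and on builds whose os.stat_result lacks st_reparse_tag the fallback above always returns False:

import os.path

print(os.path.isjunction(r'C:\Documents and Settings'))  # typically True: a junction on default installs
print(os.path.isjunction(r'C:\Windows'))                 # False: a regular directory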
def lexists(path): diff --git a/Lib/opcode.py b/Lib/opcode.py index 690923061418bd..fa6dbe5d24170c 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -77,6 +77,9 @@ def pseudo_op(name, op, real_ops): def_op('CACHE', 0) def_op('POP_TOP', 1) def_op('PUSH_NULL', 2) +def_op('INTERPRETER_EXIT', 3) + +def_op('END_FOR', 4) def_op('NOP', 9) def_op('UNARY_POSITIVE', 10) @@ -109,6 +112,8 @@ def pseudo_op(name, op, real_ops): def_op('STORE_SUBSCR', 60) def_op('DELETE_SUBSCR', 61) +def_op('STOPITERATION_ERROR', 63) + def_op('GET_ITER', 68) def_op('GET_YIELD_FROM_ITER', 69) def_op('PRINT_EXPR', 70) @@ -274,7 +279,6 @@ def pseudo_op(name, op, real_ops): _specializations = { "BINARY_OP": [ - "BINARY_OP_ADAPTIVE", "BINARY_OP_ADD_FLOAT", "BINARY_OP_ADD_INT", "BINARY_OP_ADD_UNICODE", @@ -285,14 +289,12 @@ def pseudo_op(name, op, real_ops): "BINARY_OP_SUBTRACT_INT", ], "BINARY_SUBSCR": [ - "BINARY_SUBSCR_ADAPTIVE", "BINARY_SUBSCR_DICT", "BINARY_SUBSCR_GETITEM", "BINARY_SUBSCR_LIST_INT", "BINARY_SUBSCR_TUPLE_INT", ], "CALL": [ - "CALL_ADAPTIVE", "CALL_PY_EXACT_ARGS", "CALL_PY_WITH_DEFAULTS", "CALL_BOUND_METHOD_EXACT_ARGS", @@ -312,24 +314,16 @@ def pseudo_op(name, op, real_ops): "CALL_NO_KW_TYPE_1", ], "COMPARE_OP": [ - "COMPARE_OP_ADAPTIVE", "COMPARE_OP_FLOAT_JUMP", "COMPARE_OP_INT_JUMP", "COMPARE_OP_STR_JUMP", ], - "EXTENDED_ARG": [ - "EXTENDED_ARG_QUICK", - ], "FOR_ITER": [ - "FOR_ITER_ADAPTIVE", "FOR_ITER_LIST", "FOR_ITER_RANGE", - ], - "JUMP_BACKWARD": [ - "JUMP_BACKWARD_QUICK", + "FOR_ITER_GEN", ], "LOAD_ATTR": [ - "LOAD_ATTR_ADAPTIVE", # These potentially push [NULL, bound method] onto the stack. "LOAD_ATTR_CLASS", "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", @@ -352,15 +346,10 @@ def pseudo_op(name, op, real_ops): "LOAD_FAST__LOAD_FAST", ], "LOAD_GLOBAL": [ - "LOAD_GLOBAL_ADAPTIVE", "LOAD_GLOBAL_BUILTIN", "LOAD_GLOBAL_MODULE", ], - "RESUME": [ - "RESUME_QUICK", - ], "STORE_ATTR": [ - "STORE_ATTR_ADAPTIVE", "STORE_ATTR_INSTANCE_VALUE", "STORE_ATTR_SLOT", "STORE_ATTR_WITH_HINT", @@ -370,12 +359,10 @@ def pseudo_op(name, op, real_ops): "STORE_FAST__STORE_FAST", ], "STORE_SUBSCR": [ - "STORE_SUBSCR_ADAPTIVE", "STORE_SUBSCR_DICT", "STORE_SUBSCR_LIST_INT", ], "UNPACK_SEQUENCE": [ - "UNPACK_SEQUENCE_ADAPTIVE", "UNPACK_SEQUENCE_LIST", "UNPACK_SEQUENCE_TUPLE", "UNPACK_SEQUENCE_TWO_TUPLE", diff --git a/Lib/os.py b/Lib/os.py index 648188e0f13490..fd1e774fdcbcfa 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -288,7 +288,8 @@ def walk(top, topdown=True, onerror=None, followlinks=False): dirpath, dirnames, filenames dirpath is a string, the path to the directory. dirnames is a list of - the names of the subdirectories in dirpath (excluding '.' and '..'). + the names of the subdirectories in dirpath (including symlinks to directories, + and excluding '.' and '..'). filenames is a list of the names of the non-directory files in dirpath. Note that the names in the lists are just names, with no path components. 
To get a full path (which begins with top) to a file or directory in diff --git a/Lib/pathlib.py b/Lib/pathlib.py index 0ea621cc601037..f31eb3010368d5 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -200,6 +200,7 @@ def make_uri(self, path): # Globbing helpers # +@functools.lru_cache() def _make_selector(pattern_parts, flavour): pat = pattern_parts[0] child_parts = pattern_parts[1:] @@ -215,9 +216,6 @@ def _make_selector(pattern_parts, flavour): cls = _PreciseSelector return cls(pat, child_parts, flavour) -if hasattr(functools, "lru_cache"): - _make_selector = functools.lru_cache()(_make_selector) - class _Selector: """A selector matches a specific glob pattern part against the children @@ -626,47 +624,35 @@ def with_suffix(self, suffix): return self._from_parsed_parts(self._drv, self._root, self._parts[:-1] + [name]) - def relative_to(self, *other): + def relative_to(self, *other, walk_up=False): """Return the relative path to another path identified by the passed arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. + related to the other path), raise ValueError. + + The *walk_up* parameter controls whether `..` may be used to resolve + the path. """ - # For the purpose of this method, drive and root are considered - # separate parts, i.e.: - # Path('c:/').relative_to('c:') gives Path('/') - # Path('c:/').relative_to('/') raise ValueError if not other: raise TypeError("need at least one argument") - parts = self._parts - drv = self._drv - root = self._root - if root: - abs_parts = [drv, root] + parts[1:] - else: - abs_parts = parts - to_drv, to_root, to_parts = self._parse_args(other) - if to_root: - to_abs_parts = [to_drv, to_root] + to_parts[1:] + path_cls = type(self) + other = path_cls(*other) + for step, path in enumerate([other] + list(other.parents)): + if self.is_relative_to(path): + break else: - to_abs_parts = to_parts - n = len(to_abs_parts) - cf = self._flavour.casefold_parts - if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): - formatted = self._format_parsed_parts(to_drv, to_root, to_parts) - raise ValueError("{!r} is not in the subpath of {!r}" - " OR one path is relative and the other is absolute." - .format(str(self), str(formatted))) - return self._from_parsed_parts('', root if n == 1 else '', - abs_parts[n:]) + raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") + if step and not walk_up: + raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") + parts = ('..',) * step + self.parts[len(path.parts):] + return path_cls(*parts) def is_relative_to(self, *other): """Return True if the path is relative to another path or False. """ - try: - self.relative_to(*other) - return True - except ValueError: - return False + if not other: + raise TypeError("need at least one argument") + other = type(self)(*other) + return other == self or other in self.parents @property def parts(self): @@ -851,8 +837,10 @@ def samefile(self, other_path): return os.path.samestat(st, other_st) def iterdir(self): - """Iterate over the files in this directory. Does not yield any - result for the special paths '.' and '..'. + """Yield path objects of the directory contents. + + The children are yielded in arbitrary order, and the + special entries '.' and '..' are not included. 
""" for name in os.listdir(self): yield self._make_child_relpath(name) @@ -1205,6 +1193,12 @@ def is_symlink(self): # Non-encodable path return False + def is_junction(self): + """ + Whether this path is a junction. + """ + return self._flavour.pathmod.isjunction(self) + def is_block_device(self): """ Whether this path is a block device. diff --git a/Lib/pdb.py b/Lib/pdb.py index b0acb1f571d3f6..78d0ce537f1fc2 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1332,6 +1332,12 @@ def do_list(self, arg): if last is None: last = first + 10 filename = self.curframe.f_code.co_filename + # gh-93696: stdlib frozen modules provide a useful __file__ + # this workaround can be removed with the closure of gh-89815 + if filename.startswith(">> print(sys.exc_info())\n' ' (None, None, None)\n' '\n' + '\n' + '"except*" clause\n' + '----------------\n' + '\n' 'The "except*" clause(s) are used for handling "ExceptionGroup"s. ' 'The\n' 'exception type for matching is interpreted as in the case of ' @@ -2520,13 +2550,15 @@ 'when\n' 'the type matches some of the exceptions in the group. This means ' 'that\n' - 'multiple except* clauses can execute, each handling part of the\n' - 'exception group. Each clause executes once and handles an ' - 'exception\n' - 'group of all matching exceptions. Each exception in the group ' - 'is\n' - 'handled by at most one except* clause, the first that matches ' - 'it.\n' + 'multiple "except*" clauses can execute, each handling part of ' + 'the\n' + 'exception group. Each clause executes at most once and handles ' + 'an\n' + 'exception group of all matching exceptions. Each exception in ' + 'the\n' + 'group is handled by at most one "except*" clause, the first ' + 'that\n' + 'matches it.\n' '\n' ' >>> try:\n' ' ... raise ExceptionGroup("eg",\n' @@ -2548,22 +2580,37 @@ ' +-+---------------- 1 ----------------\n' ' | ValueError: 1\n' ' +------------------------------------\n' - ' >>>\n' '\n' - ' Any remaining exceptions that were not handled by any except* ' + 'Any remaining exceptions that were not handled by any "except*" ' 'clause\n' - ' are re-raised at the end, combined into an exception group ' - 'along with\n' - ' all exceptions that were raised from within except* clauses.\n' - '\n' - ' An except* clause must have a matching type, and this type ' - 'cannot be a\n' - ' subclass of :exc:`BaseExceptionGroup`. It is not possible to ' - 'mix except\n' - ' and except* in the same :keyword:`try`. :keyword:`break`,\n' - ' :keyword:`continue` and :keyword:`return` cannot appear in an ' - 'except*\n' - ' clause.\n' + 'are re-raised at the end, combined into an exception group along ' + 'with\n' + 'all exceptions that were raised from within "except*" clauses.\n' + '\n' + 'If the raised exception is not an exception group and its type ' + 'matches\n' + 'one of the "except*" clauses, it is caught and wrapped by an ' + 'exception\n' + 'group with an empty message string.\n' + '\n' + ' >>> try:\n' + ' ... raise BlockingIOError\n' + ' ... except* BlockingIOError as e:\n' + ' ... print(repr(e))\n' + ' ...\n' + " ExceptionGroup('', (BlockingIOError()))\n" + '\n' + 'An "except*" clause must have a matching type, and this type ' + 'cannot be\n' + 'a subclass of "BaseExceptionGroup". It is not possible to mix ' + '"except"\n' + 'and "except*" in the same "try". 
"break", "continue" and ' + '"return"\n' + 'cannot appear in an "except*" clause.\n' + '\n' + '\n' + '"else" clause\n' + '-------------\n' '\n' 'The optional "else" clause is executed if the control flow ' 'leaves the\n' @@ -2573,6 +2620,10 @@ 'are\n' 'not handled by the preceding "except" clauses.\n' '\n' + '\n' + '"finally" clause\n' + '----------------\n' + '\n' 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' '"try"\n' 'clause is executed, including any "except" and "else" clauses. ' @@ -2626,11 +2677,6 @@ ' >>> foo()\n' " 'finally'\n" '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' 'Changed in version 3.8: Prior to Python 3.8, a "continue" ' 'statement\n' 'was illegal in the "finally" clause due to a problem with the\n' @@ -3482,8 +3528,8 @@ ' there is matched against the whole object rather than an ' 'attribute.\n' ' For example "int(0|1)" matches the value "0", but not the ' - 'values\n' - ' "0.0" or "False".\n' + 'value\n' + ' "0.0".\n' '\n' 'In simple terms "CLS(P1, attr=P2)" matches only if the ' 'following\n' @@ -4144,7 +4190,7 @@ ' invoking the superclass’s "__new__()" method using\n' ' "super().__new__(cls[, ...])" with appropriate arguments ' 'and then\n' - ' modifying the newly-created instance as necessary before ' + ' modifying the newly created instance as necessary before ' 'returning\n' ' it.\n' '\n' @@ -4547,7 +4593,7 @@ 'Python.This is\n' ' intended to provide protection against a ' 'denial-of-service caused\n' - ' by carefully-chosen inputs that exploit the worst ' + ' by carefully chosen inputs that exploit the worst ' 'case\n' ' performance of a dict insertion, O(n^2) complexity. ' 'See\n' @@ -4861,7 +4907,10 @@ 'is\n' 'applied to separating the commands; the input is split at the ' 'first\n' - '";;" pair, even if it is in the middle of a quoted string.\n' + '";;" pair, even if it is in the middle of a quoted string. 
A\n' + 'workaround for strings with double semicolons is to use ' + 'implicit\n' + 'string concatenation "\';\'\';\'" or "";"";"".\n' '\n' 'If a file ".pdbrc" exists in the user’s home directory or in ' 'the\n' @@ -5537,9 +5586,10 @@ '\n' ' * "for" loop header,\n' '\n' - ' * after "as" in a "with" statement, "except" clause or in the ' - 'as-\n' - ' pattern in structural pattern matching,\n' + ' * after "as" in a "with" statement, "except" clause, ' + '"except*"\n' + ' clause, or in the as-pattern in structural pattern ' + 'matching,\n' '\n' ' * in a capture pattern in structural pattern matching\n' '\n' @@ -7100,8 +7150,8 @@ '\n' 'A non-normative HTML file listing all valid identifier ' 'characters for\n' - 'Unicode 14.0.0 can be found at\n' - 'https://www.unicode.org/Public/14.0.0/ucd/DerivedCoreProperties.txt\n' + 'Unicode 15.0.0 can be found at\n' + 'https://www.unicode.org/Public/15.0.0/ucd/DerivedCoreProperties.txt\n' '\n' '\n' 'Keywords\n' @@ -7271,7 +7321,7 @@ 'the clauses had been separated out into individual import ' 'statements.\n' '\n' - 'The details of the first step, finding and loading modules are\n' + 'The details of the first step, finding and loading modules, are\n' 'described in greater detail in the section on the import system, ' 'which\n' 'also describes the various types of packages and modules that can ' @@ -7654,9 +7704,8 @@ '\n' ' * "for" loop header,\n' '\n' - ' * after "as" in a "with" statement, "except" clause or in the ' - 'as-\n' - ' pattern in structural pattern matching,\n' + ' * after "as" in a "with" statement, "except" clause, "except*"\n' + ' clause, or in the as-pattern in structural pattern matching,\n' '\n' ' * in a capture pattern in structural pattern matching\n' '\n' @@ -8229,8 +8278,9 @@ 'the syntax is explicitly given, operators are binary. ' 'Operators in\n' 'the same box group left to right (except for ' - 'exponentiation, which\n' - 'groups from right to left).\n' + 'exponentiation and\n' + 'conditional expressions, which group from right to ' + 'left).\n' '\n' 'Note that comparisons, membership tests, and identity ' 'tests, all have\n' @@ -8254,7 +8304,7 @@ '| "x(arguments...)", "x.attribute" | ' 'attribute reference |\n' '+-------------------------------------------------+---------------------------------------+\n' - '| "await" "x" | ' + '| "await x" | ' 'Await expression |\n' '+-------------------------------------------------+---------------------------------------+\n' '| "**" | ' @@ -8290,7 +8340,7 @@ '| ">=", "!=", "==" | ' 'tests and identity tests |\n' '+-------------------------------------------------+---------------------------------------+\n' - '| "not" "x" | ' + '| "not x" | ' 'Boolean NOT |\n' '+-------------------------------------------------+---------------------------------------+\n' '| "and" | ' @@ -8980,31 +9030,7 @@ ' still alive. The list is in definition order. 
Example:\n' '\n' ' >>> int.__subclasses__()\n' - " []\n" - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] Additional information on these special methods may be ' - 'found in\n' - ' the Python Reference Manual (Basic customization).\n' - '\n' - '[2] As a consequence, the list "[1, 2]" is considered equal ' - 'to "[1.0,\n' - ' 2.0]", and similarly for tuples.\n' - '\n' - '[3] They must have since the parser can’t tell the type of ' - 'the\n' - ' operands.\n' - '\n' - '[4] Cased characters are those with general category ' - 'property being\n' - ' one of “Lu” (Letter, uppercase), “Ll” (Letter, ' - 'lowercase), or “Lt”\n' - ' (Letter, titlecase).\n' - '\n' - '[5] To format only a tuple you should therefore provide a ' - 'singleton\n' - ' tuple whose only element is the tuple to be formatted.\n', + " []\n", 'specialnames': 'Special method names\n' '********************\n' '\n' @@ -9073,7 +9099,7 @@ ' invoking the superclass’s "__new__()" method using\n' ' "super().__new__(cls[, ...])" with appropriate arguments ' 'and then\n' - ' modifying the newly-created instance as necessary before ' + ' modifying the newly created instance as necessary before ' 'returning\n' ' it.\n' '\n' @@ -9474,7 +9500,7 @@ 'is\n' ' intended to provide protection against a ' 'denial-of-service caused\n' - ' by carefully-chosen inputs that exploit the worst case\n' + ' by carefully chosen inputs that exploit the worst case\n' ' performance of a dict insertion, O(n^2) complexity. ' 'See\n' ' http://www.ocert.org/advisories/ocert-2011-003.html ' @@ -11289,6 +11315,10 @@ '*start* and\n' ' *end* are interpreted as in slice notation.\n' '\n' + ' If *sub* is empty, returns the number of empty strings ' + 'between\n' + ' characters which is the length of the string plus one.\n' + '\n' "str.encode(encoding='utf-8', errors='strict')\n" '\n' ' Return an encoded version of the string as a bytes ' @@ -12191,12 +12221,15 @@ 'single quotes ("\'") or double quotes ("""). They can also be ' 'enclosed\n' 'in matching groups of three single or double quotes (these are\n' - 'generally referred to as *triple-quoted strings*). The ' - 'backslash\n' - '("\\") character is used to escape characters that otherwise have ' - 'a\n' - 'special meaning, such as newline, backslash itself, or the quote\n' + 'generally referred to as *triple-quoted strings*). The backslash ' + '("\\")\n' + 'character is used to give special meaning to otherwise ordinary\n' + 'characters like "n", which means ‘newline’ when escaped ("\\n"). 
' + 'It can\n' + 'also be used to escape characters that otherwise have a special\n' + 'meaning, such as newline, backslash itself, or the quote ' 'character.\n' + 'See escape sequences below for examples.\n' '\n' 'Bytes literals are always prefixed with "\'b\'" or "\'B\'"; they ' 'produce\n' @@ -12253,8 +12286,8 @@ '| Escape Sequence | Meaning | Notes ' '|\n' '|===================|===================================|=========|\n' - '| "\\newline" | Backslash and newline ignored ' - '| |\n' + '| "\\" | Backslash and newline ignored | ' + '(1) |\n' '+-------------------+-----------------------------------+---------+\n' '| "\\\\" | Backslash ("\\") ' '| |\n' @@ -12287,10 +12320,10 @@ '| |\n' '+-------------------+-----------------------------------+---------+\n' '| "\\ooo" | Character with octal value *ooo* | ' - '(1,3) |\n' + '(2,4) |\n' '+-------------------+-----------------------------------+---------+\n' '| "\\xhh" | Character with hex value *hh* | ' - '(2,3) |\n' + '(3,4) |\n' '+-------------------+-----------------------------------+---------+\n' '\n' 'Escape sequences only recognized in string literals are:\n' @@ -12300,45 +12333,59 @@ '|\n' '|===================|===================================|=========|\n' '| "\\N{name}" | Character named *name* in the | ' - '(4) |\n' + '(5) |\n' '| | Unicode database | ' '|\n' '+-------------------+-----------------------------------+---------+\n' '| "\\uxxxx" | Character with 16-bit hex value | ' - '(5) |\n' + '(6) |\n' '| | *xxxx* | ' '|\n' '+-------------------+-----------------------------------+---------+\n' '| "\\Uxxxxxxxx" | Character with 32-bit hex value | ' - '(6) |\n' + '(7) |\n' '| | *xxxxxxxx* | ' '|\n' '+-------------------+-----------------------------------+---------+\n' '\n' 'Notes:\n' '\n' - '1. As in Standard C, up to three octal digits are accepted.\n' + '1. A backslash can be added at the end of a line to ignore the\n' + ' newline:\n' + '\n' + " >>> 'This string will not include \\\n" + " ... backslashes or newline characters.'\n" + " 'This string will not include backslashes or newline " + "characters.'\n" + '\n' + ' The same result can be achieved using triple-quoted strings, ' + 'or\n' + ' parentheses and string literal concatenation.\n' + '\n' + '2. As in Standard C, up to three octal digits are accepted.\n' '\n' ' Changed in version 3.11: Octal escapes with value larger than\n' - ' "0o377" produce a "DeprecationWarning". In a future Python ' - 'version\n' - ' they will be a "SyntaxWarning" and eventually a ' - '"SyntaxError".\n' + ' "0o377" produce a "DeprecationWarning".\n' '\n' - '2. Unlike in Standard C, exactly two hex digits are required.\n' + ' Changed in version 3.12: Octal escapes with value larger than\n' + ' "0o377" produce a "SyntaxWarning". In a future Python version ' + 'they\n' + ' will be eventually a "SyntaxError".\n' '\n' - '3. In a bytes literal, hexadecimal and octal escapes denote the ' + '3. Unlike in Standard C, exactly two hex digits are required.\n' + '\n' + '4. In a bytes literal, hexadecimal and octal escapes denote the ' 'byte\n' ' with the given value. In a string literal, these escapes ' 'denote a\n' ' Unicode character with the given value.\n' '\n' - '4. Changed in version 3.3: Support for name aliases [1] has been\n' + '5. Changed in version 3.3: Support for name aliases [1] has been\n' ' added.\n' '\n' - '5. Exactly four hex digits are required.\n' + '6. Exactly four hex digits are required.\n' '\n' - '6. Any Unicode character can be encoded this way. Exactly eight ' + '7. 
Any Unicode character can be encoded this way. Exactly eight ' 'hex\n' ' digits are required.\n' '\n' @@ -12358,9 +12405,13 @@ '\n' ' Changed in version 3.6: Unrecognized escape sequences produce ' 'a\n' - ' "DeprecationWarning". In a future Python version they will be ' + ' "DeprecationWarning".\n' + '\n' + ' Changed in version 3.12: Unrecognized escape sequences produce ' 'a\n' - ' "SyntaxWarning" and eventually a "SyntaxError".\n' + ' "SyntaxWarning". In a future Python version they will be ' + 'eventually\n' + ' a "SyntaxError".\n' '\n' 'Even in a raw literal, quotes can be escaped with a backslash, ' 'but the\n' @@ -12509,31 +12560,39 @@ ' try3_stmt ::= "try" ":" suite\n' ' "finally" ":" suite\n' '\n' + 'Additional information on exceptions can be found in section\n' + 'Exceptions, and information on using the "raise" statement to ' + 'generate\n' + 'exceptions may be found in section The raise statement.\n' + '\n' + '\n' + '"except" clause\n' + '===============\n' + '\n' 'The "except" clause(s) specify one or more exception handlers. When ' 'no\n' 'exception occurs in the "try" clause, no exception handler is\n' 'executed. When an exception occurs in the "try" suite, a search for ' 'an\n' - 'exception handler is started. This search inspects the except ' - 'clauses\n' - 'in turn until one is found that matches the exception. An ' - 'expression-\n' - 'less except clause, if present, must be last; it matches any\n' - 'exception. For an except clause with an expression, that expression\n' - 'is evaluated, and the clause matches the exception if the resulting\n' - 'object is “compatible” with the exception. An object is compatible\n' - 'with an exception if the object is the class or a *non-virtual base\n' - 'class* of the exception object, or a tuple containing an item that ' - 'is\n' - 'the class or a non-virtual base class of the exception object.\n' + 'exception handler is started. This search inspects the "except"\n' + 'clauses in turn until one is found that matches the exception. An\n' + 'expression-less "except" clause, if present, must be last; it ' + 'matches\n' + 'any exception. For an "except" clause with an expression, that\n' + 'expression is evaluated, and the clause matches the exception if the\n' + 'resulting object is “compatible” with the exception. An object is\n' + 'compatible with an exception if the object is the class or a *non-\n' + 'virtual base class* of the exception object, or a tuple containing ' + 'an\n' + 'item that is the class or a non-virtual base class of the exception\n' + 'object.\n' '\n' - 'If no except clause matches the exception, the search for an ' - 'exception\n' - 'handler continues in the surrounding code and on the invocation ' - 'stack.\n' - '[1]\n' + 'If no "except" clause matches the exception, the search for an\n' + 'exception handler continues in the surrounding code and on the\n' + 'invocation stack. [1]\n' '\n' - 'If the evaluation of an expression in the header of an except clause\n' + 'If the evaluation of an expression in the header of an "except" ' + 'clause\n' 'raises an exception, the original search for a handler is canceled ' 'and\n' 'a search starts for the new exception in the surrounding code and on\n' @@ -12541,21 +12600,20 @@ 'raised\n' 'the exception).\n' '\n' - 'When a matching except clause is found, the exception is assigned to\n' - 'the target specified after the "as" keyword in that except clause, ' - 'if\n' - 'present, and the except clause’s suite is executed. 
All except\n' - 'clauses must have an executable block. When the end of this block ' - 'is\n' - 'reached, execution continues normally after the entire try ' - 'statement.\n' - '(This means that if two nested handlers exist for the same ' - 'exception,\n' - 'and the exception occurs in the try clause of the inner handler, the\n' - 'outer handler will not handle the exception.)\n' + 'When a matching "except" clause is found, the exception is assigned ' + 'to\n' + 'the target specified after the "as" keyword in that "except" clause,\n' + 'if present, and the "except" clause’s suite is executed. All ' + '"except"\n' + 'clauses must have an executable block. When the end of this block is\n' + 'reached, execution continues normally after the entire "try"\n' + 'statement. (This means that if two nested handlers exist for the ' + 'same\n' + 'exception, and the exception occurs in the "try" clause of the inner\n' + 'handler, the outer handler will not handle the exception.)\n' '\n' 'When an exception has been assigned using "as target", it is cleared\n' - 'at the end of the except clause. This is as if\n' + 'at the end of the "except" clause. This is as if\n' '\n' ' except E as N:\n' ' foo\n' @@ -12569,12 +12627,13 @@ ' del N\n' '\n' 'This means the exception must be assigned to a different name to be\n' - 'able to refer to it after the except clause. Exceptions are cleared\n' + 'able to refer to it after the "except" clause. Exceptions are ' + 'cleared\n' 'because with the traceback attached to them, they form a reference\n' 'cycle with the stack frame, keeping all locals in that frame alive\n' 'until the next garbage collection occurs.\n' '\n' - 'Before an except clause’s suite is executed, details about the\n' + 'Before an "except" clause’s suite is executed, details about the\n' 'exception are stored in the "sys" module and can be accessed via\n' '"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting of ' 'the\n' @@ -12606,16 +12665,21 @@ ' >>> print(sys.exc_info())\n' ' (None, None, None)\n' '\n' + '\n' + '"except*" clause\n' + '================\n' + '\n' 'The "except*" clause(s) are used for handling "ExceptionGroup"s. The\n' 'exception type for matching is interpreted as in the case of ' '"except",\n' 'but in the case of exception groups we can have partial matches when\n' 'the type matches some of the exceptions in the group. This means ' 'that\n' - 'multiple except* clauses can execute, each handling part of the\n' - 'exception group. Each clause executes once and handles an exception\n' - 'group of all matching exceptions. Each exception in the group is\n' - 'handled by at most one except* clause, the first that matches it.\n' + 'multiple "except*" clauses can execute, each handling part of the\n' + 'exception group. Each clause executes at most once and handles an\n' + 'exception group of all matching exceptions. Each exception in the\n' + 'group is handled by at most one "except*" clause, the first that\n' + 'matches it.\n' '\n' ' >>> try:\n' ' ... 
raise ExceptionGroup("eg",\n' @@ -12635,22 +12699,36 @@ ' +-+---------------- 1 ----------------\n' ' | ValueError: 1\n' ' +------------------------------------\n' - ' >>>\n' '\n' - ' Any remaining exceptions that were not handled by any except* ' + 'Any remaining exceptions that were not handled by any "except*" ' 'clause\n' - ' are re-raised at the end, combined into an exception group along ' + 'are re-raised at the end, combined into an exception group along ' 'with\n' - ' all exceptions that were raised from within except* clauses.\n' + 'all exceptions that were raised from within "except*" clauses.\n' '\n' - ' An except* clause must have a matching type, and this type cannot ' - 'be a\n' - ' subclass of :exc:`BaseExceptionGroup`. It is not possible to mix ' - 'except\n' - ' and except* in the same :keyword:`try`. :keyword:`break`,\n' - ' :keyword:`continue` and :keyword:`return` cannot appear in an ' - 'except*\n' - ' clause.\n' + 'If the raised exception is not an exception group and its type ' + 'matches\n' + 'one of the "except*" clauses, it is caught and wrapped by an ' + 'exception\n' + 'group with an empty message string.\n' + '\n' + ' >>> try:\n' + ' ... raise BlockingIOError\n' + ' ... except* BlockingIOError as e:\n' + ' ... print(repr(e))\n' + ' ...\n' + " ExceptionGroup('', (BlockingIOError()))\n" + '\n' + 'An "except*" clause must have a matching type, and this type cannot ' + 'be\n' + 'a subclass of "BaseExceptionGroup". It is not possible to mix ' + '"except"\n' + 'and "except*" in the same "try". "break", "continue" and "return"\n' + 'cannot appear in an "except*" clause.\n' + '\n' + '\n' + '"else" clause\n' + '=============\n' '\n' 'The optional "else" clause is executed if the control flow leaves ' 'the\n' @@ -12659,6 +12737,10 @@ '"break" statement was executed. Exceptions in the "else" clause are\n' 'not handled by the preceding "except" clauses.\n' '\n' + '\n' + '"finally" clause\n' + '================\n' + '\n' 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' '"try"\n' 'clause is executed, including any "except" and "else" clauses. If ' @@ -12706,11 +12788,6 @@ ' >>> foo()\n' " 'finally'\n" '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n' 'was illegal in the "finally" clause due to a problem with the\n' 'implementation.\n', @@ -12910,7 +12987,7 @@ ' points. All the code points in the range "U+0000 - ' 'U+10FFFF"\n' ' can be represented in a string. Python doesn’t have a ' - '*char*\n' + 'char\n' ' type; instead, every code point in the string is ' 'represented\n' ' as a string object with length "1". The built-in ' @@ -13908,17 +13985,11 @@ 'dictionaries or\n' 'other mutable types (that are compared by value rather than ' 'by object\n' - 'identity) may not be used as keys. Numeric types used for ' - 'keys obey\n' - 'the normal rules for numeric comparison: if two numbers ' - 'compare equal\n' - '(such as "1" and "1.0") then they can be used ' - 'interchangeably to index\n' - 'the same dictionary entry. (Note however, that since ' - 'computers store\n' - 'floating-point numbers as approximations it is usually ' - 'unwise to use\n' - 'them as dictionary keys.)\n' + 'identity) may not be used as keys. 
Values that compare equal ' + '(such as\n' + '"1", "1.0", and "True") can be used interchangeably to index ' + 'the same\n' + 'dictionary entry.\n' '\n' 'class dict(**kwargs)\n' 'class dict(mapping, **kwargs)\n' @@ -14336,7 +14407,11 @@ 'abstract\n' 'base class "collections.abc.Set" are available (for example, ' '"==",\n' - '"<", or "^").\n' + '"<", or "^"). While using set operators, set-like views ' + 'accept any\n' + 'iterable as the other operand, unlike sets which only accept ' + 'sets as\n' + 'the input.\n' '\n' 'An example of dictionary view usage:\n' '\n' @@ -14370,6 +14445,8 @@ " {'bacon'}\n" " >>> keys ^ {'sausage', 'juice'}\n" " {'juice', 'sausage', 'bacon', 'spam'}\n" + " >>> keys | ['juice', 'juice', 'juice']\n" + " {'juice', 'sausage', 'bacon', 'spam', 'eggs'}\n" '\n' ' >>> # get back a read-only proxy for the original ' 'dictionary\n' diff --git a/Lib/re/__init__.py b/Lib/re/__init__.py index d58c2117ef3e14..4515650a721ac6 100644 --- a/Lib/re/__init__.py +++ b/Lib/re/__init__.py @@ -124,6 +124,7 @@ import enum from . import _compiler, _parser import functools +import _sre # public symbols @@ -229,7 +230,8 @@ def compile(pattern, flags=0): def purge(): "Clear the regular expression caches" _cache.clear() - _compile_repl.cache_clear() + _cache2.clear() + _compile_template.cache_clear() def template(pattern, flags=0): "Compile a template pattern, returning a Pattern object, deprecated" @@ -266,61 +268,70 @@ def escape(pattern): # -------------------------------------------------------------------- # internals -_cache = {} # ordered! - +# Use the fact that dict keeps the insertion order. +# _cache2 uses the simple FIFO policy which has better latency. +# _cache uses the LRU policy which has better hit rate. +_cache = {} # LRU +_cache2 = {} # FIFO _MAXCACHE = 512 +_MAXCACHE2 = 256 +assert _MAXCACHE2 < _MAXCACHE + def _compile(pattern, flags): # internal: compile pattern if isinstance(flags, RegexFlag): flags = flags.value try: - return _cache[type(pattern), pattern, flags] + return _cache2[type(pattern), pattern, flags] except KeyError: pass - if isinstance(pattern, Pattern): - if flags: - raise ValueError( - "cannot process flags argument with a compiled pattern") - return pattern - if not _compiler.isstring(pattern): - raise TypeError("first argument must be string or compiled pattern") - if flags & T: - import warnings - warnings.warn("The re.TEMPLATE/re.T flag is deprecated " - "as it is an undocumented flag " - "without an obvious purpose. " - "Don't use it.", - DeprecationWarning) - p = _compiler.compile(pattern, flags) - if not (flags & DEBUG): + + key = (type(pattern), pattern, flags) + # Item in _cache should be moved to the end if found. + p = _cache.pop(key, None) + if p is None: + if isinstance(pattern, Pattern): + if flags: + raise ValueError( + "cannot process flags argument with a compiled pattern") + return pattern + if not _compiler.isstring(pattern): + raise TypeError("first argument must be string or compiled pattern") + if flags & T: + import warnings + warnings.warn("The re.TEMPLATE/re.T flag is deprecated " + "as it is an undocumented flag " + "without an obvious purpose. " + "Don't use it.", + DeprecationWarning) + p = _compiler.compile(pattern, flags) + if flags & DEBUG: + return p if len(_cache) >= _MAXCACHE: - # Drop the oldest item + # Drop the least recently used item. + # next(iter(_cache)) is known to have linear amortized time, + # but it is used here to avoid a dependency from using OrderedDict. 
+ # For the small _MAXCACHE value it doesn't make much of a difference. try: del _cache[next(iter(_cache))] except (StopIteration, RuntimeError, KeyError): pass - _cache[type(pattern), pattern, flags] = p + # Append to the end. + _cache[key] = p + + if len(_cache2) >= _MAXCACHE2: + # Drop the oldest item. + try: + del _cache2[next(iter(_cache2))] + except (StopIteration, RuntimeError, KeyError): + pass + _cache2[key] = p return p @functools.lru_cache(_MAXCACHE) -def _compile_repl(repl, pattern): +def _compile_template(pattern, repl): # internal: compile replacement pattern - return _parser.parse_template(repl, pattern) - -def _expand(pattern, match, template): - # internal: Match.expand implementation hook - template = _parser.parse_template(template, pattern) - return _parser.expand_template(template, match) - -def _subx(pattern, template): - # internal: Pattern.sub/subn implementation helper - template = _compile_repl(template, pattern) - if not template[0] and len(template[1]) == 1: - # literal replacement - return template[1][0] - def filter(match, template=template): - return _parser.expand_template(template, match) - return filter + return _sre.template(pattern, _parser.parse_template(repl, pattern)) # register myself for pickling diff --git a/Lib/re/_constants.py b/Lib/re/_constants.py index 10ee14bfab46ee..d8718d36075aac 100644 --- a/Lib/re/_constants.py +++ b/Lib/re/_constants.py @@ -13,7 +13,7 @@ # update when constants are added or removed -MAGIC = 20220615 +MAGIC = 20221023 from _sre import MAXREPEAT, MAXGROUPS diff --git a/Lib/re/_parser.py b/Lib/re/_parser.py index 0d9cf632ea7105..5709acb6267238 100644 --- a/Lib/re/_parser.py +++ b/Lib/re/_parser.py @@ -984,24 +984,28 @@ def parse(str, flags=0, state=None): return p -def parse_template(source, state): +def parse_template(source, pattern): # parse 're' replacement string into list of literals and # group references s = Tokenizer(source) sget = s.get - groups = [] - literals = [] + result = [] literal = [] lappend = literal.append + def addliteral(): + if s.istext: + result.append(''.join(literal)) + else: + # The tokenizer implicitly decodes bytes objects as latin-1, we must + # therefore re-encode the final representation. + result.append(''.join(literal).encode('latin-1')) + del literal[:] def addgroup(index, pos): - if index > state.groups: + if index > pattern.groups: raise s.error("invalid group reference %d" % index, pos) - if literal: - literals.append(''.join(literal)) - del literal[:] - groups.append((len(literals), index)) - literals.append(None) - groupindex = state.groupindex + addliteral() + result.append(index) + groupindex = pattern.groupindex while True: this = sget() if this is None: @@ -1063,22 +1067,5 @@ def addgroup(index, pos): lappend(this) else: lappend(this) - if literal: - literals.append(''.join(literal)) - if not isinstance(source, str): - # The tokenizer implicitly decodes bytes objects as latin-1, we must - # therefore re-encode the final representation. 
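A generic sketch of the two-level caching scheme used above, not the re module code itself: a small FIFO dict sits in front of a larger LRU dict, exploiting the fact that plain dicts keep insertion order; lookup and compute are placeholder names:

LRU_MAX, FIFO_MAX = 512, 256
lru, fifo = {}, {}

def lookup(key, compute):
    if key in fifo:                    # fast path: no reordering needed
        return fifo[key]
    value = lru.pop(key, None)         # LRU hit: re-insert at the end below
    if value is None:
        value = compute(key)
        if len(lru) >= LRU_MAX:
            del lru[next(iter(lru))]   # evict the least recently used entry
    lru[key] = value
    if len(fifo) >= FIFO_MAX:
        del fifo[next(iter(fifo))]     # evict the oldest entry
    fifo[key] = value
    return value

print(lookup('spam', str.upper))       # SPAM (computed, then cached in both dicts)
print(lookup('spam', str.upper))       # SPAM (served from the FIFO dict)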
- literals = [None if s is None else s.encode('latin-1') for s in literals] - return groups, literals - -def expand_template(template, match): - g = match.group - empty = match.string[:0] - groups, literals = template - literals = literals[:] - try: - for index, group in groups: - literals[index] = g(group) or empty - except IndexError: - raise error("invalid group reference %d" % index) from None - return empty.join(literals) + addliteral() + return result diff --git a/Lib/secrets.py b/Lib/secrets.py index 900381a89f53ec..566a09b7311154 100644 --- a/Lib/secrets.py +++ b/Lib/secrets.py @@ -13,7 +13,6 @@ import base64 -import binascii from hmac import compare_digest from random import SystemRandom @@ -56,7 +55,7 @@ def token_hex(nbytes=None): 'f9bf78b9a18ce6d46a0cd2b0b86df9da' """ - return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + return token_bytes(nbytes).hex() def token_urlsafe(nbytes=None): """Return a random URL-safe text string, in Base64 encoding. diff --git a/Lib/shutil.py b/Lib/shutil.py index ac1dd530528c0a..f372406a6c51a8 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -490,12 +490,13 @@ def _copytree(entries, src, dst, symlinks, ignore, copy_function, # otherwise let the copy occur. copy2 will raise an error if srcentry.is_dir(): copytree(srcobj, dstname, symlinks, ignore, - copy_function, dirs_exist_ok=dirs_exist_ok) + copy_function, ignore_dangling_symlinks, + dirs_exist_ok) else: copy_function(srcobj, dstname) elif srcentry.is_dir(): copytree(srcobj, dstname, symlinks, ignore, copy_function, - dirs_exist_ok=dirs_exist_ok) + ignore_dangling_symlinks, dirs_exist_ok) else: # Will raise a SpecialFileError for unsupported file types copy_function(srcobj, dstname) @@ -564,18 +565,6 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, dirs_exist_ok=dirs_exist_ok) if hasattr(os.stat_result, 'st_file_attributes'): - # Special handling for directory junctions to make them behave like - # symlinks for shutil.rmtree, since in general they do not appear as - # regular links. - def _rmtree_isdir(entry): - try: - st = entry.stat(follow_symlinks=False) - return (stat.S_ISDIR(st.st_mode) and not - (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT - and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT)) - except OSError: - return False - def _rmtree_islink(path): try: st = os.lstat(path) @@ -585,12 +574,6 @@ def _rmtree_islink(path): except OSError: return False else: - def _rmtree_isdir(entry): - try: - return entry.is_dir(follow_symlinks=False) - except OSError: - return False - def _rmtree_islink(path): return os.path.islink(path) @@ -604,7 +587,12 @@ def _rmtree_unsafe(path, onerror): entries = [] for entry in entries: fullname = entry.path - if _rmtree_isdir(entry): + try: + is_dir = entry.is_dir(follow_symlinks=False) + except OSError: + is_dir = False + + if is_dir and not entry.is_junction(): try: if entry.is_symlink(): # This can only happen if someone replaces diff --git a/Lib/smtplib.py b/Lib/smtplib.py index 324a1c19f12afe..05d2f8ccd73c98 100755 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -749,14 +749,14 @@ def login(self, user, password, *, initial_response_ok=True): # We could not login successfully. Return result of last attempt. raise last_exception - def starttls(self, keyfile=None, certfile=None, context=None): + def starttls(self, *, context=None): """Puts the connection to the SMTP server into TLS mode. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. 
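For the secrets.token_hex() simplification above, a one-line check that the two spellings are equivalent; the patch merely drops the binascii dependency in favour of the bytes.hex() method:

import binascii
import os

raw = os.urandom(16)
assert binascii.hexlify(raw).decode('ascii') == raw.hex()
print(raw.hex())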
If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the keyfile and certfile parameters, + session. If you provide the context parameter, the identity of the SMTP server and client can be checked. This, however, depends on whether the socket module really checks the certificates. @@ -774,19 +774,8 @@ def starttls(self, keyfile=None, certfile=None, context=None): if resp == 220: if not _have_ssl: raise RuntimeError("No SSL support included in this Python") - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) if context is None: - context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.sock = context.wrap_socket(self.sock, server_hostname=self._host) self.file = None @@ -1017,35 +1006,18 @@ class SMTP_SSL(SMTP): compiled with SSL support). If host is not specified, '' (the local host) is used. If port is omitted, the standard SMTP-over-SSL port (465) is used. local_hostname and source_address have the same meaning - as they do in the SMTP class. keyfile and certfile are also optional - - they can contain a PEM formatted private key and certificate chain file - for the SSL connection. context also optional, can contain a - SSLContext, and is an alternative to keyfile and certfile; If it is - specified both keyfile and certfile must be None. + as they do in the SMTP class. context also optional, can contain a + SSLContext. 
""" default_port = SMTP_SSL_PORT def __init__(self, host='', port=0, local_hostname=None, - keyfile=None, certfile=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + *, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, context=None): - if context is not None and keyfile is not None: - raise ValueError("context and keyfile arguments are mutually " - "exclusive") - if context is not None and certfile is not None: - raise ValueError("context and certfile arguments are mutually " - "exclusive") - if keyfile is not None or certfile is not None: - import warnings - warnings.warn("keyfile and certfile are deprecated, use a " - "custom context instead", DeprecationWarning, 2) - self.keyfile = keyfile - self.certfile = certfile if context is None: - context = ssl._create_stdlib_context(certfile=certfile, - keyfile=keyfile) + context = ssl._create_stdlib_context() self.context = context SMTP.__init__(self, host, port, local_hostname, timeout, source_address) diff --git a/Lib/sndhdr.py b/Lib/sndhdr.py index 98a783448239a8..45def9ad16d32c 100644 --- a/Lib/sndhdr.py +++ b/Lib/sndhdr.py @@ -77,6 +77,7 @@ def whathdr(filename): tests = [] def test_aifc(h, f): + """AIFC and AIFF files""" with warnings.catch_warnings(): warnings.simplefilter('ignore', category=DeprecationWarning) import aifc @@ -100,6 +101,7 @@ def test_aifc(h, f): def test_au(h, f): + """AU and SND files""" if h.startswith(b'.snd'): func = get_long_be elif h[:4] in (b'\0ds.', b'dns.'): @@ -133,6 +135,7 @@ def test_au(h, f): def test_hcom(h, f): + """HCOM file""" if h[65:69] != b'FSSD' or h[128:132] != b'HCOM': return None divisor = get_long_be(h[144:148]) @@ -146,6 +149,7 @@ def test_hcom(h, f): def test_voc(h, f): + """VOC file""" if not h.startswith(b'Creative Voice File\032'): return None sbseek = get_short_le(h[20:22]) @@ -160,6 +164,7 @@ def test_voc(h, f): def test_wav(h, f): + """WAV file""" import wave # 'RIFF' 'WAVE' 'fmt ' if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ': @@ -176,6 +181,7 @@ def test_wav(h, f): def test_8svx(h, f): + """8SVX file""" if not h.startswith(b'FORM') or h[8:12] != b'8SVX': return None # Should decode it to get #channels -- assume always 1 @@ -185,6 +191,7 @@ def test_8svx(h, f): def test_sndt(h, f): + """SNDT file""" if h.startswith(b'SOUND'): nsamples = get_long_le(h[8:12]) rate = get_short_le(h[20:22]) @@ -194,6 +201,7 @@ def test_sndt(h, f): def test_sndr(h, f): + """SNDR file""" if h.startswith(b'\0\0'): rate = get_short_le(h[2:4]) if 4000 <= rate <= 25000: diff --git a/Lib/statistics.py b/Lib/statistics.py index b4adabd3f05ae8..07d1fd5ba6e98e 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -1446,3 +1446,9 @@ def __hash__(self): def __repr__(self): return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})' + + def __getstate__(self): + return self._mu, self._sigma + + def __setstate__(self, state): + self._mu, self._sigma = state diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 760b93b47ebba6..9cadd1bf8e622c 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -509,7 +509,8 @@ def run(*popenargs, The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, + or pass capture_output=True to capture both. If check is True and the exit code was non-zero, it raises a CalledProcessError. 
The CalledProcessError object will have the return code diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py index ebe37118274222..c61100a6da8503 100644 --- a/Lib/sysconfig.py +++ b/Lib/sysconfig.py @@ -2,6 +2,7 @@ import os import sys +import threading from os.path import pardir, realpath __all__ = [ @@ -172,7 +173,11 @@ def joinuser(*args): _BASE_PREFIX = os.path.normpath(sys.base_prefix) _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) +# Mutex guarding initialization of _CONFIG_VARS. +_CONFIG_VARS_LOCK = threading.RLock() _CONFIG_VARS = None +# True iff _CONFIG_VARS has been fully initialized. +_CONFIG_VARS_INITIALIZED = False _USER_BASE = None # Regexes needed for parsing Makefile (and similar syntaxes, @@ -539,7 +544,12 @@ def _init_non_posix(vars): vars['LIBDEST'] = get_path('stdlib') vars['BINLIBDEST'] = get_path('platstdlib') vars['INCLUDEPY'] = get_path('include') - vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0] + try: + # GH-99201: _imp.extension_suffixes may be empty when + # HAVE_DYNAMIC_LOADING is not set. In this case, don't set EXT_SUFFIX. + vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0] + except IndexError: + pass vars['EXE'] = '.exe' vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) @@ -626,6 +636,71 @@ def get_path(name, scheme=get_default_scheme(), vars=None, expand=True): return get_paths(scheme, vars, expand)[name] +def _init_config_vars(): + global _CONFIG_VARS + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # Distutils. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT + _CONFIG_VARS['installed_base'] = _BASE_PREFIX + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + _CONFIG_VARS['platlibdir'] = sys.platlibdir + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + try: + _CONFIG_VARS['py_version_nodot_plat'] = sys.winver.replace('.', '') + except AttributeError: + _CONFIG_VARS['py_version_nodot_plat'] = '' + + if os.name == 'nt': + _init_non_posix(_CONFIG_VARS) + _CONFIG_VARS['VPATH'] = sys._vpath + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + if _HAS_USER_BASE: + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + _CONFIG_VARS['userbase'] = _getuserbase() + + # Always convert srcdir to an absolute path + srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE) + if os.name == 'posix': + if _PYTHON_BUILD: + # If srcdir is a relative path (typically '.' or '..') + # then it should be interpreted relative to the directory + # containing Makefile. + base = os.path.dirname(get_makefile_filename()) + srcdir = os.path.join(base, srcdir) + else: + # srcdir is not meaningful since the installation is + # spread about the filesystem. We choose the + # directory containing the Makefile since we know it + # exists. 
+ srcdir = os.path.dirname(get_makefile_filename()) + _CONFIG_VARS['srcdir'] = _safe_realpath(srcdir) + + # OS X platforms require special customization to handle + # multi-architecture, multi-os-version installers + if sys.platform == 'darwin': + import _osx_support + _osx_support.customize_config_vars(_CONFIG_VARS) + + global _CONFIG_VARS_INITIALIZED + _CONFIG_VARS_INITIALIZED = True + + def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. @@ -636,66 +711,16 @@ def get_config_vars(*args): With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary. """ - global _CONFIG_VARS - if _CONFIG_VARS is None: - _CONFIG_VARS = {} - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # Distutils. - _CONFIG_VARS['prefix'] = _PREFIX - _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX - _CONFIG_VARS['py_version'] = _PY_VERSION - _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT - _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT - _CONFIG_VARS['installed_base'] = _BASE_PREFIX - _CONFIG_VARS['base'] = _PREFIX - _CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX - _CONFIG_VARS['platbase'] = _EXEC_PREFIX - _CONFIG_VARS['projectbase'] = _PROJECT_BASE - _CONFIG_VARS['platlibdir'] = sys.platlibdir - try: - _CONFIG_VARS['abiflags'] = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. - _CONFIG_VARS['abiflags'] = '' - try: - _CONFIG_VARS['py_version_nodot_plat'] = sys.winver.replace('.', '') - except AttributeError: - _CONFIG_VARS['py_version_nodot_plat'] = '' - - if os.name == 'nt': - _init_non_posix(_CONFIG_VARS) - _CONFIG_VARS['VPATH'] = sys._vpath - if os.name == 'posix': - _init_posix(_CONFIG_VARS) - if _HAS_USER_BASE: - # Setting 'userbase' is done below the call to the - # init function to enable using 'get_config_var' in - # the init-function. - _CONFIG_VARS['userbase'] = _getuserbase() - - # Always convert srcdir to an absolute path - srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE) - if os.name == 'posix': - if _PYTHON_BUILD: - # If srcdir is a relative path (typically '.' or '..') - # then it should be interpreted relative to the directory - # containing Makefile. - base = os.path.dirname(get_makefile_filename()) - srcdir = os.path.join(base, srcdir) - else: - # srcdir is not meaningful since the installation is - # spread about the filesystem. We choose the - # directory containing the Makefile since we know it - # exists. - srcdir = os.path.dirname(get_makefile_filename()) - _CONFIG_VARS['srcdir'] = _safe_realpath(srcdir) - - # OS X platforms require special customization to handle - # multi-architecture, multi-os-version installers - if sys.platform == 'darwin': - import _osx_support - _osx_support.customize_config_vars(_CONFIG_VARS) + + # Avoid claiming the lock once initialization is complete. + if not _CONFIG_VARS_INITIALIZED: + with _CONFIG_VARS_LOCK: + # Test again with the lock held to avoid races. Note that + # we test _CONFIG_VARS here, not _CONFIG_VARS_INITIALIZED, + # to ensure that recursive calls to get_config_vars() + # don't re-enter init_config_vars(). 
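A generic sketch of the initialization pattern used above (placeholder names, not the sysconfig code): an unlocked fast-path flag, a re-check under an RLock, and a test of the cache object itself so that recursive calls made during initialization do not re-enter the init function:

import threading

_LOCK = threading.RLock()
_CACHE = None            # populated lazily
_INITIALIZED = False     # set only once _CACHE is fully built

def _init_cache():
    global _CACHE, _INITIALIZED
    _CACHE = {}
    # ... expensive population; may call get_cache() recursively ...
    _INITIALIZED = True

def get_cache():
    # Fast path: skip the lock entirely once initialization has completed.
    if not _INITIALIZED:
        with _LOCK:
            # Re-check under the lock; testing _CACHE rather than the flag
            # keeps recursive calls from re-entering _init_cache().
            if _CACHE is None:
                _init_cache()
    return _CACHE

print(get_cache())       # {}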
+ if _CONFIG_VARS is None: + _init_config_vars() if args: vals = [] diff --git a/Lib/tarfile.py b/Lib/tarfile.py index a08f247f496b3d..42100e9a39436e 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -57,13 +57,9 @@ grp = None # os.symlink on Windows prior to 6.0 raises NotImplementedError -symlink_exception = (AttributeError, NotImplementedError) -try: - # OSError (winerror=1314) will be raised if the caller does not hold the - # SeCreateSymbolicLinkPrivilege privilege - symlink_exception += (OSError,) -except NameError: - pass +# OSError (winerror=1314) will be raised if the caller does not hold the +# SeCreateSymbolicLinkPrivilege privilege +symlink_exception = (AttributeError, NotImplementedError, OSError) # from tarfile import * __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", diff --git a/Lib/test/_test_embed_set_config.py b/Lib/test/_test_embed_set_config.py index 7ff641b37bf188..0c016b5d75d734 100644 --- a/Lib/test/_test_embed_set_config.py +++ b/Lib/test/_test_embed_set_config.py @@ -84,7 +84,6 @@ def test_set_invalid(self): 'skip_source_first_line', '_install_importlib', '_init_main', - '_isolated_interpreter', ] if MS_WINDOWS: options.append('legacy_windows_stdio') diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index f74d8d7fbd726c..a66f4f5b897cd3 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -5698,45 +5698,48 @@ def test_joinable_queue(self): @classmethod def _test_list(cls, obj): - assert obj[0] == 5 - assert obj.count(5) == 1 - assert obj.index(5) == 0 + case = unittest.TestCase() + case.assertEqual(obj[0], 5) + case.assertEqual(obj.count(5), 1) + case.assertEqual(obj.index(5), 0) obj.sort() obj.reverse() for x in obj: pass - assert len(obj) == 1 - assert obj.pop(0) == 5 + case.assertEqual(len(obj), 1) + case.assertEqual(obj.pop(0), 5) def test_list(self): o = self.manager.list() o.append(5) self.run_worker(self._test_list, o) - assert not o + self.assertIsNotNone(o) self.assertEqual(len(o), 0) @classmethod def _test_dict(cls, obj): - assert len(obj) == 1 - assert obj['foo'] == 5 - assert obj.get('foo') == 5 - assert list(obj.items()) == [('foo', 5)] - assert list(obj.keys()) == ['foo'] - assert list(obj.values()) == [5] - assert obj.copy() == {'foo': 5} - assert obj.popitem() == ('foo', 5) + case = unittest.TestCase() + case.assertEqual(len(obj), 1) + case.assertEqual(obj['foo'], 5) + case.assertEqual(obj.get('foo'), 5) + case.assertListEqual(list(obj.items()), [('foo', 5)]) + case.assertListEqual(list(obj.keys()), ['foo']) + case.assertListEqual(list(obj.values()), [5]) + case.assertDictEqual(obj.copy(), {'foo': 5}) + case.assertTupleEqual(obj.popitem(), ('foo', 5)) def test_dict(self): o = self.manager.dict() o['foo'] = 5 self.run_worker(self._test_dict, o) - assert not o + self.assertIsNotNone(o) self.assertEqual(len(o), 0) @classmethod def _test_value(cls, obj): - assert obj.value == 1 - assert obj.get() == 1 + case = unittest.TestCase() + case.assertEqual(obj.value, 1) + case.assertEqual(obj.get(), 1) obj.set(2) def test_value(self): @@ -5747,10 +5750,11 @@ def test_value(self): @classmethod def _test_array(cls, obj): - assert obj[0] == 0 - assert obj[1] == 1 - assert len(obj) == 2 - assert list(obj) == [0, 1] + case = unittest.TestCase() + case.assertEqual(obj[0], 0) + case.assertEqual(obj[1], 1) + case.assertEqual(len(obj), 2) + case.assertListEqual(list(obj), [0, 1]) def test_array(self): o = self.manager.Array('i', [0, 1]) @@ -5758,8 +5762,9 @@ def 
test_array(self): @classmethod def _test_namespace(cls, obj): - assert obj.x == 0 - assert obj.y == 1 + case = unittest.TestCase() + case.assertEqual(obj.x, 0) + case.assertEqual(obj.y, 1) def test_namespace(self): o = self.manager.Namespace() diff --git a/Lib/test/_test_venv_multiprocessing.py b/Lib/test/_test_venv_multiprocessing.py new file mode 100644 index 00000000000000..af72e915ba52bb --- /dev/null +++ b/Lib/test/_test_venv_multiprocessing.py @@ -0,0 +1,40 @@ +import multiprocessing +import random +import sys +import time + +def fill_queue(queue, code): + queue.put(code) + + +def drain_queue(queue, code): + if code != queue.get(): + sys.exit(1) + + +def test_func(): + code = random.randrange(0, 1000) + queue = multiprocessing.Queue() + fill_pool = multiprocessing.Process( + target=fill_queue, + args=(queue, code) + ) + drain_pool = multiprocessing.Process( + target=drain_queue, + args=(queue, code) + ) + drain_pool.start() + fill_pool.start() + fill_pool.join() + drain_pool.join() + + +def main(): + test_pool = multiprocessing.Process(target=test_func) + test_pool.start() + test_pool.join() + sys.exit(test_pool.exitcode) + + +if __name__ == "__main__": + main() diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index 66c08f7f2ff8d5..bf56cea541d121 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -419,6 +419,48 @@ def hook(event, args): sys._getframe() +def test_threading(): + import _thread + + def hook(event, args): + if event.startswith(("_thread.", "cpython.PyThreadState", "test.")): + print(event, args) + + sys.addaudithook(hook) + + lock = _thread.allocate_lock() + lock.acquire() + + class test_func: + def __repr__(self): return "" + def __call__(self): + sys.audit("test.test_func") + lock.release() + + i = _thread.start_new_thread(test_func(), ()) + lock.acquire() + + +def test_threading_abort(): + # Ensures that aborting PyThreadState_New raises the correct exception + import _thread + + class ThreadNewAbortError(Exception): + pass + + def hook(event, args): + if event == "cpython.PyThreadState_New": + raise ThreadNewAbortError() + + sys.addaudithook(hook) + + try: + _thread.start_new_thread(lambda: None, ()) + except ThreadNewAbortError: + # Other exceptions are raised and the test will fail + pass + + def test_wmi_exec_query(): import _wmi @@ -429,6 +471,37 @@ def hook(event, args): sys.addaudithook(hook) _wmi.exec_query("SELECT * FROM Win32_OperatingSystem") +def test_syslog(): + import syslog + + def hook(event, args): + if event.startswith("syslog."): + print(event, *args) + + sys.addaudithook(hook) + syslog.openlog('python') + syslog.syslog('test') + syslog.setlogmask(syslog.LOG_DEBUG) + syslog.closelog() + # implicit open + syslog.syslog('test2') + # open with default ident + syslog.openlog(logoption=syslog.LOG_NDELAY, facility=syslog.LOG_LOCAL0) + sys.argv = None + syslog.openlog() + syslog.closelog() + + +def test_not_in_gc(): + import gc + + hook = lambda *a: None + sys.addaudithook(hook) + + for o in gc.get_objects(): + if isinstance(o, list): + assert hook not in o + if __name__ == "__main__": from test.support import suppress_msvcrt_asserts diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index 47e3e02490c816..0d844234d9d1f6 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -1740,29 +1740,18 @@ test_str_converter_encoding(PyObject *module, PyObject *const *args, Py_ssize_t goto exit; } return_value = test_str_converter_encoding_impl(module, a, b, c, d, d_length, e, e_length); + /* Post parse cleanup for a */ 
+ PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + /* Post parse cleanup for c */ + PyMem_FREE(c); + /* Post parse cleanup for d */ + PyMem_FREE(d); + /* Post parse cleanup for e */ + PyMem_FREE(e); exit: - /* Cleanup for a */ - if (a) { - PyMem_FREE(a); - } - /* Cleanup for b */ - if (b) { - PyMem_FREE(b); - } - /* Cleanup for c */ - if (c) { - PyMem_FREE(c); - } - /* Cleanup for d */ - if (d) { - PyMem_FREE(d); - } - /* Cleanup for e */ - if (e) { - PyMem_FREE(e); - } - return return_value; } @@ -1770,7 +1759,7 @@ static PyObject * test_str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, char *d, Py_ssize_t d_length, char *e, Py_ssize_t e_length) -/*[clinic end generated code: output=8acb886a3843f3bc input=eb4c38e1f898f402]*/ +/*[clinic end generated code: output=999c1deecfa15b0a input=eb4c38e1f898f402]*/ /*[clinic input] @@ -3793,7 +3782,7 @@ test_vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t narg a = args[0]; __clinic_args = PyTuple_New(nargs - 1); for (Py_ssize_t i = 0; i < nargs - 1; ++i) { - PyTuple_SET_ITEM(__clinic_args, i, args[1 + i]); + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[1 + i])); } return_value = test_vararg_and_posonly_impl(module, a, __clinic_args); @@ -3804,7 +3793,7 @@ exit: static PyObject * test_vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=548bca3a127c22c1 input=08dc2bf7afbf1613]*/ +/*[clinic end generated code: output=081a953b8cbe7617 input=08dc2bf7afbf1613]*/ /*[clinic input] test_vararg @@ -3856,7 +3845,6 @@ test_vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject }; #undef KWTUPLE PyObject *argsbuf[2]; - Py_ssize_t noptargs = 0 + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; PyObject *a; PyObject *__clinic_args = NULL; @@ -3875,7 +3863,7 @@ exit: static PyObject * test_vararg_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=6661f3ca97d85e8c input=81d33815ad1bae6e]*/ +/*[clinic end generated code: output=880365c61ae205d7 input=81d33815ad1bae6e]*/ /*[clinic input] test_vararg_with_default @@ -3929,7 +3917,7 @@ test_vararg_with_default(PyObject *module, PyObject *const *args, Py_ssize_t nar }; #undef KWTUPLE PyObject *argsbuf[3]; - Py_ssize_t noptargs = 0 + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; PyObject *a; PyObject *__clinic_args = NULL; int b = 0; @@ -3958,7 +3946,7 @@ exit: static PyObject * test_vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, int b) -/*[clinic end generated code: output=5fe3cfccb1bef781 input=6e110b54acd9b22d]*/ +/*[clinic end generated code: output=291e9a5a09831128 input=6e110b54acd9b22d]*/ /*[clinic input] test_vararg_with_only_defaults diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index bba96698e9e2eb..121d973b6d5f20 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1489,6 +1489,9 @@ def test_strftime(self): #check that this standard extension works t.strftime("%f") + # bpo-41260: The parameter was named "fmt" in the pure python impl. + t.strftime(format="%f") + def test_strftime_trailing_percent(self): # bpo-35066: Make sure trailing '%' doesn't cause datetime's strftime to # complain. 
Different libcs have different handling of trailing diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 655e4d2e56f8f0..3eeef029b22d48 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -28,6 +28,11 @@ # Must be smaller than buildbot "1200 seconds without output" limit. EXIT_TIMEOUT = 120.0 +EXITCODE_BAD_TEST = 2 +EXITCODE_INTERRUPTED = 130 +EXITCODE_ENV_CHANGED = 3 +EXITCODE_NO_TESTS_RAN = 4 + class Regrtest: """Execute a test suite. @@ -493,15 +498,18 @@ def display_header(self): print("== encodings: locale=%s, FS=%s" % (locale.getencoding(), sys.getfilesystemencoding())) + def no_tests_run(self): + return not any((self.good, self.bad, self.skipped, self.interrupted, + self.environment_changed)) + def get_tests_result(self): result = [] if self.bad: result.append("FAILURE") elif self.ns.fail_env_changed and self.environment_changed: result.append("ENV CHANGED") - elif not any((self.good, self.bad, self.skipped, self.interrupted, - self.environment_changed)): - result.append("NO TEST RUN") + elif self.no_tests_run(): + result.append("NO TESTS RAN") if self.interrupted: result.append("INTERRUPTED") @@ -750,11 +758,13 @@ def _main(self, tests, kwargs): self.save_xml_result() if self.bad: - sys.exit(2) + sys.exit(EXITCODE_BAD_TEST) if self.interrupted: - sys.exit(130) + sys.exit(EXITCODE_INTERRUPTED) if self.ns.fail_env_changed and self.environment_changed: - sys.exit(3) + sys.exit(EXITCODE_ENV_CHANGED) + if self.no_tests_run(): + sys.exit(EXITCODE_NO_TESTS_RAN) sys.exit(0) diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index a0538cbb3c3772..4298fa806e1065 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -73,7 +73,6 @@ def get_pooled_int(value): fd_deltas = [0] * repcount getallocatedblocks = sys.getallocatedblocks gettotalrefcount = sys.gettotalrefcount - _getquickenedcount = sys._getquickenedcount fd_count = os_helper.fd_count # initialize variables to make pyflakes quiet rc_before = alloc_before = fd_before = 0 @@ -93,7 +92,7 @@ def get_pooled_int(value): support.gc_collect() # Read memory statistics immediately after the garbage collection - alloc_after = getallocatedblocks() - _getquickenedcount() + alloc_after = getallocatedblocks() rc_after = gettotalrefcount() fd_after = fd_count() diff --git a/Lib/test/libregrtest/runtest_mp.py b/Lib/test/libregrtest/runtest_mp.py index a4d3e5c3146a8c..a12fcb46e0fd0b 100644 --- a/Lib/test/libregrtest/runtest_mp.py +++ b/Lib/test/libregrtest/runtest_mp.py @@ -22,6 +22,9 @@ from test.libregrtest.setup import setup_tests from test.libregrtest.utils import format_duration, print_warning +if sys.platform == 'win32': + import locale + # Display the running tests if nothing happened last N seconds PROGRESS_UPDATE = 30.0 # seconds @@ -267,11 +270,16 @@ def _run_process(self, test_name: str, tmp_dir: str, stdout_fh: TextIO) -> int: self.current_test_name = None def _runtest(self, test_name: str) -> MultiprocessResult: + if sys.platform == 'win32': + # gh-95027: When stdout is not a TTY, Python uses the ANSI code + # page for the sys.stdout encoding. If the main process runs in a + # terminal, sys.stdout uses WindowsConsoleIO with UTF-8 encoding. + encoding = locale.getencoding() + else: + encoding = sys.stdout.encoding # gh-94026: Write stdout+stderr to a tempfile as workaround for # non-blocking pipes on Emscripten with NodeJS. 
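The gh-95027 and gh-94026 notes here amount to a simple recipe: pick the encoding the child process will actually use for its output (the ANSI code page on Windows when stdout is not a console, the parent's stdout encoding elsewhere) and buffer that output through a temporary file rather than a pipe. A rough self-contained sketch of that recipe, assuming a plain subprocess.run() call rather than the way libregrtest actually launches its workers:

import locale
import subprocess
import sys
import tempfile

def run_and_capture(cmd):
    if sys.platform == 'win32':
        # Non-console child processes encode their output with the ANSI
        # code page, not with the parent's (possibly UTF-8) console encoding.
        encoding = locale.getencoding()
    else:
        encoding = sys.stdout.encoding
    # Route stdout+stderr through a temporary file instead of a pipe
    # (see the gh-94026 note above) and read it back after the child exits.
    with tempfile.TemporaryFile('w+', encoding=encoding) as fh:
        proc = subprocess.run(cmd, stdout=fh, stderr=subprocess.STDOUT)
        fh.seek(0)
        return proc.returncode, fh.read()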
- with tempfile.TemporaryFile( - 'w+', encoding=sys.stdout.encoding - ) as stdout_fh: + with tempfile.TemporaryFile('w+', encoding=encoding) as stdout_fh: # gh-93353: Check for leaked temporary files in the parent process, # since the deletion of temporary files can happen late during # Python finalization: too late for libregrtest. diff --git a/Lib/test/libregrtest/save_env.py b/Lib/test/libregrtest/save_env.py index 60c9be24617a65..cc5870ab2b833e 100644 --- a/Lib/test/libregrtest/save_env.py +++ b/Lib/test/libregrtest/save_env.py @@ -161,11 +161,11 @@ def restore_warnings_filters(self, saved_filters): warnings.filters[:] = saved_filters[2] def get_asyncore_socket_map(self): - asyncore = sys.modules.get('asyncore') + asyncore = sys.modules.get('test.support.asyncore') # XXX Making a copy keeps objects alive until __exit__ gets called. return asyncore and asyncore.socket_map.copy() or {} def restore_asyncore_socket_map(self, saved_map): - asyncore = sys.modules.get('asyncore') + asyncore = sys.modules.get('test.support.asyncore') if asyncore is not None: asyncore.close_all(ignore_all=True) asyncore.socket_map.update(saved_map) diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 332dcc4c6db247..e6909170334e15 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -124,15 +124,6 @@ def clear_caches(): if stream is not None: stream.flush() - # Clear assorted module caches. - # Don't worry about resetting the cache if the module is not loaded - try: - distutils_dir_util = sys.modules['distutils.dir_util'] - except KeyError: - pass - else: - distutils_dir_util._path_created.clear() - try: re = sys.modules['re'] except KeyError: diff --git a/Lib/test/pydocfodder.py b/Lib/test/pydocfodder.py index 2530320a222726..d0750e5a43c0c0 100644 --- a/Lib/test/pydocfodder.py +++ b/Lib/test/pydocfodder.py @@ -2,85 +2,7 @@ import types -class A_classic: - "A classic class." - def A_method(self): - "Method defined in A." - def AB_method(self): - "Method defined in A and B." - def AC_method(self): - "Method defined in A and C." - def AD_method(self): - "Method defined in A and D." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - - -class B_classic(A_classic): - "A classic class, derived from A_classic." - def AB_method(self): - "Method defined in A and B." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def B_method(self): - "Method defined in B." - def BC_method(self): - "Method defined in B and C." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - -class C_classic(A_classic): - "A classic class, derived from A_classic." - def AC_method(self): - "Method defined in A and C." - def ABC_method(self): - "Method defined in A, B and C." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BC_method(self): - "Method defined in B and C." - def BCD_method(self): - "Method defined in B, C and D." - def C_method(self): - "Method defined in C." - def CD_method(self): - "Method defined in C and D." - -class D_classic(B_classic, C_classic): - "A classic class, derived from B_classic and C_classic." 
- def AD_method(self): - "Method defined in A and D." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - def CD_method(self): - "Method defined in C and D." - def D_method(self): - "Method defined in D." - - -class A_new(object): +class A_new: "A new-style class." def A_method(self): diff --git a/Lib/test/smtpd.py b/Lib/test/smtpd.py index f9d4b048a83f68..6052232ec2b585 100755 --- a/Lib/test/smtpd.py +++ b/Lib/test/smtpd.py @@ -77,7 +77,7 @@ import time import socket import collections -from test.support.import_helper import import_module +from test.support import asyncore, asynchat from warnings import warn from email._header_value_parser import get_addr_spec, get_angle_addr @@ -85,9 +85,6 @@ "SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy", ] -asyncore = import_module('asyncore', deprecated=True) -asynchat = import_module('asynchat', deprecated=True) - program = sys.argv[0] __version__ = 'Python SMTP proxy version 0.3' diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py index e998146c190df4..709cac7a27a449 100644 --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -505,6 +505,11 @@ def test_split(self): self.checkraises(ValueError, 'hello', 'split', '', 0) def test_rsplit(self): + # without arg + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit') + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit') + self.checkequal([], '', 'rsplit') + # by a char self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', '|') self.checkequal(['a|b|c', 'd'], 'a|b|c|d', 'rsplit', '|', 1) @@ -558,6 +563,9 @@ def test_rsplit(self): # with keyword args self.checkequal(['a', 'b', 'c', 'd'], 'a|b|c|d', 'rsplit', sep='|') + self.checkequal(['a', 'b', 'c', 'd'], 'a b c d', 'rsplit', sep=None) + self.checkequal(['a b c', 'd'], + 'a b c d', 'rsplit', sep=None, maxsplit=1) self.checkequal(['a|b|c', 'd'], 'a|b|c|d', 'rsplit', '|', maxsplit=1) self.checkequal(['a|b|c', 'd'], diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 9fdad641232c4f..a631bfc80cfaf0 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1503,7 +1503,7 @@ def _platform_specific(self): self._env = {k.upper(): os.getenv(k) for k in os.environ} self._env["PYTHONHOME"] = os.path.dirname(self.real) - if sysconfig.is_python_build(True): + if sysconfig.is_python_build(): self._env["PYTHONPATH"] = STDLIB_DIR else: def _platform_specific(self): @@ -1793,6 +1793,22 @@ def run_in_subinterp(code): Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc module is enabled. """ + _check_tracemalloc() + import _testcapi + return _testcapi.run_in_subinterp(code) + + +def run_in_subinterp_with_config(code, **config): + """ + Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc + module is enabled. 
+ """ + _check_tracemalloc() + import _testcapi + return _testcapi.run_in_subinterp_with_config(code, **config) + + +def _check_tracemalloc(): # Issue #10915, #15751: PyGILState_*() functions don't work with # sub-interpreters, the tracemalloc module uses these functions internally try: @@ -1804,8 +1820,6 @@ def run_in_subinterp(code): raise unittest.SkipTest("run_in_subinterp() cannot be used " "if tracemalloc module is tracing " "memory allocations") - import _testcapi - return _testcapi.run_in_subinterp(code) def check_free_after_iterating(test, iter, cls, args=()): @@ -2083,7 +2097,7 @@ def wait_process(pid, *, exitcode, timeout=None): Raise an AssertionError if the process exit code is not equal to exitcode. - If the process runs longer than timeout seconds (SHORT_TIMEOUT by default), + If the process runs longer than timeout seconds (LONG_TIMEOUT by default), kill the process (if signal.SIGKILL is available) and raise an AssertionError. The timeout feature is not available on Windows. """ @@ -2091,7 +2105,7 @@ def wait_process(pid, *, exitcode, timeout=None): import signal if timeout is None: - timeout = SHORT_TIMEOUT + timeout = LONG_TIMEOUT start_time = time.monotonic() for _ in sleeping_retry(timeout, error=False): diff --git a/Lib/asynchat.py b/Lib/test/support/asynchat.py similarity index 97% rename from Lib/asynchat.py rename to Lib/test/support/asynchat.py index bed797e989e136..38c47a1fda683e 100644 --- a/Lib/asynchat.py +++ b/Lib/test/support/asynchat.py @@ -1,3 +1,8 @@ +# TODO: This module was deprecated and removed from CPython 3.12 +# Now it is a test-only helper. Any attempts to rewrite existing tests that +# are using this module and remove it completely are appreciated! +# See: https://github.com/python/cpython/issues/72719 + # -*- Mode: Python; tab-width: 4 -*- # Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp # Author: Sam Rushing @@ -45,15 +50,10 @@ method) up to the terminator, and then control will be returned to you - by calling your self.found_terminator() method. """ -import asyncore -from collections import deque -from warnings import _deprecated - -_DEPRECATION_MSG = ('The {name} module is deprecated and will be removed in ' - 'Python {remove}. The recommended replacement is asyncio') -_deprecated(__name__, _DEPRECATION_MSG, remove=(3, 12)) + +from collections import deque +from test.support import asyncore class async_chat(asyncore.dispatcher): diff --git a/Lib/asyncore.py b/Lib/test/support/asyncore.py similarity index 98% rename from Lib/asyncore.py rename to Lib/test/support/asyncore.py index 57c86871f3dcf0..401fa60bcf35f2 100644 --- a/Lib/asyncore.py +++ b/Lib/test/support/asyncore.py @@ -1,3 +1,8 @@ +# TODO: This module was deprecated and removed from CPython 3.12 +# Now it is a test-only helper. Any attempts to rewrite existing tests that +# are using this module and remove it completely are appreciated! +# See: https://github.com/python/cpython/issues/72719 + # -*- Mode: Python -*- # Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp # Author: Sam Rushing @@ -57,10 +62,6 @@ ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \ errorcode -_DEPRECATION_MSG = ('The {name} module is deprecated and will be removed in ' - 'Python {remove}.
The recommended replacement is asyncio') -warnings._deprecated(__name__, _DEPRECATION_MSG, remove=(3, 12)) - _DISCONNECTED = frozenset({ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE, EBADF}) diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index eb4ae1a68e3fc1..65ae7a227baafe 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -3,7 +3,7 @@ import unittest import dis import io -from _testinternalcapi import optimize_cfg +from _testinternalcapi import compiler_codegen, optimize_cfg _UNSPECIFIED = object() @@ -44,8 +44,7 @@ def assertNotInBytecode(self, x, opname, argval=_UNSPECIFIED): msg = msg % (opname, argval, disassembly) self.fail(msg) - -class CfgOptimizationTestCase(unittest.TestCase): +class CompilationStepTestCase(unittest.TestCase): HAS_ARG = set(dis.hasarg) HAS_TARGET = set(dis.hasjrel + dis.hasjabs + dis.hasexc) @@ -58,24 +57,35 @@ def Label(self): self.last_label += 1 return self.last_label - def complete_insts_info(self, insts): - # fill in omitted fields in location, and oparg 0 for ops with no arg. - instructions = [] - for item in insts: - if isinstance(item, int): - instructions.append(item) - else: - assert isinstance(item, tuple) - inst = list(reversed(item)) - opcode = dis.opmap[inst.pop()] - oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 - loc = inst + [-1] * (4 - len(inst)) - instructions.append((opcode, oparg, *loc)) - return instructions + def assertInstructionsMatch(self, actual_, expected_): + # get two lists where each entry is a label or + # an instruction tuple. Compare them, while mapping + # each actual label to a corresponding expected label + # based on their locations. + + self.assertIsInstance(actual_, list) + self.assertIsInstance(expected_, list) + + actual = self.normalize_insts(actual_) + expected = self.normalize_insts(expected_) + self.assertEqual(len(actual), len(expected)) + + # compare instructions + for act, exp in zip(actual, expected): + if isinstance(act, int): + self.assertEqual(exp, act) + continue + self.assertIsInstance(exp, tuple) + self.assertIsInstance(act, tuple) + # crop comparison to the provided expected values + if len(act) > len(exp): + act = act[:len(exp)] + self.assertEqual(exp, act) def normalize_insts(self, insts): """ Map labels to instruction index. Remove labels which are not used as jump targets. + Map opcodes to opnames. """ labels_map = {} targets = set() @@ -107,31 +117,32 @@ def normalize_insts(self, insts): res.append((opcode, arg, *loc)) return res - def get_optimized(self, insts, consts): - insts = self.complete_insts_info(insts) - insts = optimize_cfg(insts, consts) - return insts, consts - def compareInstructions(self, actual_, expected_): - # get two lists where each entry is a label or - # an instruction tuple. Compare them, while mapping - # each actual label to a corresponding expected label - # based on their locations. 
+class CodegenTestCase(CompilationStepTestCase): - self.assertIsInstance(actual_, list) - self.assertIsInstance(expected_, list) + def generate_code(self, ast): + insts = compiler_codegen(ast, "my_file.py", 0) + return insts - actual = self.normalize_insts(actual_) - expected = self.normalize_insts(expected_) - self.assertEqual(len(actual), len(expected)) - # compare instructions - for act, exp in zip(actual, expected): - if isinstance(act, int): - self.assertEqual(exp, act) - continue - self.assertIsInstance(exp, tuple) - self.assertIsInstance(act, tuple) - # pad exp with -1's (if location info is incomplete) - exp += (-1,) * (len(act) - len(exp)) - self.assertEqual(exp, act) +class CfgOptimizationTestCase(CompilationStepTestCase): + + def complete_insts_info(self, insts): + # fill in omitted fields in location, and oparg 0 for ops with no arg. + instructions = [] + for item in insts: + if isinstance(item, int): + instructions.append(item) + else: + assert isinstance(item, tuple) + inst = list(reversed(item)) + opcode = dis.opmap[inst.pop()] + oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 + loc = inst + [-1] * (4 - len(inst)) + instructions.append((opcode, oparg, *loc)) + return instructions + + def get_optimized(self, insts, consts): + insts = self.complete_insts_info(insts) + insts = optimize_cfg(insts, consts) + return insts, consts diff --git a/Lib/test/support/import_helper.py b/Lib/test/support/import_helper.py index 5201dc84cf6df4..63a8a7952db7a6 100644 --- a/Lib/test/support/import_helper.py +++ b/Lib/test/support/import_helper.py @@ -246,3 +246,11 @@ def modules_cleanup(oldmodules): # do currently). Implicitly imported *real* modules should be left alone # (see issue 10556). sys.modules.update(oldmodules) + + +def mock_register_at_fork(func): + # bpo-30599: Mock os.register_at_fork() when importing the random module, + # since this function doesn't allow to unregister callbacks and would leak + # memory. 
+ from unittest import mock + return mock.patch('os.register_at_fork', create=True)(func) diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index f548e3647b31c7..db831069c7aeb8 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -40,7 +40,7 @@ def test_stack_effect_jump(self): self.assertEqual(stack_effect(JUMP_IF_TRUE_OR_POP, 0, jump=False), -1) FOR_ITER = dis.opmap['FOR_ITER'] self.assertEqual(stack_effect(FOR_ITER, 0), 1) - self.assertEqual(stack_effect(FOR_ITER, 0, jump=True), -1) + self.assertEqual(stack_effect(FOR_ITER, 0, jump=True), 1) self.assertEqual(stack_effect(FOR_ITER, 0, jump=False), 1) JUMP_FORWARD = dis.opmap['JUMP_FORWARD'] self.assertEqual(stack_effect(JUMP_FORWARD, 0), 0) @@ -72,9 +72,10 @@ def test_specialization_stats(self): stat_names = opcode._specialization_stats specialized_opcodes = [ - op[:-len("_ADAPTIVE")].lower() for - op in opcode._specialized_instructions - if op.endswith("_ADAPTIVE")] + op.lower() + for op in opcode._specializations + if opcode._inline_cache_entries[opcode.opmap[op]] + ] self.assertIn('load_attr', specialized_opcodes) self.assertIn('binary_subscr', specialized_opcodes) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index f20aae8e21c66f..66f29b95af10c3 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -801,7 +801,7 @@ def f(): self.assertEqual(out, 'it worked!') def test_create_thread(self): - subinterp = interpreters.create(isolated=False) + subinterp = interpreters.create() script, file = _captured_script(""" import threading def f(): @@ -817,6 +817,61 @@ def f(): self.assertEqual(out, 'it worked!') + def test_create_daemon_thread(self): + with self.subTest('isolated'): + expected = 'spam spam spam spam spam' + subinterp = interpreters.create(isolated=True) + script, file = _captured_script(f""" + import threading + def f(): + print('it worked!', end='') + + try: + t = threading.Thread(target=f, daemon=True) + t.start() + t.join() + except RuntimeError: + print('{expected}', end='') + """) + with file: + interpreters.run_string(subinterp, script) + out = file.read() + + self.assertEqual(out, expected) + + with self.subTest('not isolated'): + subinterp = interpreters.create(isolated=False) + script, file = _captured_script(""" + import threading + def f(): + print('it worked!', end='') + + t = threading.Thread(target=f, daemon=True) + t.start() + t.join() + """) + with file: + interpreters.run_string(subinterp, script) + out = file.read() + + self.assertEqual(out, 'it worked!') + + def test_os_exec(self): + expected = 'spam spam spam spam spam' + subinterp = interpreters.create() + script, file = _captured_script(f""" + import os, sys + try: + os.execl(sys.executable) + except RuntimeError: + print('{expected}', end='') + """) + with file: + interpreters.run_string(subinterp, script) + out = file.read() + + self.assertEqual(out, expected) + @support.requires_fork() def test_fork(self): import tempfile diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py index a083236fb0fc44..86f31a9acb4d55 100644 --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -154,7 +154,7 @@ class C(metaclass=abc_ABCMeta): @abc.abstractmethod def method_one(self): pass - msg = r"class C without an implementation for abstract method method_one" + msg = r"class C without an implementation for abstract method 'method_one'" self.assertRaisesRegex(TypeError, msg, C) def test_object_new_with_many_abstractmethods(self): @@ -165,7 +165,7 @@ def 
method_one(self): @abc.abstractmethod def method_two(self): pass - msg = r"class C without an implementation for abstract methods method_one, method_two" + msg = r"class C without an implementation for abstract methods 'method_one', 'method_two'" self.assertRaisesRegex(TypeError, msg, C) def test_abstractmethod_integration(self): @@ -535,7 +535,7 @@ def updated_foo(self): A.foo = updated_foo abc.update_abstractmethods(A) self.assertEqual(A.__abstractmethods__, {'foo', 'bar'}) - msg = "class A without an implementation for abstract methods bar, foo" + msg = "class A without an implementation for abstract methods 'bar', 'foo'" self.assertRaisesRegex(TypeError, msg, A) def test_update_implementation(self): @@ -547,7 +547,7 @@ def foo(self): class B(A): pass - msg = "class B without an implementation for abstract method foo" + msg = "class B without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, B) self.assertEqual(B.__abstractmethods__, {'foo'}) @@ -605,7 +605,7 @@ def foo(self): abc.update_abstractmethods(B) - msg = "class B without an implementation for abstract method foo" + msg = "class B without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, B) def test_update_layered_implementation(self): @@ -627,7 +627,7 @@ def foo(self): abc.update_abstractmethods(C) - msg = "class C without an implementation for abstract method foo" + msg = "class C without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, C) def test_update_multi_inheritance(self): diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index 2b7f008d38564b..cabb2f837693ff 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -296,7 +296,7 @@ class TestOptionalsSingleDashCombined(ParserTestCase): Sig('-z'), ] failures = ['a', '--foo', '-xa', '-x --foo', '-x -z', '-z -x', - '-yx', '-yz a', '-yyyx', '-yyyza', '-xyza'] + '-yx', '-yz a', '-yyyx', '-yyyza', '-xyza', '-x='] successes = [ ('', NS(x=False, yyy=None, z=None)), ('-x', NS(x=True, yyy=None, z=None)), diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index b34644118d2815..773fba87632b0a 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1036,6 +1036,18 @@ def test_increment_lineno(self): self.assertEqual(ast.increment_lineno(src).lineno, 2) self.assertIsNone(ast.increment_lineno(src).end_lineno) + def test_increment_lineno_on_module(self): + src = ast.parse(dedent("""\ + a = 1 + b = 2 # type: ignore + c = 3 + d = 4 # type: ignore@tag + """), type_comments=True) + ast.increment_lineno(src, n=5) + self.assertEqual(src.type_ignores[0].lineno, 7) + self.assertEqual(src.type_ignores[1].lineno, 9) + self.assertEqual(src.type_ignores[1].tag, '@tag') + def test_iter_fields(self): node = ast.parse('foo()', mode='eval') d = dict(ast.iter_fields(node.body)) diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py index f6184c0cab4694..0421efdbf9dac9 100644 --- a/Lib/test/test_asyncgen.py +++ b/Lib/test/test_asyncgen.py @@ -378,6 +378,19 @@ async def async_gen_wrapper(): self.compare_generators(sync_gen_wrapper(), async_gen_wrapper()) + def test_async_gen_exception_12(self): + async def gen(): + await anext(me) + yield 123 + + me = gen() + ai = me.__aiter__() + an = ai.__anext__() + + with self.assertRaisesRegex(RuntimeError, + r'anext\(\): asynchronous generator is already running'): + an.__next__() + def test_async_gen_3_arg_deprecation_warning(self): async def gen(): yield 123 diff --git a/Lib/test/test_asynchat.py 
b/Lib/test/test_asynchat.py deleted file mode 100644 index 9d08bd02968fd6..00000000000000 --- a/Lib/test/test_asynchat.py +++ /dev/null @@ -1,293 +0,0 @@ -# test asynchat - -from test import support -from test.support import socket_helper -from test.support import threading_helper -from test.support import warnings_helper - -import errno -import socket -import sys -import threading -import time -import unittest -import unittest.mock - - -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') - -support.requires_working_socket(module=True) - -HOST = socket_helper.HOST -SERVER_QUIT = b'QUIT\n' - - -class echo_server(threading.Thread): - # parameter to determine the number of bytes passed back to the - # client each send - chunk_size = 1 - - def __init__(self, event): - threading.Thread.__init__(self) - self.event = event - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.port = socket_helper.bind_port(self.sock) - # This will be set if the client wants us to wait before echoing - # data back. - self.start_resend_event = None - - def run(self): - self.sock.listen() - self.event.set() - conn, client = self.sock.accept() - self.buffer = b"" - # collect data until quit message is seen - while SERVER_QUIT not in self.buffer: - data = conn.recv(1) - if not data: - break - self.buffer = self.buffer + data - - # remove the SERVER_QUIT message - self.buffer = self.buffer.replace(SERVER_QUIT, b'') - - if self.start_resend_event: - self.start_resend_event.wait() - - # re-send entire set of collected data - try: - # this may fail on some tests, such as test_close_when_done, - # since the client closes the channel when it's done sending - while self.buffer: - n = conn.send(self.buffer[:self.chunk_size]) - time.sleep(0.001) - self.buffer = self.buffer[n:] - except: - pass - - conn.close() - self.sock.close() - -class echo_client(asynchat.async_chat): - - def __init__(self, terminator, server_port): - asynchat.async_chat.__init__(self) - self.contents = [] - self.create_socket(socket.AF_INET, socket.SOCK_STREAM) - self.connect((HOST, server_port)) - self.set_terminator(terminator) - self.buffer = b"" - - def handle_connect(self): - pass - - if sys.platform == 'darwin': - # select.poll returns a select.POLLHUP at the end of the tests - # on darwin, so just ignore it - def handle_expt(self): - pass - - def collect_incoming_data(self, data): - self.buffer += data - - def found_terminator(self): - self.contents.append(self.buffer) - self.buffer = b"" - -def start_echo_server(): - event = threading.Event() - s = echo_server(event) - s.start() - event.wait() - event.clear() - time.sleep(0.01) # Give server time to start accepting. - return s, event - - -class TestAsynchat(unittest.TestCase): - usepoll = False - - def setUp(self): - self._threads = threading_helper.threading_setup() - - def tearDown(self): - threading_helper.threading_cleanup(*self._threads) - - def line_terminator_check(self, term, server_chunk): - event = threading.Event() - s = echo_server(event) - s.chunk_size = server_chunk - s.start() - event.wait() - event.clear() - time.sleep(0.01) # Give server time to start accepting. - c = echo_client(term, s.port) - c.push(b"hello ") - c.push(b"world" + term) - c.push(b"I'm not dead yet!" 
+ term) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - # the line terminator tests below check receiving variously-sized - # chunks back from the server in order to exercise all branches of - # async_chat.handle_read - - def test_line_terminator1(self): - # test one-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'\n', l) - - def test_line_terminator2(self): - # test two-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'\r\n', l) - - def test_line_terminator3(self): - # test three-character terminator - for l in (1, 2, 3): - self.line_terminator_check(b'qqq', l) - - def numeric_terminator_check(self, termlen): - # Try reading a fixed number of bytes - s, event = start_echo_server() - c = echo_client(termlen, s.port) - data = b"hello world, I'm not dead yet!\n" - c.push(data) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [data[:termlen]]) - - def test_numeric_terminator1(self): - # check that ints & longs both work (since type is - # explicitly checked in async_chat.handle_read) - self.numeric_terminator_check(1) - - def test_numeric_terminator2(self): - self.numeric_terminator_check(6) - - def test_none_terminator(self): - # Try reading a fixed number of bytes - s, event = start_echo_server() - c = echo_client(None, s.port) - data = b"hello world, I'm not dead yet!\n" - c.push(data) - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, []) - self.assertEqual(c.buffer, data) - - def test_simple_producer(self): - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b"hello world\nI'm not dead yet!\n" - p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8) - c.push_with_producer(p) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - def test_string_producer(self): - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b"hello world\nI'm not dead yet!\n" - c.push_with_producer(data+SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"]) - - def test_empty_line(self): - # checks that empty lines are handled correctly - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - c.push(b"hello world\n\nI'm not dead yet!\n") - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - - self.assertEqual(c.contents, - [b"hello world", b"", b"I'm not dead yet!"]) - - def test_close_when_done(self): - s, event = start_echo_server() - s.start_resend_event = threading.Event() - c = echo_client(b'\n', s.port) - c.push(b"hello world\nI'm not dead yet!\n") - c.push(SERVER_QUIT) - c.close_when_done() - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - - # Only allow the server to start echoing data back to the client after - # the client has closed its connection. This prevents a race condition - # where the server echoes all of its data before we can check that it - # got any down below. 
- s.start_resend_event.set() - threading_helper.join_thread(s) - - self.assertEqual(c.contents, []) - # the server might have been able to send a byte or two back, but this - # at least checks that it received something and didn't just fail - # (which could still result in the client not having received anything) - self.assertGreater(len(s.buffer), 0) - - def test_push(self): - # Issue #12523: push() should raise a TypeError if it doesn't get - # a bytes string - s, event = start_echo_server() - c = echo_client(b'\n', s.port) - data = b'bytes\n' - c.push(data) - c.push(bytearray(data)) - c.push(memoryview(data)) - self.assertRaises(TypeError, c.push, 10) - self.assertRaises(TypeError, c.push, 'unicode') - c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) - threading_helper.join_thread(s) - self.assertEqual(c.contents, [b'bytes', b'bytes', b'bytes']) - - -class TestAsynchat_WithPoll(TestAsynchat): - usepoll = True - - -class TestAsynchatMocked(unittest.TestCase): - def test_blockingioerror(self): - # Issue #16133: handle_read() must ignore BlockingIOError - sock = unittest.mock.Mock() - sock.recv.side_effect = BlockingIOError(errno.EAGAIN) - - dispatcher = asynchat.async_chat() - dispatcher.set_socket(sock) - self.addCleanup(dispatcher.del_channel) - - with unittest.mock.patch.object(dispatcher, 'handle_error') as error: - dispatcher.handle_read() - self.assertFalse(error.called) - - -class TestHelperFunctions(unittest.TestCase): - def test_find_prefix_at_end(self): - self.assertEqual(asynchat.find_prefix_at_end("qwerty\r", "\r\n"), 1) - self.assertEqual(asynchat.find_prefix_at_end("qwertydkjf", "\r\n"), 0) - - -class TestNotConnected(unittest.TestCase): - def test_disallow_negative_terminator(self): - # Issue #11259 - client = asynchat.async_chat() - self.assertRaises(ValueError, client.set_terminator, -1) - - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 2dcb20c1cec7f9..7421d18dc636c8 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -2052,11 +2052,11 @@ def prepare(self): def cleanup(): server.close() - self.run_loop(server.wait_closed()) sock.close() if proto.transport is not None: proto.transport.close() self.run_loop(proto.wait_closed()) + self.run_loop(server.wait_closed()) self.addCleanup(cleanup) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 80d7152128c469..cabe75f56d9fb0 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -22,7 +22,7 @@ import unittest from unittest import mock import weakref - +import warnings if sys.platform not in ('win32', 'vxworks'): import tty @@ -2055,12 +2055,16 @@ def test_remove_fds_after_closing(self): class UnixEventLoopTestsMixin(EventLoopTestsMixin): def setUp(self): super().setUp() - watcher = asyncio.SafeChildWatcher() - watcher.attach_loop(self.loop) - asyncio.set_child_watcher(watcher) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = asyncio.SafeChildWatcher() + watcher.attach_loop(self.loop) + asyncio.set_child_watcher(watcher) def tearDown(self): - asyncio.set_child_watcher(None) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + asyncio.set_child_watcher(None) super().tearDown() @@ -2652,14 +2656,18 @@ def setUp(self): asyncio.set_event_loop(self.loop) if sys.platform != 
'win32': - watcher = asyncio.SafeChildWatcher() - watcher.attach_loop(self.loop) - asyncio.set_child_watcher(watcher) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = asyncio.SafeChildWatcher() + watcher.attach_loop(self.loop) + asyncio.set_child_watcher(watcher) def tearDown(self): try: if sys.platform != 'win32': - asyncio.set_child_watcher(None) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + asyncio.set_child_watcher(None) super().tearDown() finally: diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 3dc6b658cfae8d..83ea01c2452521 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -229,22 +229,14 @@ def test_future_cancel_message_getter(self): self.assertTrue(hasattr(f, '_cancel_message')) self.assertEqual(f._cancel_message, None) - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - f.cancel('my message') + f.cancel('my message') with self.assertRaises(asyncio.CancelledError): self.loop.run_until_complete(f) self.assertEqual(f._cancel_message, 'my message') def test_future_cancel_message_setter(self): f = self._new_future(loop=self.loop) - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - f.cancel('my message') + f.cancel('my message') f._cancel_message = 'my new message' self.assertEqual(f._cancel_message, 'my new message') diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 7fca0541ee75a2..ae30185cef776a 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -290,7 +290,33 @@ def test_force_close_idempotent(self): tr._closing = True tr._force_close(None) test_utils.run_briefly(self.loop) + # See https://github.com/python/cpython/issues/89237 + # `protocol.connection_lost` should be called even if + # the transport was closed forcefully otherwise + # the resources held by protocol will never be freed + # and waiters will never be notified leading to hang. 
+ self.assertTrue(self.protocol.connection_lost.called) + + def test_force_close_protocol_connection_lost_once(self): + tr = self.socket_transport() self.assertFalse(self.protocol.connection_lost.called) + tr._closing = True + # Calling _force_close twice should not call + # protocol.connection_lost twice + tr._force_close(None) + tr._force_close(None) + test_utils.run_briefly(self.loop) + self.assertEqual(1, self.protocol.connection_lost.call_count) + + def test_close_protocol_connection_lost_once(self): + tr = self.socket_transport() + self.assertFalse(self.protocol.connection_lost.called) + # Calling close twice should not call + # protocol.connection_lost twice + tr.close() + tr.close() + test_utils.run_briefly(self.loop) + self.assertEqual(1, self.protocol.connection_lost.call_count) def test_fatal_error_2(self): tr = self.socket_transport() diff --git a/Lib/test/test_asyncio/test_runners.py b/Lib/test/test_asyncio/test_runners.py index 1308b7e2ba4f82..811cf8b72488b8 100644 --- a/Lib/test/test_asyncio/test_runners.py +++ b/Lib/test/test_asyncio/test_runners.py @@ -257,6 +257,16 @@ def new_event_loop(): with self.assertRaises(asyncio.CancelledError): asyncio.run(main()) + def test_asyncio_run_loop_factory(self): + factory = mock.Mock() + loop = factory.return_value = self.new_loop() + + async def main(): + self.assertEqual(asyncio.get_running_loop(), loop) + + asyncio.run(main(), loop_factory=factory) + factory.assert_called_once_with() + class RunnerTests(BaseTest): diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index 22dcfb23083522..ca555387dd2493 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -61,8 +61,10 @@ def setUp(self): def test_make_socket_transport(self): m = mock.Mock() self.loop.add_reader = mock.Mock() + self.loop._ensure_fd_no_transport = mock.Mock() transport = self.loop._make_socket_transport(m, asyncio.Protocol()) self.assertIsInstance(transport, _SelectorSocketTransport) + self.assertEqual(self.loop._ensure_fd_no_transport.call_count, 1) # Calling repr() must not fail when the event loop is closed self.loop.close() @@ -78,8 +80,10 @@ def test_make_ssl_transport_without_ssl_error(self): self.loop.add_writer = mock.Mock() self.loop.remove_reader = mock.Mock() self.loop.remove_writer = mock.Mock() + self.loop._ensure_fd_no_transport = mock.Mock() with self.assertRaises(RuntimeError): self.loop._make_ssl_transport(m, m, m, m) + self.assertEqual(self.loop._ensure_fd_no_transport.call_count, 1) def test_close(self): class EventLoop(BaseSelectorEventLoop): @@ -1077,6 +1081,10 @@ def test_read_ready(self): self.protocol.datagram_received.assert_called_with( b'data', ('0.0.0.0', 1234)) + def test_transport_inheritance(self): + transport = self.datagram_transport() + self.assertIsInstance(transport, asyncio.DatagramTransport) + def test_read_ready_tryagain(self): transport = self.datagram_transport() diff --git a/Lib/test/test_asyncio/test_sendfile.py b/Lib/test/test_asyncio/test_sendfile.py index a10504b1c4130e..0198da21d77028 100644 --- a/Lib/test/test_asyncio/test_sendfile.py +++ b/Lib/test/test_asyncio/test_sendfile.py @@ -1,6 +1,7 @@ """Tests for sendfile functionality.""" import asyncio +import errno import os import socket import sys @@ -484,8 +485,17 @@ def sendfile_native(transp, file, offset, count): srv_proto, cli_proto = self.prepare_sendfile(close_after=1024) with self.assertRaises(ConnectionError): - self.run_loop( - 
self.loop.sendfile(cli_proto.transport, self.file)) + try: + self.run_loop( + self.loop.sendfile(cli_proto.transport, self.file)) + except OSError as e: + # macOS may raise OSError of EPROTOTYPE when writing to a + # socket that is in the process of closing down. + if e.errno == errno.EPROTOTYPE and sys.platform == "darwin": + raise ConnectionError + else: + raise + self.run_loop(srv_proto.done) self.assertTrue(1024 <= srv_proto.nbytes < len(self.DATA), diff --git a/Lib/test/test_asyncio/test_server.py b/Lib/test/test_asyncio/test_server.py index 860d62d52ef1ea..06d8b60f219f1a 100644 --- a/Lib/test/test_asyncio/test_server.py +++ b/Lib/test/test_asyncio/test_server.py @@ -120,6 +120,33 @@ async def main(srv): self.loop.run_until_complete(srv.serve_forever()) +class TestServer2(unittest.IsolatedAsyncioTestCase): + + async def test_wait_closed(self): + async def serve(*args): + pass + + srv = await asyncio.start_server(serve, socket_helper.HOSTv4, 0) + + # active count = 0 + task1 = asyncio.create_task(srv.wait_closed()) + await asyncio.sleep(0) + self.assertTrue(task1.done()) + + # active count != 0 + srv._attach() + task2 = asyncio.create_task(srv.wait_closed()) + await asyncio.sleep(0) + self.assertFalse(task2.done()) + + srv.close() + await asyncio.sleep(0) + self.assertFalse(task2.done()) + + srv._detach() + await task2 + + @unittest.skipUnless(hasattr(asyncio, 'ProactorEventLoop'), 'Windows only') class ProactorStartServerTests(BaseStartServer, unittest.TestCase): diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 61d5e984dfbfbb..01d5407a497a04 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -9,6 +9,7 @@ import threading import unittest from unittest import mock +import warnings from test.support import socket_helper try: import ssl @@ -566,10 +567,46 @@ def test_exception_cancel(self): test_utils.run_briefly(self.loop) self.assertIs(stream._waiter, None) + def test_start_server(self): -class NewStreamTests(unittest.IsolatedAsyncioTestCase): + class MyServer: - async def test_start_server(self): + def __init__(self, loop): + self.server = None + self.loop = loop + + async def handle_client(self, client_reader, client_writer): + data = await client_reader.readline() + client_writer.write(data) + await client_writer.drain() + client_writer.close() + await client_writer.wait_closed() + + def start(self): + sock = socket.create_server(('127.0.0.1', 0)) + self.server = self.loop.run_until_complete( + asyncio.start_server(self.handle_client, + sock=sock)) + return sock.getsockname() + + def handle_client_callback(self, client_reader, client_writer): + self.loop.create_task(self.handle_client(client_reader, + client_writer)) + + def start_callback(self): + sock = socket.create_server(('127.0.0.1', 0)) + addr = sock.getsockname() + sock.close() + self.server = self.loop.run_until_complete( + asyncio.start_server(self.handle_client_callback, + host=addr[0], port=addr[1])) + return addr + + def stop(self): + if self.server is not None: + self.server.close() + self.loop.run_until_complete(self.server.wait_closed()) + self.server = None async def client(addr): reader, writer = await asyncio.open_connection(*addr) @@ -581,43 +618,61 @@ async def client(addr): await writer.wait_closed() return msgback - async def handle_client(client_reader, client_writer): - data = await client_reader.readline() - client_writer.write(data) - await client_writer.drain() - client_writer.close() - await 
client_writer.wait_closed() - - with self.subTest(msg="coroutine"): - server = await asyncio.start_server( - handle_client, - host=socket_helper.HOSTv4 - ) - addr = server.sockets[0].getsockname() - msg = await client(addr) - server.close() - await server.wait_closed() - self.assertEqual(msg, b"hello world!\n") + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) - with self.subTest(msg="callback"): - async def handle_client_callback(client_reader, client_writer): - asyncio.get_running_loop().create_task( - handle_client(client_reader, client_writer) - ) + # test the server variant with a coroutine as client handler + server = MyServer(self.loop) + addr = server.start() + msg = self.loop.run_until_complete(self.loop.create_task(client(addr))) + server.stop() + self.assertEqual(msg, b"hello world!\n") - server = await asyncio.start_server( - handle_client_callback, - host=socket_helper.HOSTv4 - ) - addr = server.sockets[0].getsockname() - reader, writer = await asyncio.open_connection(*addr) - msg = await client(addr) - server.close() - await server.wait_closed() - self.assertEqual(msg, b"hello world!\n") + # test the server variant with a callback as client handler + server = MyServer(self.loop) + addr = server.start_callback() + msg = self.loop.run_until_complete(self.loop.create_task(client(addr))) + server.stop() + self.assertEqual(msg, b"hello world!\n") + + self.assertEqual(messages, []) @socket_helper.skip_unless_bind_unix_socket - async def test_start_unix_server(self): + def test_start_unix_server(self): + + class MyServer: + + def __init__(self, loop, path): + self.server = None + self.loop = loop + self.path = path + + async def handle_client(self, client_reader, client_writer): + data = await client_reader.readline() + client_writer.write(data) + await client_writer.drain() + client_writer.close() + await client_writer.wait_closed() + + def start(self): + self.server = self.loop.run_until_complete( + asyncio.start_unix_server(self.handle_client, + path=self.path)) + + def handle_client_callback(self, client_reader, client_writer): + self.loop.create_task(self.handle_client(client_reader, + client_writer)) + + def start_callback(self): + start = asyncio.start_unix_server(self.handle_client_callback, + path=self.path) + self.server = self.loop.run_until_complete(start) + + def stop(self): + if self.server is not None: + self.server.close() + self.loop.run_until_complete(self.server.wait_closed()) + self.server = None async def client(path): reader, writer = await asyncio.open_unix_connection(path) @@ -629,42 +684,64 @@ async def client(path): await writer.wait_closed() return msgback - async def handle_client(client_reader, client_writer): - data = await client_reader.readline() - client_writer.write(data) - await client_writer.drain() - client_writer.close() - await client_writer.wait_closed() - - with self.subTest(msg="coroutine"): - with test_utils.unix_socket_path() as path: - server = await asyncio.start_unix_server( - handle_client, - path=path - ) - msg = await client(path) - server.close() - await server.wait_closed() - self.assertEqual(msg, b"hello world!\n") - - with self.subTest(msg="callback"): - async def handle_client_callback(client_reader, client_writer): - asyncio.get_running_loop().create_task( - handle_client(client_reader, client_writer) - ) - - with test_utils.unix_socket_path() as path: - server = await asyncio.start_unix_server( - handle_client_callback, - path=path - ) - msg = await client(path) - server.close() - await 
server.wait_closed() - self.assertEqual(msg, b"hello world!\n") + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) + + # test the server variant with a coroutine as client handler + with test_utils.unix_socket_path() as path: + server = MyServer(self.loop, path) + server.start() + msg = self.loop.run_until_complete( + self.loop.create_task(client(path))) + server.stop() + self.assertEqual(msg, b"hello world!\n") + + # test the server variant with a callback as client handler + with test_utils.unix_socket_path() as path: + server = MyServer(self.loop, path) + server.start_callback() + msg = self.loop.run_until_complete( + self.loop.create_task(client(path))) + server.stop() + self.assertEqual(msg, b"hello world!\n") + + self.assertEqual(messages, []) @unittest.skipIf(ssl is None, 'No ssl module') - async def test_start_tls(self): + def test_start_tls(self): + + class MyServer: + + def __init__(self, loop): + self.server = None + self.loop = loop + + async def handle_client(self, client_reader, client_writer): + data1 = await client_reader.readline() + client_writer.write(data1) + await client_writer.drain() + assert client_writer.get_extra_info('sslcontext') is None + await client_writer.start_tls( + test_utils.simple_server_sslcontext()) + assert client_writer.get_extra_info('sslcontext') is not None + data2 = await client_reader.readline() + client_writer.write(data2) + await client_writer.drain() + client_writer.close() + await client_writer.wait_closed() + + def start(self): + sock = socket.create_server(('127.0.0.1', 0)) + self.server = self.loop.run_until_complete( + asyncio.start_server(self.handle_client, + sock=sock)) + return sock.getsockname() + + def stop(self): + if self.server is not None: + self.server.close() + self.loop.run_until_complete(self.server.wait_closed()) + self.server = None async def client(addr): reader, writer = await asyncio.open_connection(*addr) @@ -681,48 +758,17 @@ async def client(addr): await writer.wait_closed() return msgback1, msgback2 - async def handle_client(client_reader, client_writer): - data1 = await client_reader.readline() - client_writer.write(data1) - await client_writer.drain() - assert client_writer.get_extra_info('sslcontext') is None - await client_writer.start_tls( - test_utils.simple_server_sslcontext()) - assert client_writer.get_extra_info('sslcontext') is not None - - data2 = await client_reader.readline() - client_writer.write(data2) - await client_writer.drain() - client_writer.close() - await client_writer.wait_closed() - - server = await asyncio.start_server( - handle_client, - host=socket_helper.HOSTv4 - ) - addr = server.sockets[0].getsockname() - - msg1, msg2 = await client(addr) - server.close() - await server.wait_closed() - self.assertEqual(msg1, b"hello world 1!\n") - self.assertEqual(msg2, b"hello world 2!\n") - - -class StreamTests2(test_utils.TestCase): - - def setUp(self): - super().setUp() - self.loop = asyncio.new_event_loop() - self.set_event_loop(self.loop) + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) - def tearDown(self): - # just in case if we have transport close callbacks - test_utils.run_briefly(self.loop) + server = MyServer(self.loop) + addr = server.start() + msg1, msg2 = self.loop.run_until_complete(client(addr)) + server.stop() - self.loop.close() - gc.collect() - super().tearDown() + self.assertEqual(messages, []) + self.assertEqual(msg1, b"hello world 1!\n") + self.assertEqual(msg2, b"hello world 2!\n") 
@unittest.skipIf(sys.platform == 'win32', "Don't have pipes") def test_read_all_from_pipe_reader(self): @@ -746,11 +792,14 @@ def test_read_all_from_pipe_reader(self): protocol = asyncio.StreamReaderProtocol(reader, loop=self.loop) transport, _ = self.loop.run_until_complete( self.loop.connect_read_pipe(lambda: protocol, pipe)) - - watcher = asyncio.SafeChildWatcher() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = asyncio.SafeChildWatcher() watcher.attach_loop(self.loop) try: - asyncio.set_child_watcher(watcher) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + asyncio.set_child_watcher(watcher) create = asyncio.create_subprocess_exec( *args, pass_fds={wfd}, @@ -758,7 +807,9 @@ def test_read_all_from_pipe_reader(self): proc = self.loop.run_until_complete(create) self.loop.run_until_complete(proc.wait()) finally: - asyncio.set_child_watcher(None) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + asyncio.set_child_watcher(None) os.close(wfd) data = self.loop.run_until_complete(reader.read(-1)) @@ -941,32 +992,36 @@ def test_LimitOverrunError_pickleable(self): self.assertEqual(str(e), str(e2)) self.assertEqual(e.consumed, e2.consumed) -class NewStreamTests2(unittest.IsolatedAsyncioTestCase): - async def test_wait_closed_on_close(self): + def test_wait_closed_on_close(self): with test_utils.run_test_server() as httpd: - rd, wr = await asyncio.open_connection(*httpd.address) + rd, wr = self.loop.run_until_complete( + asyncio.open_connection(*httpd.address)) wr.write(b'GET / HTTP/1.0\r\n\r\n') - data = await rd.readline() + f = rd.readline() + data = self.loop.run_until_complete(f) self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') - data = await rd.read() + f = rd.read() + data = self.loop.run_until_complete(f) self.assertTrue(data.endswith(b'\r\n\r\nTest message')) self.assertFalse(wr.is_closing()) wr.close() self.assertTrue(wr.is_closing()) - await wr.wait_closed() + self.loop.run_until_complete(wr.wait_closed()) - async def test_wait_closed_on_close_with_unread_data(self): + def test_wait_closed_on_close_with_unread_data(self): with test_utils.run_test_server() as httpd: - rd, wr = await asyncio.open_connection(*httpd.address) + rd, wr = self.loop.run_until_complete( + asyncio.open_connection(*httpd.address)) wr.write(b'GET / HTTP/1.0\r\n\r\n') - data = await rd.readline() + f = rd.readline() + data = self.loop.run_until_complete(f) self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') wr.close() - await wr.wait_closed() + self.loop.run_until_complete(wr.wait_closed()) - async def test_async_writer_api(self): + def test_async_writer_api(self): async def inner(httpd): rd, wr = await asyncio.open_connection(*httpd.address) @@ -978,10 +1033,15 @@ async def inner(httpd): wr.close() await wr.wait_closed() + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) + with test_utils.run_test_server() as httpd: - await inner(httpd) + self.loop.run_until_complete(inner(httpd)) - async def test_async_writer_api_exception_after_close(self): + self.assertEqual(messages, []) + + def test_async_writer_api_exception_after_close(self): async def inner(httpd): rd, wr = await asyncio.open_connection(*httpd.address) @@ -995,19 +1055,33 @@ async def inner(httpd): wr.write(b'data') await wr.drain() + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) + with test_utils.run_test_server() as httpd: - await inner(httpd) + 
self.loop.run_until_complete(inner(httpd)) - async def test_eof_feed_when_closing_writer(self): + self.assertEqual(messages, []) + + def test_eof_feed_when_closing_writer(self): # See http://bugs.python.org/issue35065 + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) + with test_utils.run_test_server() as httpd: - rd, wr = await asyncio.open_connection(*httpd.address) + rd, wr = self.loop.run_until_complete( + asyncio.open_connection(*httpd.address)) + wr.close() - await wr.wait_closed() + f = wr.wait_closed() + self.loop.run_until_complete(f) self.assertTrue(rd.at_eof()) - data = await rd.read() + f = rd.read() + data = self.loop.run_until_complete(f) self.assertEqual(data, b'') + self.assertEqual(messages, []) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 9bc60b9dc2ae2e..7411f735da3be6 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -184,7 +184,10 @@ def test_kill(self): self.assertEqual(-signal.SIGKILL, returncode) def test_kill_issue43884(self): - blocking_shell_command = f'{sys.executable} -c "import time; time.sleep(100000000)"' + if sys.platform == 'win32': + blocking_shell_command = f'{sys.executable} -c "import time; time.sleep(2)"' + else: + blocking_shell_command = 'sleep 1; sleep 1' creationflags = 0 if sys.platform == 'win32': from subprocess import CREATE_NEW_PROCESS_GROUP @@ -427,6 +430,26 @@ async def empty_error(): self.assertEqual(output, None) self.assertEqual(exitcode, 0) + @unittest.skipIf(sys.platform != 'linux', "Don't have /dev/stdin") + def test_devstdin_input(self): + + async def devstdin_input(message): + code = 'file = open("/dev/stdin"); data = file.read(); print(len(data))' + proc = await asyncio.create_subprocess_exec( + sys.executable, '-c', code, + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + close_fds=False, + ) + stdout, stderr = await proc.communicate(message) + exitcode = await proc.wait() + return (stdout, exitcode) + + output, exitcode = self.loop.run_until_complete(devstdin_input(b'abc')) + self.assertEqual(output.rstrip(), b'3') + self.assertEqual(exitcode, 0) + def test_cancel_process_wait(self): # Issue #23140: cancel Process.wait() @@ -562,7 +585,9 @@ async def kill_running(): # manually to avoid a warning when the watcher is detached. 
if (sys.platform != 'win32' and isinstance(self, SubprocessFastWatcherTests)): - asyncio.get_child_watcher()._callbacks.clear() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + asyncio.get_child_watcher()._callbacks.clear() async def _test_popen_error(self, stdin): if sys.platform == 'win32': @@ -674,65 +699,64 @@ def setUp(self): self.loop = policy.new_event_loop() self.set_event_loop(self.loop) - watcher = self.Watcher() + watcher = self._get_watcher() watcher.attach_loop(self.loop) - policy.set_child_watcher(watcher) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + policy.set_child_watcher(watcher) def tearDown(self): super().tearDown() policy = asyncio.get_event_loop_policy() - watcher = policy.get_child_watcher() - policy.set_child_watcher(None) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = policy.get_child_watcher() + policy.set_child_watcher(None) watcher.attach_loop(None) watcher.close() class SubprocessThreadedWatcherTests(SubprocessWatcherMixin, test_utils.TestCase): - Watcher = unix_events.ThreadedChildWatcher - - @unittest.skip("bpo-38323: MultiLoopChildWatcher has a race condition \ - and these tests can hang the test suite") - class SubprocessMultiLoopWatcherTests(SubprocessWatcherMixin, - test_utils.TestCase): - - Watcher = unix_events.MultiLoopChildWatcher + def _get_watcher(self): + return unix_events.ThreadedChildWatcher() class SubprocessSafeWatcherTests(SubprocessWatcherMixin, test_utils.TestCase): - Watcher = unix_events.SafeChildWatcher + def _get_watcher(self): + with self.assertWarns(DeprecationWarning): + return unix_events.SafeChildWatcher() + + class MultiLoopChildWatcherTests(test_utils.TestCase): + + def test_warns(self): + with self.assertWarns(DeprecationWarning): + unix_events.MultiLoopChildWatcher() class SubprocessFastWatcherTests(SubprocessWatcherMixin, test_utils.TestCase): - Watcher = unix_events.FastChildWatcher - - def has_pidfd_support(): - if not hasattr(os, 'pidfd_open'): - return False - try: - os.close(os.pidfd_open(os.getpid())) - except OSError: - return False - return True + def _get_watcher(self): + with self.assertWarns(DeprecationWarning): + return unix_events.FastChildWatcher() @unittest.skipUnless( - has_pidfd_support(), + unix_events.can_use_pidfd(), "operating system does not support pidfds", ) class SubprocessPidfdWatcherTests(SubprocessWatcherMixin, test_utils.TestCase): - Watcher = unix_events.PidfdChildWatcher + + def _get_watcher(self): + return unix_events.PidfdChildWatcher() class GenericWatcherTests(test_utils.TestCase): def test_create_subprocess_fails_with_inactive_watcher(self): - watcher = mock.create_autospec( - asyncio.AbstractChildWatcher, - **{"__enter__.return_value.is_active.return_value": False} - ) + watcher = mock.create_autospec(asyncio.AbstractChildWatcher) + watcher.is_active.return_value = False async def execute(): asyncio.set_child_watcher(watcher) @@ -744,13 +768,45 @@ async def execute(): watcher.add_child_handler.assert_not_called() with asyncio.Runner(loop_factory=asyncio.new_event_loop) as runner: - self.assertIsNone(runner.run(execute())) + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + self.assertIsNone(runner.run(execute())) self.assertListEqual(watcher.mock_calls, [ mock.call.__enter__(), - mock.call.__enter__().is_active(), + mock.call.is_active(), mock.call.__exit__(RuntimeError, mock.ANY, mock.ANY), - ]) + ], 
watcher.mock_calls) + + + @unittest.skipUnless( + unix_events.can_use_pidfd(), + "operating system does not support pidfds", + ) + def test_create_subprocess_with_pidfd(self): + async def in_thread(): + proc = await asyncio.create_subprocess_exec( + *PROGRAM_CAT, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + stdout, stderr = await proc.communicate(b"some data") + return proc.returncode, stdout + async def main(): + # asyncio.Runner did not call asyncio.set_event_loop() + with self.assertRaises(RuntimeError): + asyncio.get_event_loop_policy().get_event_loop() + return await asyncio.to_thread(asyncio.run, in_thread()) + with self.assertWarns(DeprecationWarning): + asyncio.set_child_watcher(asyncio.PidfdChildWatcher()) + try: + with asyncio.Runner(loop_factory=asyncio.new_event_loop) as runner: + returncode, stdout = runner.run(main()) + self.assertEqual(returncode, 0) + self.assertEqual(stdout, b'some data') + finally: + with self.assertWarns(DeprecationWarning): + asyncio.set_child_watcher(None) else: # Windows class SubprocessProactorTests(SubprocessMixin, test_utils.TestCase): diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 2491285206bcd7..d8ba2f4e2a742a 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -109,11 +109,7 @@ async def coro(): self.assertTrue(hasattr(t, '_cancel_message')) self.assertEqual(t._cancel_message, None) - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - t.cancel('my message') + t.cancel('my message') self.assertEqual(t._cancel_message, 'my message') with self.assertRaises(asyncio.CancelledError) as cm: @@ -125,11 +121,7 @@ def test_task_cancel_message_setter(self): async def coro(): pass t = self.new_task(self.loop, coro()) - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - t.cancel('my message') + t.cancel('my message') t._cancel_message = 'my new message' self.assertEqual(t._cancel_message, 'my new message') @@ -643,27 +635,30 @@ def on_timeout(): await asyncio.sleep(0) return timed_out, structured_block_finished, outer_code_reached - # Test which timed out. - t1 = self.new_task(loop, make_request_with_timeout(sleep=10.0, timeout=0.1)) - timed_out, structured_block_finished, outer_code_reached = ( - loop.run_until_complete(t1) - ) - self.assertTrue(timed_out) - self.assertFalse(structured_block_finished) # it was cancelled - self.assertTrue(outer_code_reached) # task got uncancelled after leaving - # the structured block and continued until - # completion - self.assertEqual(t1.cancelling(), 0) # no pending cancellation of the outer task - - # Test which did not time out. - t2 = self.new_task(loop, make_request_with_timeout(sleep=0, timeout=10.0)) - timed_out, structured_block_finished, outer_code_reached = ( - loop.run_until_complete(t2) - ) - self.assertFalse(timed_out) - self.assertTrue(structured_block_finished) - self.assertTrue(outer_code_reached) - self.assertEqual(t2.cancelling(), 0) + try: + # Test which timed out. 
+ t1 = self.new_task(loop, make_request_with_timeout(sleep=10.0, timeout=0.1)) + timed_out, structured_block_finished, outer_code_reached = ( + loop.run_until_complete(t1) + ) + self.assertTrue(timed_out) + self.assertFalse(structured_block_finished) # it was cancelled + self.assertTrue(outer_code_reached) # task got uncancelled after leaving + # the structured block and continued until + # completion + self.assertEqual(t1.cancelling(), 0) # no pending cancellation of the outer task + + # Test which did not time out. + t2 = self.new_task(loop, make_request_with_timeout(sleep=0, timeout=10.0)) + timed_out, structured_block_finished, outer_code_reached = ( + loop.run_until_complete(t2) + ) + self.assertFalse(timed_out) + self.assertTrue(structured_block_finished) + self.assertTrue(outer_code_reached) + self.assertEqual(t2.cancelling(), 0) + finally: + loop.close() def test_cancel(self): @@ -706,14 +701,7 @@ async def sleep(): async def coro(): task = self.new_task(loop, sleep()) await asyncio.sleep(0) - if cancel_args not in ((), (None,)): - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - task.cancel(*cancel_args) - else: - task.cancel(*cancel_args) + task.cancel(*cancel_args) done, pending = await asyncio.wait([task]) task.result() @@ -747,14 +735,7 @@ async def sleep(): async def coro(): task = self.new_task(loop, sleep()) await asyncio.sleep(0) - if cancel_args not in ((), (None,)): - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - task.cancel(*cancel_args) - else: - task.cancel(*cancel_args) + task.cancel(*cancel_args) done, pending = await asyncio.wait([task]) task.exception() @@ -777,17 +758,10 @@ async def sleep(): fut.set_result(None) await asyncio.sleep(10) - def cancel(task, msg): - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - task.cancel(msg) - async def coro(): inner_task = self.new_task(loop, sleep()) await fut - loop.call_soon(cancel, inner_task, 'msg') + loop.call_soon(inner_task.cancel, 'msg') try: await inner_task except asyncio.CancelledError as ex: @@ -813,11 +787,7 @@ async def sleep(): async def coro(): task = self.new_task(loop, sleep()) # We deliberately leave out the sleep here. 
- with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - task.cancel('my message') + task.cancel('my message') done, pending = await asyncio.wait([task]) task.exception() @@ -2179,14 +2149,7 @@ async def test(): async def main(): qwe = self.new_task(loop, test()) await asyncio.sleep(0.2) - if cancel_args not in ((), (None,)): - with self.assertWarnsRegex( - DeprecationWarning, - "Passing 'msg' argument" - ): - qwe.cancel(*cancel_args) - else: - qwe.cancel(*cancel_args) + qwe.cancel(*cancel_args) await qwe try: diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 5bad21ecbae4af..93e8611f184d25 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -12,6 +12,7 @@ import threading import unittest from unittest import mock +import warnings from test.support import os_helper from test.support import socket_helper @@ -1107,6 +1108,11 @@ def test_write_eof_pending(self): class AbstractChildWatcherTests(unittest.TestCase): + def test_warns_on_subclassing(self): + with self.assertWarns(DeprecationWarning): + class MyWatcher(asyncio.AbstractChildWatcher): + pass + def test_not_implemented(self): f = mock.Mock() watcher = asyncio.AbstractChildWatcher() @@ -1686,12 +1692,16 @@ def test_close(self, m_waitpid): class SafeChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase): def create_watcher(self): - return asyncio.SafeChildWatcher() + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return asyncio.SafeChildWatcher() class FastChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase): def create_watcher(self): - return asyncio.FastChildWatcher() + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return asyncio.FastChildWatcher() class PolicyTests(unittest.TestCase): @@ -1702,21 +1712,38 @@ def create_policy(self): def test_get_default_child_watcher(self): policy = self.create_policy() self.assertIsNone(policy._watcher) - - watcher = policy.get_child_watcher() + unix_events.can_use_pidfd = mock.Mock() + unix_events.can_use_pidfd.return_value = False + with self.assertWarns(DeprecationWarning): + watcher = policy.get_child_watcher() self.assertIsInstance(watcher, asyncio.ThreadedChildWatcher) self.assertIs(policy._watcher, watcher) + with self.assertWarns(DeprecationWarning): + self.assertIs(watcher, policy.get_child_watcher()) - self.assertIs(watcher, policy.get_child_watcher()) + policy = self.create_policy() + self.assertIsNone(policy._watcher) + unix_events.can_use_pidfd = mock.Mock() + unix_events.can_use_pidfd.return_value = True + with self.assertWarns(DeprecationWarning): + watcher = policy.get_child_watcher() + self.assertIsInstance(watcher, asyncio.PidfdChildWatcher) + + self.assertIs(policy._watcher, watcher) + with self.assertWarns(DeprecationWarning): + self.assertIs(watcher, policy.get_child_watcher()) def test_get_child_watcher_after_set(self): policy = self.create_policy() - watcher = asyncio.FastChildWatcher() + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + watcher = asyncio.FastChildWatcher() + policy.set_child_watcher(watcher) - policy.set_child_watcher(watcher) self.assertIs(policy._watcher, watcher) - self.assertIs(watcher, policy.get_child_watcher()) + with self.assertWarns(DeprecationWarning): + self.assertIs(watcher, policy.get_child_watcher()) def test_get_child_watcher_thread(self): @@ -1725,7 +1752,9 @@ def f(): 
self.assertIsInstance(policy.get_event_loop(), asyncio.AbstractEventLoop) - watcher = policy.get_child_watcher() + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + watcher = policy.get_child_watcher() self.assertIsInstance(watcher, asyncio.SafeChildWatcher) self.assertIsNone(watcher._loop) @@ -1733,7 +1762,9 @@ def f(): policy.get_event_loop().close() policy = self.create_policy() - policy.set_child_watcher(asyncio.SafeChildWatcher()) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + policy.set_child_watcher(asyncio.SafeChildWatcher()) th = threading.Thread(target=f) th.start() @@ -1745,8 +1776,10 @@ def test_child_watcher_replace_mainloop_existing(self): # Explicitly setup SafeChildWatcher, # default ThreadedChildWatcher has no _loop property - watcher = asyncio.SafeChildWatcher() - policy.set_child_watcher(watcher) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + watcher = asyncio.SafeChildWatcher() + policy.set_child_watcher(watcher) watcher.attach_loop(loop) self.assertIs(watcher._loop, loop) diff --git a/Lib/test/test_asyncio/test_windows_events.py b/Lib/test/test_asyncio/test_windows_events.py index 6b4f65c3376f5d..5033acc052486b 100644 --- a/Lib/test/test_asyncio/test_windows_events.py +++ b/Lib/test/test_asyncio/test_windows_events.py @@ -239,6 +239,17 @@ def test_read_self_pipe_restart(self): self.close_loop(self.loop) self.assertFalse(self.loop.call_exception_handler.called) + def test_address_argument_type_error(self): + # Regression test for https://github.com/python/cpython/issues/98793 + proactor = self.loop._proactor + sock = socket.socket(type=socket.SOCK_DGRAM) + bad_address = None + with self.assertRaises(TypeError): + proactor.connect(sock, bad_address) + with self.assertRaises(TypeError): + proactor.sendto(sock, b'abc', addr=bad_address) + sock.close() + class WinPolicyTests(test_utils.TestCase): diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 96be5a1c3bcf77..5b9c86eb9859a0 100644 --- a/Lib/test/test_asyncio/utils.py +++ b/Lib/test/test_asyncio/utils.py @@ -14,7 +14,7 @@ import threading import unittest import weakref - +import warnings from unittest import mock from http.server import HTTPServer @@ -544,7 +544,9 @@ def close_loop(loop): policy = support.maybe_get_event_loop_policy() if policy is not None: try: - watcher = policy.get_child_watcher() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + watcher = policy.get_child_watcher() except NotImplementedError: # watcher is not implemented by EventLoopPolicy, e.g. 
Windows pass diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py deleted file mode 100644 index 5a037fb9c5d595..00000000000000 --- a/Lib/test/test_asyncore.py +++ /dev/null @@ -1,840 +0,0 @@ -import unittest -import select -import os -import socket -import sys -import time -import errno -import struct -import threading - -from test import support -from test.support import os_helper -from test.support import socket_helper -from test.support import threading_helper -from test.support import warnings_helper -from io import BytesIO - -if support.PGO: - raise unittest.SkipTest("test is not helpful for PGO") - -support.requires_working_socket(module=True) - -asyncore = warnings_helper.import_deprecated('asyncore') - - -HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX') - -class dummysocket: - def __init__(self): - self.closed = False - - def close(self): - self.closed = True - - def fileno(self): - return 42 - -class dummychannel: - def __init__(self): - self.socket = dummysocket() - - def close(self): - self.socket.close() - -class exitingdummy: - def __init__(self): - pass - - def handle_read_event(self): - raise asyncore.ExitNow() - - handle_write_event = handle_read_event - handle_close = handle_read_event - handle_expt_event = handle_read_event - -class crashingdummy: - def __init__(self): - self.error_handled = False - - def handle_read_event(self): - raise Exception() - - handle_write_event = handle_read_event - handle_close = handle_read_event - handle_expt_event = handle_read_event - - def handle_error(self): - self.error_handled = True - -# used when testing senders; just collects what it gets until newline is sent -def capture_server(evt, buf, serv): - try: - serv.listen() - conn, addr = serv.accept() - except TimeoutError: - pass - else: - n = 200 - for _ in support.busy_retry(support.SHORT_TIMEOUT): - r, w, e = select.select([conn], [], [], 0.1) - if r: - n -= 1 - data = conn.recv(10) - # keep everything except for the newline terminator - buf.write(data.replace(b'\n', b'')) - if b'\n' in data: - break - if n <= 0: - break - - conn.close() - finally: - serv.close() - evt.set() - -def bind_af_aware(sock, addr): - """Helper function to bind a socket according to its family.""" - if HAS_UNIX_SOCKETS and sock.family == socket.AF_UNIX: - # Make sure the path doesn't exist. 
- os_helper.unlink(addr) - socket_helper.bind_unix_socket(sock, addr) - else: - sock.bind(addr) - - -class HelperFunctionTests(unittest.TestCase): - def test_readwriteexc(self): - # Check exception handling behavior of read, write and _exception - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore read/write/_exception calls - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.read, tr1) - self.assertRaises(asyncore.ExitNow, asyncore.write, tr1) - self.assertRaises(asyncore.ExitNow, asyncore._exception, tr1) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - asyncore.read(tr2) - self.assertEqual(tr2.error_handled, True) - - tr2 = crashingdummy() - asyncore.write(tr2) - self.assertEqual(tr2.error_handled, True) - - tr2 = crashingdummy() - asyncore._exception(tr2) - self.assertEqual(tr2.error_handled, True) - - # asyncore.readwrite uses constants in the select module that - # are not present in Windows systems (see this thread: - # http://mail.python.org/pipermail/python-list/2001-October/109973.html) - # These constants should be present as long as poll is available - - @unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') - def test_readwrite(self): - # Check that correct methods are called by readwrite() - - attributes = ('read', 'expt', 'write', 'closed', 'error_handled') - - expected = ( - (select.POLLIN, 'read'), - (select.POLLPRI, 'expt'), - (select.POLLOUT, 'write'), - (select.POLLERR, 'closed'), - (select.POLLHUP, 'closed'), - (select.POLLNVAL, 'closed'), - ) - - class testobj: - def __init__(self): - self.read = False - self.write = False - self.closed = False - self.expt = False - self.error_handled = False - - def handle_read_event(self): - self.read = True - - def handle_write_event(self): - self.write = True - - def handle_close(self): - self.closed = True - - def handle_expt_event(self): - self.expt = True - - def handle_error(self): - self.error_handled = True - - for flag, expectedattr in expected: - tobj = testobj() - self.assertEqual(getattr(tobj, expectedattr), False) - asyncore.readwrite(tobj, flag) - - # Only the attribute modified by the routine we expect to be - # called should be True. 
- for attr in attributes: - self.assertEqual(getattr(tobj, attr), attr==expectedattr) - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore readwrite call - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - self.assertEqual(tr2.error_handled, False) - asyncore.readwrite(tr2, flag) - self.assertEqual(tr2.error_handled, True) - - def test_closeall(self): - self.closeall_check(False) - - def test_closeall_default(self): - self.closeall_check(True) - - def closeall_check(self, usedefault): - # Check that close_all() closes everything in a given map - - l = [] - testmap = {} - for i in range(10): - c = dummychannel() - l.append(c) - self.assertEqual(c.socket.closed, False) - testmap[i] = c - - if usedefault: - socketmap = asyncore.socket_map - try: - asyncore.socket_map = testmap - asyncore.close_all() - finally: - testmap, asyncore.socket_map = asyncore.socket_map, socketmap - else: - asyncore.close_all(testmap) - - self.assertEqual(len(testmap), 0) - - for c in l: - self.assertEqual(c.socket.closed, True) - - def test_compact_traceback(self): - try: - raise Exception("I don't like spam!") - except: - real_t, real_v, real_tb = sys.exc_info() - r = asyncore.compact_traceback() - else: - self.fail("Expected exception") - - (f, function, line), t, v, info = r - self.assertEqual(os.path.split(f)[-1], 'test_asyncore.py') - self.assertEqual(function, 'test_compact_traceback') - self.assertEqual(t, real_t) - self.assertEqual(v, real_v) - self.assertEqual(info, '[%s|%s|%s]' % (f, function, line)) - - -class DispatcherTests(unittest.TestCase): - def setUp(self): - pass - - def tearDown(self): - asyncore.close_all() - - def test_basic(self): - d = asyncore.dispatcher() - self.assertEqual(d.readable(), True) - self.assertEqual(d.writable(), True) - - def test_repr(self): - d = asyncore.dispatcher() - self.assertEqual(repr(d), '' % id(d)) - - def test_log(self): - d = asyncore.dispatcher() - - # capture output of dispatcher.log() (to stderr) - l1 = "Lovely spam! Wonderful spam!" - l2 = "I don't like spam!" - with support.captured_stderr() as stderr: - d.log(l1) - d.log(l2) - - lines = stderr.getvalue().splitlines() - self.assertEqual(lines, ['log: %s' % l1, 'log: %s' % l2]) - - def test_log_info(self): - d = asyncore.dispatcher() - - # capture output of dispatcher.log_info() (to stdout via print) - l1 = "Have you got anything without spam?" - l2 = "Why can't she have egg bacon spam and sausage?" - l3 = "THAT'S got spam in it!" 
- with support.captured_stdout() as stdout: - d.log_info(l1, 'EGGS') - d.log_info(l2) - d.log_info(l3, 'SPAM') - - lines = stdout.getvalue().splitlines() - expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3] - self.assertEqual(lines, expected) - - def test_unhandled(self): - d = asyncore.dispatcher() - d.ignore_log_types = () - - # capture output of dispatcher.log_info() (to stdout via print) - with support.captured_stdout() as stdout: - d.handle_expt() - d.handle_read() - d.handle_write() - d.handle_connect() - - lines = stdout.getvalue().splitlines() - expected = ['warning: unhandled incoming priority event', - 'warning: unhandled read event', - 'warning: unhandled write event', - 'warning: unhandled connect event'] - self.assertEqual(lines, expected) - - def test_strerror(self): - # refers to bug #8573 - err = asyncore._strerror(errno.EPERM) - if hasattr(os, 'strerror'): - self.assertEqual(err, os.strerror(errno.EPERM)) - err = asyncore._strerror(-1) - self.assertTrue(err != "") - - -class dispatcherwithsend_noread(asyncore.dispatcher_with_send): - def readable(self): - return False - - def handle_connect(self): - pass - - -class DispatcherWithSendTests(unittest.TestCase): - def setUp(self): - pass - - def tearDown(self): - asyncore.close_all() - - @threading_helper.reap_threads - def test_send(self): - evt = threading.Event() - sock = socket.socket() - sock.settimeout(3) - port = socket_helper.bind_port(sock) - - cap = BytesIO() - args = (evt, cap, sock) - t = threading.Thread(target=capture_server, args=args) - t.start() - try: - # wait a little longer for the server to initialize (it sometimes - # refuses connections on slow machines without this wait) - time.sleep(0.2) - - data = b"Suppose there isn't a 16-ton weight?" - d = dispatcherwithsend_noread() - d.create_socket() - d.connect((socket_helper.HOST, port)) - - # give time for socket to connect - time.sleep(0.1) - - d.send(data) - d.send(data) - d.send(b'\n') - - n = 1000 - while d.out_buffer and n > 0: - asyncore.poll() - n -= 1 - - evt.wait() - - self.assertEqual(cap.getvalue(), data*2) - finally: - threading_helper.join_thread(t) - - -@unittest.skipUnless(hasattr(asyncore, 'file_wrapper'), - 'asyncore.file_wrapper required') -class FileWrapperTest(unittest.TestCase): - def setUp(self): - self.d = b"It's not dead, it's sleeping!" - with open(os_helper.TESTFN, 'wb') as file: - file.write(self.d) - - def tearDown(self): - os_helper.unlink(os_helper.TESTFN) - - def test_recv(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - w = asyncore.file_wrapper(fd) - os.close(fd) - - self.assertNotEqual(w.fd, fd) - self.assertNotEqual(w.fileno(), fd) - self.assertEqual(w.recv(13), b"It's not dead") - self.assertEqual(w.read(6), b", it's") - w.close() - self.assertRaises(OSError, w.read, 1) - - def test_send(self): - d1 = b"Come again?" - d2 = b"I want to buy some cheese." 
- fd = os.open(os_helper.TESTFN, os.O_WRONLY | os.O_APPEND) - w = asyncore.file_wrapper(fd) - os.close(fd) - - w.write(d1) - w.send(d2) - w.close() - with open(os_helper.TESTFN, 'rb') as file: - self.assertEqual(file.read(), self.d + d1 + d2) - - @unittest.skipUnless(hasattr(asyncore, 'file_dispatcher'), - 'asyncore.file_dispatcher required') - def test_dispatcher(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - data = [] - class FileDispatcher(asyncore.file_dispatcher): - def handle_read(self): - data.append(self.recv(29)) - s = FileDispatcher(fd) - os.close(fd) - asyncore.loop(timeout=0.01, use_poll=True, count=2) - self.assertEqual(b"".join(data), self.d) - - def test_resource_warning(self): - # Issue #11453 - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - f = asyncore.file_wrapper(fd) - - os.close(fd) - with warnings_helper.check_warnings(('', ResourceWarning)): - f = None - support.gc_collect() - - def test_close_twice(self): - fd = os.open(os_helper.TESTFN, os.O_RDONLY) - f = asyncore.file_wrapper(fd) - os.close(fd) - - os.close(f.fd) # file_wrapper dupped fd - with self.assertRaises(OSError): - f.close() - - self.assertEqual(f.fd, -1) - # calling close twice should not fail - f.close() - - -class BaseTestHandler(asyncore.dispatcher): - - def __init__(self, sock=None): - asyncore.dispatcher.__init__(self, sock) - self.flag = False - - def handle_accept(self): - raise Exception("handle_accept not supposed to be called") - - def handle_accepted(self): - raise Exception("handle_accepted not supposed to be called") - - def handle_connect(self): - raise Exception("handle_connect not supposed to be called") - - def handle_expt(self): - raise Exception("handle_expt not supposed to be called") - - def handle_close(self): - raise Exception("handle_close not supposed to be called") - - def handle_error(self): - raise - - -class BaseServer(asyncore.dispatcher): - """A server which listens on an address and dispatches the - connection to a handler. 
- """ - - def __init__(self, family, addr, handler=BaseTestHandler): - asyncore.dispatcher.__init__(self) - self.create_socket(family) - self.set_reuse_addr() - bind_af_aware(self.socket, addr) - self.listen(5) - self.handler = handler - - @property - def address(self): - return self.socket.getsockname() - - def handle_accepted(self, sock, addr): - self.handler(sock) - - def handle_error(self): - raise - - -class BaseClient(BaseTestHandler): - - def __init__(self, family, address): - BaseTestHandler.__init__(self) - self.create_socket(family) - self.connect(address) - - def handle_connect(self): - pass - - -class BaseTestAPI: - - def tearDown(self): - asyncore.close_all(ignore_all=True) - - def loop_waiting_for_flag(self, instance, timeout=5): - timeout = float(timeout) / 100 - count = 100 - while asyncore.socket_map and count > 0: - asyncore.loop(timeout=0.01, count=1, use_poll=self.use_poll) - if instance.flag: - return - count -= 1 - time.sleep(timeout) - self.fail("flag not set") - - def test_handle_connect(self): - # make sure handle_connect is called on connect() - - class TestClient(BaseClient): - def handle_connect(self): - self.flag = True - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_accept(self): - # make sure handle_accept() is called when a client connects - - class TestListener(BaseTestHandler): - - def __init__(self, family, addr): - BaseTestHandler.__init__(self) - self.create_socket(family) - bind_af_aware(self.socket, addr) - self.listen(5) - self.address = self.socket.getsockname() - - def handle_accept(self): - self.flag = True - - server = TestListener(self.family, self.addr) - client = BaseClient(self.family, server.address) - self.loop_waiting_for_flag(server) - - def test_handle_accepted(self): - # make sure handle_accepted() is called when a client connects - - class TestListener(BaseTestHandler): - - def __init__(self, family, addr): - BaseTestHandler.__init__(self) - self.create_socket(family) - bind_af_aware(self.socket, addr) - self.listen(5) - self.address = self.socket.getsockname() - - def handle_accept(self): - asyncore.dispatcher.handle_accept(self) - - def handle_accepted(self, sock, addr): - sock.close() - self.flag = True - - server = TestListener(self.family, self.addr) - client = BaseClient(self.family, server.address) - self.loop_waiting_for_flag(server) - - - def test_handle_read(self): - # make sure handle_read is called on data received - - class TestClient(BaseClient): - def handle_read(self): - self.flag = True - - class TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.send(b'x' * 1024) - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_write(self): - # make sure handle_write is called - - class TestClient(BaseClient): - def handle_write(self): - self.flag = True - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_close(self): - # make sure handle_close is called when the other end closes - # the connection - - class TestClient(BaseClient): - - def handle_read(self): - # in order to make handle_close be called we are supposed - # to make at least one recv() call - self.recv(1024) - - def handle_close(self): - self.flag = True - self.close() - - class 
TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.close() - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_close_after_conn_broken(self): - # Check that ECONNRESET/EPIPE is correctly handled (issues #5661 and - # #11265). - - data = b'\0' * 128 - - class TestClient(BaseClient): - - def handle_write(self): - self.send(data) - - def handle_close(self): - self.flag = True - self.close() - - def handle_expt(self): - self.flag = True - self.close() - - class TestHandler(BaseTestHandler): - - def handle_read(self): - self.recv(len(data)) - self.close() - - def writable(self): - return False - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - @unittest.skipIf(sys.platform.startswith("sunos"), - "OOB support is broken on Solaris") - def test_handle_expt(self): - # Make sure handle_expt is called on OOB data received. - # Note: this might fail on some platforms as OOB data is - # tenuously supported and rarely used. - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: - self.skipTest("Not applicable to AF_UNIX sockets.") - - if sys.platform == "darwin" and self.use_poll: - self.skipTest("poll may fail on macOS; see issue #28087") - - class TestClient(BaseClient): - def handle_expt(self): - self.socket.recv(1024, socket.MSG_OOB) - self.flag = True - - class TestHandler(BaseTestHandler): - def __init__(self, conn): - BaseTestHandler.__init__(self, conn) - self.socket.send(bytes(chr(244), 'latin-1'), socket.MSG_OOB) - - server = BaseServer(self.family, self.addr, TestHandler) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_handle_error(self): - - class TestClient(BaseClient): - def handle_write(self): - 1.0 / 0 - def handle_error(self): - self.flag = True - try: - raise - except ZeroDivisionError: - pass - else: - raise Exception("exception not raised") - - server = BaseServer(self.family, self.addr) - client = TestClient(self.family, server.address) - self.loop_waiting_for_flag(client) - - def test_connection_attributes(self): - server = BaseServer(self.family, self.addr) - client = BaseClient(self.family, server.address) - - # we start disconnected - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - # this can't be taken for granted across all platforms - #self.assertFalse(client.connected) - self.assertFalse(client.accepting) - - # execute some loops so that client connects to server - asyncore.loop(timeout=0.01, use_poll=self.use_poll, count=100) - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - self.assertTrue(client.connected) - self.assertFalse(client.accepting) - - # disconnect the client - client.close() - self.assertFalse(server.connected) - self.assertTrue(server.accepting) - self.assertFalse(client.connected) - self.assertFalse(client.accepting) - - # stop serving - server.close() - self.assertFalse(server.connected) - self.assertFalse(server.accepting) - - def test_create_socket(self): - s = asyncore.dispatcher() - s.create_socket(self.family) - self.assertEqual(s.socket.type, socket.SOCK_STREAM) - self.assertEqual(s.socket.family, self.family) - self.assertEqual(s.socket.gettimeout(), 0) - self.assertFalse(s.socket.get_inheritable()) - - def test_bind(self): - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: 
- self.skipTest("Not applicable to AF_UNIX sockets.") - s1 = asyncore.dispatcher() - s1.create_socket(self.family) - s1.bind(self.addr) - s1.listen(5) - port = s1.socket.getsockname()[1] - - s2 = asyncore.dispatcher() - s2.create_socket(self.family) - # EADDRINUSE indicates the socket was correctly bound - self.assertRaises(OSError, s2.bind, (self.addr[0], port)) - - def test_set_reuse_addr(self): - if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX: - self.skipTest("Not applicable to AF_UNIX sockets.") - - with socket.socket(self.family) as sock: - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - except OSError: - unittest.skip("SO_REUSEADDR not supported on this platform") - else: - # if SO_REUSEADDR succeeded for sock we expect asyncore - # to do the same - s = asyncore.dispatcher(socket.socket(self.family)) - self.assertFalse(s.socket.getsockopt(socket.SOL_SOCKET, - socket.SO_REUSEADDR)) - s.socket.close() - s.create_socket(self.family) - s.set_reuse_addr() - self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET, - socket.SO_REUSEADDR)) - - @threading_helper.reap_threads - def test_quick_connect(self): - # see: http://bugs.python.org/issue10340 - if self.family not in (socket.AF_INET, getattr(socket, "AF_INET6", object())): - self.skipTest("test specific to AF_INET and AF_INET6") - - server = BaseServer(self.family, self.addr) - # run the thread 500 ms: the socket should be connected in 200 ms - t = threading.Thread(target=lambda: asyncore.loop(timeout=0.1, - count=5)) - t.start() - try: - with socket.socket(self.family, socket.SOCK_STREAM) as s: - s.settimeout(.2) - s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, - struct.pack('ii', 1, 0)) - - try: - s.connect(server.address) - except OSError: - pass - finally: - threading_helper.join_thread(t) - -class TestAPI_UseIPv4Sockets(BaseTestAPI): - family = socket.AF_INET - addr = (socket_helper.HOST, 0) - -@unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 support required') -class TestAPI_UseIPv6Sockets(BaseTestAPI): - family = socket.AF_INET6 - addr = (socket_helper.HOSTv6, 0) - -@unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required') -class TestAPI_UseUnixSockets(BaseTestAPI): - if HAS_UNIX_SOCKETS: - family = socket.AF_UNIX - addr = os_helper.TESTFN - - def tearDown(self): - os_helper.unlink(self.addr) - BaseTestAPI.tearDown(self) - -class TestAPI_UseIPv4Select(TestAPI_UseIPv4Sockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseIPv4Poll(TestAPI_UseIPv4Sockets, unittest.TestCase): - use_poll = True - -class TestAPI_UseIPv6Select(TestAPI_UseIPv6Sockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseIPv6Poll(TestAPI_UseIPv6Sockets, unittest.TestCase): - use_poll = True - -class TestAPI_UseUnixSocketsSelect(TestAPI_UseUnixSockets, unittest.TestCase): - use_poll = False - -@unittest.skipUnless(hasattr(select, 'poll'), 'select.poll required') -class TestAPI_UseUnixSocketsPoll(TestAPI_UseUnixSockets, unittest.TestCase): - use_poll = True - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_audit.py b/Lib/test/test_audit.py index 09b3333afe184f..70f8a77a4761a7 100644 --- a/Lib/test/test_audit.py +++ b/Lib/test/test_audit.py @@ -16,6 +16,7 @@ class AuditTest(unittest.TestCase): + maxDiff = None @support.requires_subprocess() def do_test(self, *args): @@ -186,6 +187,22 @@ def test_sys_getframe(self): 
self.assertEqual(actual, expected) + def test_threading(self): + returncode, events, stderr = self.run_python("test_threading") + if returncode: + self.fail(stderr) + + if support.verbose: + print(*events, sep='\n') + actual = [(ev[0], ev[2]) for ev in events] + expected = [ + ("_thread.start_new_thread", "(, (), None)"), + ("test.test_func", "()"), + ] + + self.assertEqual(actual, expected) + + def test_wmi_exec_query(self): import_helper.import_module("_wmi") returncode, events, stderr = self.run_python("test_wmi_exec_query") @@ -199,6 +216,34 @@ def test_wmi_exec_query(self): self.assertEqual(actual, expected) + def test_syslog(self): + syslog = import_helper.import_module("syslog") + + returncode, events, stderr = self.run_python("test_syslog") + if returncode: + self.fail(stderr) + + if support.verbose: + print('Events:', *events, sep='\n ') + + self.assertSequenceEqual( + events, + [('syslog.openlog', ' ', f'python 0 {syslog.LOG_USER}'), + ('syslog.syslog', ' ', f'{syslog.LOG_INFO} test'), + ('syslog.setlogmask', ' ', f'{syslog.LOG_DEBUG}'), + ('syslog.closelog', '', ''), + ('syslog.syslog', ' ', f'{syslog.LOG_INFO} test2'), + ('syslog.openlog', ' ', f'audit-tests.py 0 {syslog.LOG_USER}'), + ('syslog.openlog', ' ', f'audit-tests.py {syslog.LOG_NDELAY} {syslog.LOG_LOCAL0}'), + ('syslog.openlog', ' ', f'None 0 {syslog.LOG_USER}'), + ('syslog.closelog', '', '')] + ) + + def test_not_in_gc(self): + returncode, _, stderr = self.run_python("test_not_in_gc") + if returncode: + self.fail(stderr) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 31e50638d4a7c1..eb1c389257cc4b 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -159,7 +159,7 @@ def test_import(self): __import__('string') __import__(name='sys') __import__(name='time', level=0) - self.assertRaises(ImportError, __import__, 'spamspam') + self.assertRaises(ModuleNotFoundError, __import__, 'spamspam') self.assertRaises(TypeError, __import__, 1, 2, 3, 4) self.assertRaises(ValueError, __import__, '') self.assertRaises(TypeError, __import__, 'sys', name='sys') @@ -736,6 +736,7 @@ def test_exec_globals(self): self.assertRaises(TypeError, exec, code, {'__builtins__': 123}) + def test_exec_globals_frozen(self): class frozendict_error(Exception): pass @@ -767,6 +768,36 @@ def __setitem__(self, key, value): self.assertRaises(frozendict_error, exec, code, namespace) + def test_exec_globals_error_on_get(self): + # custom `globals` or `builtins` can raise errors on item access + class setonlyerror(Exception): + pass + + class setonlydict(dict): + def __getitem__(self, key): + raise setonlyerror + + # globals' `__getitem__` raises + code = compile("globalname", "test", "exec") + self.assertRaises(setonlyerror, + exec, code, setonlydict({'globalname': 1})) + + # builtins' `__getitem__` raises + code = compile("superglobal", "test", "exec") + self.assertRaises(setonlyerror, exec, code, + {'__builtins__': setonlydict({'superglobal': 1})}) + + def test_exec_globals_dict_subclass(self): + class customdict(dict): # this one should not do anything fancy + pass + + code = compile("superglobal", "test", "exec") + # works correctly + exec(code, {'__builtins__': customdict({'superglobal': 1})}) + # custom builtins dict subclass is missing key + self.assertRaisesRegex(NameError, "name 'superglobal' is not defined", + exec, code, {'__builtins__': customdict()}) + def test_exec_redirected(self): savestdout = sys.stdout sys.stdout = None # Whatever that cannot flush() @@ 
-2349,7 +2380,7 @@ def test_type_name(self): self.assertEqual(A.__module__, __name__) with self.assertRaises(ValueError): type('A\x00B', (), {}) - with self.assertRaises(ValueError): + with self.assertRaises(UnicodeEncodeError): type('A\udcdcB', (), {}) with self.assertRaises(TypeError): type(b'A', (), {}) @@ -2366,7 +2397,7 @@ def test_type_name(self): with self.assertRaises(ValueError): A.__name__ = 'A\x00B' self.assertEqual(A.__name__, 'C') - with self.assertRaises(ValueError): + with self.assertRaises(UnicodeEncodeError): A.__name__ = 'A\udcdcB' self.assertEqual(A.__name__, 'C') with self.assertRaises(TypeError): diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 53ba1ad6f5911f..7c62b722059d12 100644 --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -1225,6 +1225,8 @@ class SubBytes(bytes): class ByteArrayTest(BaseBytesTest, unittest.TestCase): type2test = bytearray + _testcapi = import_helper.import_module('_testcapi') + def test_getitem_error(self): b = bytearray(b'python') msg = "bytearray indices must be integers or slices" @@ -1317,47 +1319,73 @@ def by(s): self.assertEqual(re.findall(br"\w+", b), [by("Hello"), by("world")]) def test_setitem(self): - b = bytearray([1, 2, 3]) - b[1] = 100 - self.assertEqual(b, bytearray([1, 100, 3])) - b[-1] = 200 - self.assertEqual(b, bytearray([1, 100, 200])) - b[0] = Indexable(10) - self.assertEqual(b, bytearray([10, 100, 200])) - try: - b[3] = 0 - self.fail("Didn't raise IndexError") - except IndexError: - pass - try: - b[-10] = 0 - self.fail("Didn't raise IndexError") - except IndexError: - pass - try: - b[0] = 256 - self.fail("Didn't raise ValueError") - except ValueError: - pass - try: - b[0] = Indexable(-1) - self.fail("Didn't raise ValueError") - except ValueError: - pass - try: - b[0] = None - self.fail("Didn't raise TypeError") - except TypeError: - pass + def setitem_as_mapping(b, i, val): + b[i] = val + + def setitem_as_sequence(b, i, val): + self._testcapi.sequence_setitem(b, i, val) + + def do_tests(setitem): + b = bytearray([1, 2, 3]) + setitem(b, 1, 100) + self.assertEqual(b, bytearray([1, 100, 3])) + setitem(b, -1, 200) + self.assertEqual(b, bytearray([1, 100, 200])) + setitem(b, 0, Indexable(10)) + self.assertEqual(b, bytearray([10, 100, 200])) + try: + setitem(b, 3, 0) + self.fail("Didn't raise IndexError") + except IndexError: + pass + try: + setitem(b, -10, 0) + self.fail("Didn't raise IndexError") + except IndexError: + pass + try: + setitem(b, 0, 256) + self.fail("Didn't raise ValueError") + except ValueError: + pass + try: + setitem(b, 0, Indexable(-1)) + self.fail("Didn't raise ValueError") + except ValueError: + pass + try: + setitem(b, 0, None) + self.fail("Didn't raise TypeError") + except TypeError: + pass + + with self.subTest("tp_as_mapping"): + do_tests(setitem_as_mapping) + + with self.subTest("tp_as_sequence"): + do_tests(setitem_as_sequence) def test_delitem(self): - b = bytearray(range(10)) - del b[0] - self.assertEqual(b, bytearray(range(1, 10))) - del b[-1] - self.assertEqual(b, bytearray(range(1, 9))) - del b[4] - self.assertEqual(b, bytearray([1, 2, 3, 4, 6, 7, 8])) + def del_as_mapping(b, i): + del b[i] + + def del_as_sequence(b, i): + self._testcapi.sequence_delitem(b, i) + + def do_tests(delete): + b = bytearray(range(10)) + delete(b, 0) + self.assertEqual(b, bytearray(range(1, 10))) + delete(b, -1) + self.assertEqual(b, bytearray(range(1, 9))) + delete(b, 4) + self.assertEqual(b, bytearray([1, 2, 3, 4, 6, 7, 8])) + + with self.subTest("tp_as_mapping"): + 
do_tests(del_as_mapping) + + with self.subTest("tp_as_sequence"): + do_tests(del_as_sequence) def test_setslice(self): b = bytearray(range(10)) @@ -1729,6 +1757,8 @@ def test_repeat_after_setslice(self): self.assertEqual(b3, b'xcxcxc') def test_mutating_index(self): + # See gh-91153 + class Boom: def __index__(self): b.clear() @@ -1740,10 +1770,9 @@ def __index__(self): b[0] = Boom() with self.subTest("tp_as_sequence"): - _testcapi = import_helper.import_module('_testcapi') b = bytearray(b'Now you see me...') with self.assertRaises(IndexError): - _testcapi.sequence_setitem(b, 0, Boom()) + self._testcapi.sequence_setitem(b, 0, Boom()) class AssortedBytesTest(unittest.TestCase): diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index 1f3307f822a5fc..f148b5ebbc5a9b 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -140,9 +140,9 @@ def test_varargs14_kw(self): itertools.product, 0, repeat=1, foo=2) def test_varargs15_kw(self): - msg = r"^ImportError\(\) takes at most 2 keyword arguments \(3 given\)$" + msg = r"^ImportError\(\) takes at most 3 keyword arguments \(4 given\)$" self.assertRaisesRegex(TypeError, msg, - ImportError, 0, name=1, path=2, foo=3) + ImportError, 0, name=1, path=2, name_from=3, foo=3) def test_varargs16_kw(self): msg = r"^min\(\) takes at most 2 keyword arguments \(3 given\)$" @@ -580,7 +580,7 @@ def testfunction_kw(self, *, kw): return self -QUICKENING_WARMUP_DELAY = 8 +ADAPTIVE_WARMUP_DELAY = 2 class TestPEP590(unittest.TestCase): @@ -771,7 +771,7 @@ def f(num): return num + 1 assert_equal(11, f(num)) function_setvectorcall(f) # make sure specializer is triggered by running > 50 times - for _ in range(10 * QUICKENING_WARMUP_DELAY): + for _ in range(10 * ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", f(num)) def test_setvectorcall_load_attr_specialization_skip(self): @@ -787,7 +787,7 @@ def __getattribute__(self, attr): function_setvectorcall(X.__getattribute__) # make sure specialization doesn't trigger # when vectorcall is overridden - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", x.a) def test_setvectorcall_load_attr_specialization_deopt(self): @@ -803,20 +803,52 @@ def get_a(x): assert_equal = self.assertEqual x = X() # trigger LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN specialization - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("a", get_a(x)) function_setvectorcall(X.__getattribute__) # make sure specialized LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN # gets deopted due to overridden vectorcall - for _ in range(QUICKENING_WARMUP_DELAY): + for _ in range(ADAPTIVE_WARMUP_DELAY): assert_equal("overridden", get_a(x)) @requires_limited_api - def test_vectorcall_limited(self): + def test_vectorcall_limited_incoming(self): from _testcapi import pyobject_vectorcall obj = _testcapi.LimitedVectorCallClass() self.assertEqual(pyobject_vectorcall(obj, (), ()), "vectorcall called") + @requires_limited_api + def test_vectorcall_limited_outgoing(self): + from _testcapi import call_vectorcall + + args_captured = [] + kwargs_captured = [] + + def f(*args, **kwargs): + args_captured.append(args) + kwargs_captured.append(kwargs) + return "success" + + self.assertEqual(call_vectorcall(f), "success") + self.assertEqual(args_captured, [("foo",)]) + self.assertEqual(kwargs_captured, [{"baz": "bar"}]) + + @requires_limited_api + def test_vectorcall_limited_outgoing_method(self): + from _testcapi import call_vectorcall_method + + args_captured = [] + kwargs_captured = 
[] + + class TestInstance: + def f(self, *args, **kwargs): + args_captured.append(args) + kwargs_captured.append(kwargs) + return "success" + + self.assertEqual(call_vectorcall_method(TestInstance()), "success") + self.assertEqual(args_captured, [("foo",)]) + self.assertEqual(kwargs_captured, [{"baz": "bar"}]) class A: def method_two_args(self, x, y): diff --git a/Lib/test/test_capi/__init__.py b/Lib/test/test_capi/__init__.py new file mode 100644 index 00000000000000..4b16ecc31156a5 --- /dev/null +++ b/Lib/test/test_capi/__init__.py @@ -0,0 +1,5 @@ +import os +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_capi/__main__.py b/Lib/test/test_capi/__main__.py new file mode 100644 index 00000000000000..05d01775ddf43c --- /dev/null +++ b/Lib/test/test_capi/__main__.py @@ -0,0 +1,3 @@ +import unittest + +unittest.main('test.test_capi') diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_capi/test_getargs.py similarity index 100% rename from Lib/test/test_getargs2.py rename to Lib/test/test_capi/test_getargs.py diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi/test_misc.py similarity index 81% rename from Lib/test/test_capi.py rename to Lib/test/test_capi/test_misc.py index 2c6fe34d3b788c..1d30adaee9218f 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi/test_misc.py @@ -402,6 +402,98 @@ def items(self): self.assertRaises(TypeError, _testcapi.get_mapping_values, bad_mapping) self.assertRaises(TypeError, _testcapi.get_mapping_items, bad_mapping) + def test_mapping_has_key(self): + dct = {'a': 1} + self.assertTrue(_testcapi.mapping_has_key(dct, 'a')) + self.assertFalse(_testcapi.mapping_has_key(dct, 'b')) + + class SubDict(dict): + pass + + dct2 = SubDict({'a': 1}) + self.assertTrue(_testcapi.mapping_has_key(dct2, 'a')) + self.assertFalse(_testcapi.mapping_has_key(dct2, 'b')) + + def test_sequence_set_slice(self): + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + _testcapi.sequence_set_slice(data, 1, 3, [8, 9]) + data_copy[1:3] = [8, 9] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 8, 9, 4, 5]) + + # Custom class: + class Custom: + def __setitem__(self, index, value): + self.index = index + self.value = value + + c = Custom() + _testcapi.sequence_set_slice(c, 0, 5, 'abc') + self.assertEqual(c.index, slice(0, 5)) + self.assertEqual(c.value, 'abc') + + # Immutable sequences must raise: + bad_seq1 = (1, 2, 3, 4) + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(bad_seq1, 1, 3, (8, 9)) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(bad_seq2, 1, 3, 'xy') + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(None, 1, 3, 'xy') + + mapping = {1: 'a', 2: 'b', 3: 'c'} + with self.assertRaises(TypeError): + _testcapi.sequence_set_slice(mapping, 1, 3, 'xy') + self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) + + def test_sequence_del_slice(self): + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + _testcapi.sequence_del_slice(data, 1, 3) + del data_copy[1:3] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 4, 5]) + + # Custom class: + class Custom: + def __delitem__(self, index): + self.index = index + + c = Custom() + _testcapi.sequence_del_slice(c, 0, 5) + self.assertEqual(c.index, slice(0, 5)) + + # Immutable sequences must 
raise: + bad_seq1 = (1, 2, 3, 4) + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(bad_seq1, 1, 3) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(bad_seq2, 1, 3) + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(None, 1, 3) + + mapping = {1: 'a', 2: 'b', 3: 'c'} + with self.assertRaises(TypeError): + _testcapi.sequence_del_slice(mapping, 1, 3) + self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) + @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), 'need _testcapi.negative_refcount') def test_negative_refcount(self): @@ -878,6 +970,205 @@ def __init__(self): _testcapi.clear_managed_dict(c) self.assertEqual(c.__dict__, {}) + def test_eval_get_func_name(self): + def function_example(): ... + + class A: + def method_example(self): ... + + self.assertEqual(_testcapi.eval_get_func_name(function_example), + "function_example") + self.assertEqual(_testcapi.eval_get_func_name(A.method_example), + "method_example") + self.assertEqual(_testcapi.eval_get_func_name(A().method_example), + "method_example") + self.assertEqual(_testcapi.eval_get_func_name(sum), "sum") # c function + self.assertEqual(_testcapi.eval_get_func_name(A), "type") + + def test_eval_get_func_desc(self): + def function_example(): ... + + class A: + def method_example(self): ... + + self.assertEqual(_testcapi.eval_get_func_desc(function_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(A.method_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(A().method_example), + "()") + self.assertEqual(_testcapi.eval_get_func_desc(sum), "()") # c function + self.assertEqual(_testcapi.eval_get_func_desc(A), " object") + + def test_function_get_code(self): + import types + + def some(): + pass + + code = _testcapi.function_get_code(some) + self.assertIsInstance(code, types.CodeType) + self.assertEqual(code, some.__code__) + + with self.assertRaises(SystemError): + _testcapi.function_get_code(None) # not a function + + def test_function_get_globals(self): + def some(): + pass + + globals_ = _testcapi.function_get_globals(some) + self.assertIsInstance(globals_, dict) + self.assertEqual(globals_, some.__globals__) + + with self.assertRaises(SystemError): + _testcapi.function_get_globals(None) # not a function + + def test_function_get_module(self): + def some(): + pass + + module = _testcapi.function_get_module(some) + self.assertIsInstance(module, str) + self.assertEqual(module, some.__module__) + + with self.assertRaises(SystemError): + _testcapi.function_get_module(None) # not a function + + def test_function_get_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_defaults(some) + self.assertEqual(defaults, ('p', 0, None)) + self.assertEqual(defaults, some.__defaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_defaults(None) # not a function + + def test_function_set_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = ('p', 0, None) + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(some, 1) # not tuple or None + 
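# A minimal pure-Python sketch of the behaviour the PyFunction_{Get,Set}Defaults
# tests here rely on: the C getter/setter mirror the function's __defaults__
# attribute, and None clears it (NULL at the C level). Illustrative only; the
# name below is local to this sketch.
def sketch_func(a, b='p', *, kw=True):
    pass

assert sketch_func.__defaults__ == ('p',)   # what PyFunction_GetDefaults() reports
sketch_func.__defaults__ = ('q',)           # what PyFunction_SetDefaults() changes
assert sketch_func.__defaults__ == ('q',)
sketch_func.__defaults__ = None             # clearing: __defaults__ becomes None
assert sketch_func.__defaults__ is None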
self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(1, ()) # not a function + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + new_defaults = ('q', 1, None) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # Empty tuple is fine: + new_defaults = () + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + class tuplesub(tuple): ... # tuple subclasses must work + + new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # `None` is special, it sets `defaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_defaults(some, None) + self.assertEqual(_testcapi.function_get_defaults(some), None) + self.assertEqual(some.__defaults__, None) + + def test_function_get_kw_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_kw_defaults(some) + self.assertEqual(defaults, {'kw2': True}) + self.assertEqual(defaults, some.__kwdefaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_kw_defaults(None) # not a function + + def test_function_set_kw_defaults(self): + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = {'kw2': True} + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(some, 1) # not dict or None + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(1, {}) # not a function + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + new_defaults = {'kw2': (1, 2, 3)} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # Empty dict is fine: + new_defaults = {} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + class dictsub(dict): ... 
# dict subclasses must work + + new_defaults = dictsub({'kw2': None}) + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # `None` is special, it sets `kwdefaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_kw_defaults(some, None) + self.assertEqual(_testcapi.function_get_kw_defaults(some), None) + self.assertEqual(some.__kwdefaults__, None) + class TestPendingCalls(unittest.TestCase): @@ -1032,6 +1323,46 @@ def test_py_config_isoloated_per_interpreter(self): # test fails, assume that the environment in this process may # be altered and suspect. + @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") + def test_configured_settings(self): + """ + The config with which an interpreter is created corresponds + 1-to-1 with the new interpreter's settings. This test verifies + that they match. + """ + import json + + THREADS = 1<<10 + DAEMON_THREADS = 1<<11 + FORK = 1<<15 + EXEC = 1<<16 + + features = ['fork', 'exec', 'threads', 'daemon_threads'] + kwlist = [f'allow_{n}' for n in features] + for config, expected in { + (True, True, True, True): FORK | EXEC | THREADS | DAEMON_THREADS, + (False, False, False, False): 0, + (False, False, True, False): THREADS, + }.items(): + kwargs = dict(zip(kwlist, config)) + expected = { + 'feature_flags': expected, + } + with self.subTest(config): + r, w = os.pipe() + script = textwrap.dedent(f''' + import _testinternalcapi, json, os + settings = _testinternalcapi.get_interp_settings() + with os.fdopen({w}, "w") as stdin: + json.dump(settings, stdin) + ''') + with os.fdopen(r) as stdout: + support.run_in_subinterp_with_config(script, **kwargs) + out = stdout.read() + settings = json.loads(out) + + self.assertEqual(settings, expected) + def test_mutate_exception(self): """ Exceptions saved in global module state get shared between @@ -1344,27 +1675,6 @@ class Subclass(BaseException, self.module.StateAccessType): self.assertIs(Subclass().get_defining_module(), self.module) -class Test_FrameAPI(unittest.TestCase): - - def getframe(self): - return sys._getframe() - - def getgenframe(self): - yield sys._getframe() - - def test_frame_getters(self): - frame = self.getframe() - self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) - self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) - self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) - self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) - - def test_frame_get_generator(self): - gen = self.getgenframe() - frame = next(gen) - self.assertIs(gen, _testcapi.frame_getgenerator(frame)) - - SUFFICIENT_TO_DEOPT_AND_SPECIALIZE = 100 class Test_Pep523API(unittest.TestCase): diff --git a/Lib/test/test_structmembers.py b/Lib/test/test_capi/test_structmembers.py similarity index 66% rename from Lib/test/test_structmembers.py rename to Lib/test/test_capi/test_structmembers.py index 07d2f623f7156e..2cf46b203478dc 100644 --- a/Lib/test/test_structmembers.py +++ b/Lib/test/test_capi/test_structmembers.py @@ -4,32 +4,42 @@ # Skip this test if the _testcapi module isn't available. 
import_helper.import_module('_testcapi') -from _testcapi import _test_structmembersType, \ - CHAR_MAX, CHAR_MIN, UCHAR_MAX, \ - SHRT_MAX, SHRT_MIN, USHRT_MAX, \ - INT_MAX, INT_MIN, UINT_MAX, \ - LONG_MAX, LONG_MIN, ULONG_MAX, \ - LLONG_MAX, LLONG_MIN, ULLONG_MAX, \ - PY_SSIZE_T_MAX, PY_SSIZE_T_MIN - -ts=_test_structmembersType(False, # T_BOOL - 1, # T_BYTE - 2, # T_UBYTE - 3, # T_SHORT - 4, # T_USHORT - 5, # T_INT - 6, # T_UINT - 7, # T_LONG - 8, # T_ULONG - 23, # T_PYSSIZET - 9.99999,# T_FLOAT - 10.1010101010, # T_DOUBLE - "hi" # T_STRING_INPLACE - ) - -class ReadWriteTests(unittest.TestCase): +from _testcapi import (_test_structmembersType_OldAPI, + _test_structmembersType_NewAPI, + CHAR_MAX, CHAR_MIN, UCHAR_MAX, + SHRT_MAX, SHRT_MIN, USHRT_MAX, + INT_MAX, INT_MIN, UINT_MAX, + LONG_MAX, LONG_MIN, ULONG_MAX, + LLONG_MAX, LLONG_MIN, ULLONG_MAX, + PY_SSIZE_T_MAX, PY_SSIZE_T_MIN, + ) + +# There are two classes: one using and another using +# `Py_`-prefixed API. They should behave the same in Python + +def _make_test_object(cls): + return cls(False, # T_BOOL + 1, # T_BYTE + 2, # T_UBYTE + 3, # T_SHORT + 4, # T_USHORT + 5, # T_INT + 6, # T_UINT + 7, # T_LONG + 8, # T_ULONG + 23, # T_PYSSIZET + 9.99999,# T_FLOAT + 10.1010101010, # T_DOUBLE + "hi", # T_STRING_INPLACE + ) + + +class ReadWriteTests: + def setUp(self): + self.ts = _make_test_object(self.cls) def test_bool(self): + ts = self.ts ts.T_BOOL = True self.assertEqual(ts.T_BOOL, True) ts.T_BOOL = False @@ -37,6 +47,7 @@ def test_bool(self): self.assertRaises(TypeError, setattr, ts, 'T_BOOL', 1) def test_byte(self): + ts = self.ts ts.T_BYTE = CHAR_MAX self.assertEqual(ts.T_BYTE, CHAR_MAX) ts.T_BYTE = CHAR_MIN @@ -45,6 +56,7 @@ def test_byte(self): self.assertEqual(ts.T_UBYTE, UCHAR_MAX) def test_short(self): + ts = self.ts ts.T_SHORT = SHRT_MAX self.assertEqual(ts.T_SHORT, SHRT_MAX) ts.T_SHORT = SHRT_MIN @@ -53,6 +65,7 @@ def test_short(self): self.assertEqual(ts.T_USHORT, USHRT_MAX) def test_int(self): + ts = self.ts ts.T_INT = INT_MAX self.assertEqual(ts.T_INT, INT_MAX) ts.T_INT = INT_MIN @@ -61,6 +74,7 @@ def test_int(self): self.assertEqual(ts.T_UINT, UINT_MAX) def test_long(self): + ts = self.ts ts.T_LONG = LONG_MAX self.assertEqual(ts.T_LONG, LONG_MAX) ts.T_LONG = LONG_MIN @@ -69,13 +83,17 @@ def test_long(self): self.assertEqual(ts.T_ULONG, ULONG_MAX) def test_py_ssize_t(self): + ts = self.ts ts.T_PYSSIZET = PY_SSIZE_T_MAX self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MAX) ts.T_PYSSIZET = PY_SSIZE_T_MIN self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MIN) - @unittest.skipUnless(hasattr(ts, "T_LONGLONG"), "long long not present") def test_longlong(self): + ts = self.ts + if not hasattr(ts, "T_LONGLONG"): + self.skipTest("long long not present") + ts.T_LONGLONG = LLONG_MAX self.assertEqual(ts.T_LONGLONG, LLONG_MAX) ts.T_LONGLONG = LLONG_MIN @@ -91,6 +109,7 @@ def test_longlong(self): self.assertEqual(ts.T_ULONGLONG, 4) def test_bad_assignments(self): + ts = self.ts integer_attributes = [ 'T_BOOL', 'T_BYTE', 'T_UBYTE', @@ -109,37 +128,57 @@ def test_bad_assignments(self): self.assertRaises(TypeError, setattr, ts, attr, nonint) def test_inplace_string(self): + ts = self.ts self.assertEqual(ts.T_STRING_INPLACE, "hi") self.assertRaises(TypeError, setattr, ts, "T_STRING_INPLACE", "s") self.assertRaises(TypeError, delattr, ts, "T_STRING_INPLACE") +class ReadWriteTests_OldAPI(ReadWriteTests, unittest.TestCase): + cls = _test_structmembersType_OldAPI + +class ReadWriteTests_NewAPI(ReadWriteTests, unittest.TestCase): + cls = 
_test_structmembersType_NewAPI -class TestWarnings(unittest.TestCase): +class TestWarnings: + def setUp(self): + self.ts = _make_test_object(self.cls) def test_byte_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MAX+1 def test_byte_min(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MIN-1 def test_ubyte_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_UBYTE = UCHAR_MAX+1 def test_short_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MAX+1 def test_short_min(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MIN-1 def test_ushort_max(self): + ts = self.ts with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_USHORT = USHRT_MAX+1 +class TestWarnings_OldAPI(TestWarnings, unittest.TestCase): + cls = _test_structmembersType_OldAPI + +class TestWarnings_NewAPI(TestWarnings, unittest.TestCase): + cls = _test_structmembersType_NewAPI + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py new file mode 100644 index 00000000000000..07b77d3e04bbe0 --- /dev/null +++ b/Lib/test/test_capi/test_unicode.py @@ -0,0 +1,515 @@ +import unittest +import sys +from test import support +from test.support import import_helper + +try: + import _testcapi +except ImportError: + _testcapi = None + + +class CAPITest(unittest.TestCase): + + # Test PyUnicode_FromFormat() + def test_from_format(self): + import_helper.import_module('ctypes') + from ctypes import ( + c_char_p, + pythonapi, py_object, sizeof, + c_int, c_long, c_longlong, c_ssize_t, + c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) + name = "PyUnicode_FromFormat" + _PyUnicode_FromFormat = getattr(pythonapi, name) + _PyUnicode_FromFormat.argtypes = (c_char_p,) + _PyUnicode_FromFormat.restype = py_object + + def PyUnicode_FromFormat(format, *args): + cargs = tuple( + py_object(arg) if isinstance(arg, str) else arg + for arg in args) + return _PyUnicode_FromFormat(format, *cargs) + + def check_format(expected, format, *args): + text = PyUnicode_FromFormat(format, *args) + self.assertEqual(expected, text) + + # ascii format, non-ascii argument + check_format('ascii\x7f=unicode\xe9', + b'ascii\x7f=%U', 'unicode\xe9') + + # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() + # raises an error + self.assertRaisesRegex(ValueError, + r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format ' + 'string, got a non-ASCII byte: 0xe9$', + PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii') + + # test "%c" + check_format('\uabcd', + b'%c', c_int(0xabcd)) + check_format('\U0010ffff', + b'%c', c_int(0x10ffff)) + with self.assertRaises(OverflowError): + PyUnicode_FromFormat(b'%c', c_int(0x110000)) + # Issue #18183 + check_format('\U00010000\U00100000', + b'%c%c', c_int(0x10000), c_int(0x100000)) + + # test "%" + check_format('%', + b'%%') + check_format('%s', + b'%%s') + check_format('[%]', + b'[%%]') + check_format('%abc', + b'%%%s', b'abc') + + # truncated string + check_format('abc', + b'%.3s', b'abcdef') + check_format('abc[\ufffd', + b'%.5s', 'abc[\u20ac]'.encode('utf8')) + check_format("'\\u20acABC'", + b'%A', '\u20acABC') + check_format("'\\u20", + b'%.5A', '\u20acABCDEF') + check_format("'\u20acABC'", + b'%R', '\u20acABC') + check_format("'\u20acA", + b'%.3R', '\u20acABCDEF') + check_format('\u20acAB', + b'%.3S', 
'\u20acABCDEF') + check_format('\u20acAB', + b'%.3U', '\u20acABCDEF') + check_format('\u20acAB', + b'%.3V', '\u20acABCDEF', None) + check_format('abc[\ufffd', + b'%.5V', None, 'abc[\u20ac]'.encode('utf8')) + + # following tests comes from #7330 + # test width modifier and precision modifier with %S + check_format("repr= abc", + b'repr=%5S', 'abc') + check_format("repr=ab", + b'repr=%.2S', 'abc') + check_format("repr= ab", + b'repr=%5.2S', 'abc') + + # test width modifier and precision modifier with %R + check_format("repr= 'abc'", + b'repr=%8R', 'abc') + check_format("repr='ab", + b'repr=%.3R', 'abc') + check_format("repr= 'ab", + b'repr=%5.3R', 'abc') + + # test width modifier and precision modifier with %A + check_format("repr= 'abc'", + b'repr=%8A', 'abc') + check_format("repr='ab", + b'repr=%.3A', 'abc') + check_format("repr= 'ab", + b'repr=%5.3A', 'abc') + + # test width modifier and precision modifier with %s + check_format("repr= abc", + b'repr=%5s', b'abc') + check_format("repr=ab", + b'repr=%.2s', b'abc') + check_format("repr= ab", + b'repr=%5.2s', b'abc') + + # test width modifier and precision modifier with %U + check_format("repr= abc", + b'repr=%5U', 'abc') + check_format("repr=ab", + b'repr=%.2U', 'abc') + check_format("repr= ab", + b'repr=%5.2U', 'abc') + + # test width modifier and precision modifier with %V + check_format("repr= abc", + b'repr=%5V', 'abc', b'123') + check_format("repr=ab", + b'repr=%.2V', 'abc', b'123') + check_format("repr= ab", + b'repr=%5.2V', 'abc', b'123') + check_format("repr= 123", + b'repr=%5V', None, b'123') + check_format("repr=12", + b'repr=%.2V', None, b'123') + check_format("repr= 12", + b'repr=%5.2V', None, b'123') + + # test integer formats (%i, %d, %u) + check_format('010', + b'%03i', c_int(10)) + check_format('0010', + b'%0.4i', c_int(10)) + check_format('-123', + b'%i', c_int(-123)) + check_format('-123', + b'%li', c_long(-123)) + check_format('-123', + b'%lli', c_longlong(-123)) + check_format('-123', + b'%zi', c_ssize_t(-123)) + + check_format('-123', + b'%d', c_int(-123)) + check_format('-123', + b'%ld', c_long(-123)) + check_format('-123', + b'%lld', c_longlong(-123)) + check_format('-123', + b'%zd', c_ssize_t(-123)) + + check_format('123', + b'%u', c_uint(123)) + check_format('123', + b'%lu', c_ulong(123)) + check_format('123', + b'%llu', c_ulonglong(123)) + check_format('123', + b'%zu', c_size_t(123)) + + # test long output + min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) + max_longlong = -min_longlong - 1 + check_format(str(min_longlong), + b'%lld', c_longlong(min_longlong)) + check_format(str(max_longlong), + b'%lld', c_longlong(max_longlong)) + max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 + check_format(str(max_ulonglong), + b'%llu', c_ulonglong(max_ulonglong)) + PyUnicode_FromFormat(b'%p', c_void_p(-1)) + + # test padding (width and/or precision) + check_format('123'.rjust(10, '0'), + b'%010i', c_int(123)) + check_format('123'.rjust(100), + b'%100i', c_int(123)) + check_format('123'.rjust(100, '0'), + b'%.100i', c_int(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80i', c_int(123)) + + check_format('123'.rjust(10, '0'), + b'%010u', c_uint(123)) + check_format('123'.rjust(100), + b'%100u', c_uint(123)) + check_format('123'.rjust(100, '0'), + b'%.100u', c_uint(123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80u', c_uint(123)) + + check_format('123'.rjust(10, '0'), + b'%010x', c_int(0x123)) + check_format('123'.rjust(100), + b'%100x', c_int(0x123)) + check_format('123'.rjust(100, '0'), + 
b'%.100x', c_int(0x123)) + check_format('123'.rjust(80, '0').rjust(100), + b'%100.80x', c_int(0x123)) + + # test %A + check_format(r"%A:'abc\xe9\uabcd\U0010ffff'", + b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') + + # test %V + check_format('repr=abc', + b'repr=%V', 'abc', b'xyz') + + # test %p + # We cannot test the exact result, + # because it returns a hex representation of a C pointer, + # which is going to be different each time. But, we can test the format. + p_format_regex = r'^0x[a-zA-Z0-9]{3,}$' + p_format1 = PyUnicode_FromFormat(b'%p', 'abc') + self.assertIsInstance(p_format1, str) + self.assertRegex(p_format1, p_format_regex) + + p_format2 = PyUnicode_FromFormat(b'%p %p', '123456', b'xyz') + self.assertIsInstance(p_format2, str) + self.assertRegex(p_format2, + r'0x[a-zA-Z0-9]{3,} 0x[a-zA-Z0-9]{3,}') + + # Extra args are ignored: + p_format3 = PyUnicode_FromFormat(b'%p', '123456', None, b'xyz') + self.assertIsInstance(p_format3, str) + self.assertRegex(p_format3, p_format_regex) + + # Test string decode from parameter of %s using utf-8. + # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of + # '\u4eba\u6c11' + check_format('repr=\u4eba\u6c11', + b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') + + #Test replace error handler. + check_format('repr=abc\ufffd', + b'repr=%V', None, b'abc\xff') + + # Issue #33817: empty strings + check_format('', + b'') + check_format('', + b'%s', b'') + + # check for crashes + for fmt in (b'%', b'%0', b'%01', b'%.', b'%.1', + b'%0%s', b'%1%s', b'%.%s', b'%.1%s', b'%1abc', + b'%l', b'%ll', b'%z', b'%ls', b'%lls', b'%zs'): + with self.subTest(fmt=fmt): + self.assertRaisesRegex(SystemError, 'invalid format string', + PyUnicode_FromFormat, fmt, b'abc') + self.assertRaisesRegex(SystemError, 'invalid format string', + PyUnicode_FromFormat, b'%+i', c_int(10)) + + # Test PyUnicode_AsWideChar() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_aswidechar(self): + from _testcapi import unicode_aswidechar + import_helper.import_module('ctypes') + from ctypes import c_wchar, sizeof + + wchar, size = unicode_aswidechar('abcdef', 2) + self.assertEqual(size, 2) + self.assertEqual(wchar, 'ab') + + wchar, size = unicode_aswidechar('abc', 3) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc') + + wchar, size = unicode_aswidechar('abc', 4) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidechar('abc', 10) + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidechar('abc\0def', 20) + self.assertEqual(size, 7) + self.assertEqual(wchar, 'abc\0def\0') + + nonbmp = chr(0x10ffff) + if sizeof(c_wchar) == 2: + buflen = 3 + nchar = 2 + else: # sizeof(c_wchar) == 4 + buflen = 2 + nchar = 1 + wchar, size = unicode_aswidechar(nonbmp, buflen) + self.assertEqual(size, nchar) + self.assertEqual(wchar, nonbmp + '\0') + + # Test PyUnicode_AsWideCharString() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_aswidecharstring(self): + from _testcapi import unicode_aswidecharstring + import_helper.import_module('ctypes') + from ctypes import c_wchar, sizeof + + wchar, size = unicode_aswidecharstring('abc') + self.assertEqual(size, 3) + self.assertEqual(wchar, 'abc\0') + + wchar, size = unicode_aswidecharstring('abc\0def') + self.assertEqual(size, 7) + self.assertEqual(wchar, 'abc\0def\0') + + nonbmp = chr(0x10ffff) + if sizeof(c_wchar) == 2: + nchar = 2 + else: # sizeof(c_wchar) == 4 + nchar = 1 + 
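# A standard-library-only sketch of the size arithmetic used around here:
# a non-BMP code point occupies two 16-bit wchar_t units (a UTF-16 surrogate
# pair) but only one 32-bit unit, which is why nchar is 2 or 1 depending on
# sizeof(wchar_t). Illustrative only.
nonbmp_example = chr(0x10ffff)
assert len(nonbmp_example.encode('utf-16-le')) // 2 == 2   # 16-bit wchar_t: 2 units
assert len(nonbmp_example.encode('utf-32-le')) // 4 == 1   # 32-bit wchar_t: 1 unit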
wchar, size = unicode_aswidecharstring(nonbmp) + self.assertEqual(size, nchar) + self.assertEqual(wchar, nonbmp + '\0') + + # Test PyUnicode_AsUCS4() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asucs4(self): + from _testcapi import unicode_asucs4 + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + l = len(s) + self.assertEqual(unicode_asucs4(s, l, True), s+'\0') + self.assertEqual(unicode_asucs4(s, l, False), s+'\uffff') + self.assertEqual(unicode_asucs4(s, l+1, True), s+'\0\uffff') + self.assertEqual(unicode_asucs4(s, l+1, False), s+'\0\uffff') + self.assertRaises(SystemError, unicode_asucs4, s, l-1, True) + self.assertRaises(SystemError, unicode_asucs4, s, l-2, False) + s = '\0'.join([s, s]) + self.assertEqual(unicode_asucs4(s, len(s), True), s+'\0') + self.assertEqual(unicode_asucs4(s, len(s), False), s+'\uffff') + + # Test PyUnicode_AsUTF8() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asutf8(self): + from _testcapi import unicode_asutf8 + + bmp = '\u0100' + bmp2 = '\uffff' + nonbmp = chr(0x10ffff) + + self.assertEqual(unicode_asutf8(bmp), b'\xc4\x80') + self.assertEqual(unicode_asutf8(bmp2), b'\xef\xbf\xbf') + self.assertEqual(unicode_asutf8(nonbmp), b'\xf4\x8f\xbf\xbf') + self.assertRaises(UnicodeEncodeError, unicode_asutf8, 'a\ud800b\udfffc') + + # Test PyUnicode_AsUTF8AndSize() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asutf8andsize(self): + from _testcapi import unicode_asutf8andsize + + bmp = '\u0100' + bmp2 = '\uffff' + nonbmp = chr(0x10ffff) + + self.assertEqual(unicode_asutf8andsize(bmp), (b'\xc4\x80', 2)) + self.assertEqual(unicode_asutf8andsize(bmp2), (b'\xef\xbf\xbf', 3)) + self.assertEqual(unicode_asutf8andsize(nonbmp), (b'\xf4\x8f\xbf\xbf', 4)) + self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, 'a\ud800b\udfffc') + + # Test PyUnicode_Count() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_count(self): + from _testcapi import unicode_count + + st = 'abcabd' + self.assertEqual(unicode_count(st, 'a', 0, len(st)), 2) + self.assertEqual(unicode_count(st, 'ab', 0, len(st)), 2) + self.assertEqual(unicode_count(st, 'abc', 0, len(st)), 1) + self.assertEqual(unicode_count(st, 'а', 0, len(st)), 0) # cyrillic "a" + # start < end + self.assertEqual(unicode_count(st, 'a', 3, len(st)), 1) + self.assertEqual(unicode_count(st, 'a', 4, len(st)), 0) + self.assertEqual(unicode_count(st, 'a', 0, sys.maxsize), 2) + # start >= end + self.assertEqual(unicode_count(st, 'abc', 0, 0), 0) + self.assertEqual(unicode_count(st, 'a', 3, 2), 0) + self.assertEqual(unicode_count(st, 'a', sys.maxsize, 5), 0) + # negative + self.assertEqual(unicode_count(st, 'ab', -len(st), -1), 2) + self.assertEqual(unicode_count(st, 'a', -len(st), -3), 1) + # wrong args + self.assertRaises(TypeError, unicode_count, 'a', 'a') + self.assertRaises(TypeError, unicode_count, 'a', 'a', 1) + self.assertRaises(TypeError, unicode_count, 1, 'a', 0, 1) + self.assertRaises(TypeError, unicode_count, 'a', 1, 0, 1) + # empty string + self.assertEqual(unicode_count('abc', '', 0, 3), 4) + self.assertEqual(unicode_count('abc', '', 1, 3), 3) + self.assertEqual(unicode_count('', '', 0, 1), 1) + self.assertEqual(unicode_count('', 'a', 0, 1), 0) + # different unicode kinds + for uni in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": + for ch in uni: + 
self.assertEqual(unicode_count(uni, ch, 0, len(uni)), 1) + self.assertEqual(unicode_count(st, ch, 0, len(st)), 0) + + # subclasses should still work + class MyStr(str): + pass + + self.assertEqual(unicode_count(MyStr('aab'), 'a', 0, 3), 2) + + # Test PyUnicode_FindChar() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_findchar(self): + from _testcapi import unicode_findchar + + for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": + for i, ch in enumerate(str): + self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i) + self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i) + + str = "!>_= end + self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1) + self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1) + # negative + self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0) + self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0) + + # Test PyUnicode_CopyCharacters() + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_copycharacters(self): + from _testcapi import unicode_copycharacters + + strings = [ + 'abcde', '\xa1\xa2\xa3\xa4\xa5', + '\u4f60\u597d\u4e16\u754c\uff01', + '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' + ] + + for idx, from_ in enumerate(strings): + # wide -> narrow: exceed maxchar limitation + for to in strings[:idx]: + self.assertRaises( + SystemError, + unicode_copycharacters, to, 0, from_, 0, 5 + ) + # same kind + for from_start in range(5): + self.assertEqual( + unicode_copycharacters(from_, 0, from_, from_start, 5), + (from_[from_start:from_start+5].ljust(5, '\0'), + 5-from_start) + ) + for to_start in range(5): + self.assertEqual( + unicode_copycharacters(from_, to_start, from_, to_start, 5), + (from_[to_start:to_start+5].rjust(5, '\0'), + 5-to_start) + ) + # narrow -> wide + # Tests omitted since this creates invalid strings. + + s = strings[0] + self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5) + self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5) + self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5) + self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1) + self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_pep393_utf8_caching_bug(self): + # Issue #25709: Problem with string concatenation and utf-8 cache + from _testcapi import getargs_s_hash + for k in 0x24, 0xa4, 0x20ac, 0x1f40d: + s = '' + for i in range(5): + # Due to CPython specific optimization the 's' string can be + # resized in-place. + s += chr(k) + # Parsing with the "s#" format code calls indirectly + # PyUnicode_AsUTF8AndSize() which creates the UTF-8 + # encoded string cached in the Unicode object. 
+ self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) + # Check that the second call returns the same result + self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_watchers.py b/Lib/test/test_capi/test_watchers.py new file mode 100644 index 00000000000000..5e4f42a86006bd --- /dev/null +++ b/Lib/test/test_capi/test_watchers.py @@ -0,0 +1,433 @@ +import unittest + +from contextlib import contextmanager, ExitStack +from test.support import catch_unraisable_exception, import_helper + + +# Skip this test if the _testcapi module isn't available. +_testcapi = import_helper.import_module('_testcapi') + + +class TestDictWatchers(unittest.TestCase): + # types of watchers testcapimodule can add: + EVENTS = 0 # appends dict events as strings to global event list + ERROR = 1 # unconditionally sets and signals a RuntimeException + SECOND = 2 # always appends "second" to global event list + + def add_watcher(self, kind=EVENTS): + return _testcapi.add_dict_watcher(kind) + + def clear_watcher(self, watcher_id): + _testcapi.clear_dict_watcher(watcher_id) + + @contextmanager + def watcher(self, kind=EVENTS): + wid = self.add_watcher(kind) + try: + yield wid + finally: + self.clear_watcher(wid) + + def assert_events(self, expected): + actual = _testcapi.get_dict_watcher_events() + self.assertEqual(actual, expected) + + def watch(self, wid, d): + _testcapi.watch_dict(wid, d) + + def unwatch(self, wid, d): + _testcapi.unwatch_dict(wid, d) + + def test_set_new_item(self): + d = {} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "bar" + self.assert_events(["new:foo:bar"]) + + def test_set_existing_item(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "baz" + self.assert_events(["mod:foo:baz"]) + + def test_clone(self): + d = {} + d2 = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.update(d2) + self.assert_events(["clone"]) + + def test_no_event_if_not_watched(self): + d = {} + with self.watcher() as wid: + d["foo"] = "bar" + self.assert_events([]) + + def test_del(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + del d["foo"] + self.assert_events(["del:foo"]) + + def test_pop(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.pop("foo") + self.assert_events(["del:foo"]) + + def test_clear(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + d.clear() + self.assert_events(["clear"]) + + def test_dealloc(self): + d = {"foo": "bar"} + with self.watcher() as wid: + self.watch(wid, d) + del d + self.assert_events(["dealloc"]) + + def test_unwatch(self): + d = {} + with self.watcher() as wid: + self.watch(wid, d) + d["foo"] = "bar" + self.unwatch(wid, d) + d["hmm"] = "baz" + self.assert_events(["new:foo:bar"]) + + def test_error(self): + d = {} + with self.watcher(kind=self.ERROR) as wid: + self.watch(wid, d) + with catch_unraisable_exception() as cm: + d["foo"] = "bar" + self.assertIs(cm.unraisable.object, d) + self.assertEqual(str(cm.unraisable.exc_value), "boom!") + self.assert_events([]) + + def test_two_watchers(self): + d1 = {} + d2 = {} + with self.watcher() as wid1: + with self.watcher(kind=self.SECOND) as wid2: + self.watch(wid1, d1) + self.watch(wid2, d2) + d1["foo"] = "bar" + d2["hmm"] = "baz" + self.assert_events(["new:foo:bar", "second"]) + + def test_watch_non_dict(self): + with self.watcher() as wid: + with 
self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): + self.watch(wid, 1) + + def test_watch_out_of_range_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.watch(-1, d) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.watch(8, d) # DICT_MAX_WATCHERS = 8 + + def test_watch_unassigned_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.watch(1, d) + + def test_unwatch_non_dict(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-dictionary"): + self.unwatch(wid, 1) + + def test_unwatch_out_of_range_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.unwatch(-1, d) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.unwatch(8, d) # DICT_MAX_WATCHERS = 8 + + def test_unwatch_unassigned_watcher_id(self): + d = {} + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.unwatch(1, d) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID -1"): + self.clear_watcher(-1) + with self.assertRaisesRegex(ValueError, r"Invalid dict watcher ID 8"): + self.clear_watcher(8) # DICT_MAX_WATCHERS = 8 + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"No dict watcher set for ID 1"): + self.clear_watcher(1) + + +class TestTypeWatchers(unittest.TestCase): + # types of watchers testcapimodule can add: + TYPES = 0 # appends modified types to global event list + ERROR = 1 # unconditionally sets and signals a RuntimeException + WRAP = 2 # appends modified type wrapped in list to global event list + + # duplicating the C constant + TYPE_MAX_WATCHERS = 8 + + def add_watcher(self, kind=TYPES): + return _testcapi.add_type_watcher(kind) + + def clear_watcher(self, watcher_id): + _testcapi.clear_type_watcher(watcher_id) + + @contextmanager + def watcher(self, kind=TYPES): + wid = self.add_watcher(kind) + try: + yield wid + finally: + self.clear_watcher(wid) + + def assert_events(self, expected): + actual = _testcapi.get_type_modified_events() + self.assertEqual(actual, expected) + + def watch(self, wid, t): + _testcapi.watch_type(wid, t) + + def unwatch(self, wid, t): + _testcapi.unwatch_type(wid, t) + + def test_watch_type(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assert_events([C]) + + def test_event_aggregation(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + C.bar = "baz" + # only one event registered for both modifications + self.assert_events([C]) + + def test_lookup_resets_aggregation(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + # lookup resets type version tag + self.assertEqual(C.foo, "bar") + C.bar = "baz" + # both events registered + self.assert_events([C, C]) + + def test_unwatch_type(self): + class C: pass + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assertEqual(C.foo, "bar") + self.assert_events([C]) + self.unwatch(wid, C) + C.bar = "baz" + self.assert_events([C]) + + def test_clear_watcher(self): + class C: pass + # outer watcher is unused, it's just to keep events list alive + with self.watcher() as _: + with self.watcher() as wid: + self.watch(wid, C) + C.foo = "bar" + self.assertEqual(C.foo, "bar") + 
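# Condensed sketch of the type-watcher flow this class exercises, using the
# _testcapi helpers added by this patch (kind 0 is the TYPES watcher that
# records modified types). Behaviour as asserted by the surrounding tests;
# illustrative only.
import _testcapi

wid_sketch = _testcapi.add_type_watcher(0)
try:
    class Watched: pass
    _testcapi.watch_type(wid_sketch, Watched)
    Watched.foo = "bar"     # first modification emits an event
    Watched.bar = "baz"     # coalesced: no new event until a lookup refreshes
                            # the type's version tag
    assert _testcapi.get_type_modified_events()[-1] is Watched
finally:
    _testcapi.clear_type_watcher(wid_sketch)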
self.assert_events([C]) + C.bar = "baz" + # Watcher on C has been cleared, no new event + self.assert_events([C]) + + def test_watch_type_subclass(self): + class C: pass + class D(C): pass + with self.watcher() as wid: + self.watch(wid, D) + C.foo = "bar" + self.assert_events([D]) + + def test_error(self): + class C: pass + with self.watcher(kind=self.ERROR) as wid: + self.watch(wid, C) + with catch_unraisable_exception() as cm: + C.foo = "bar" + self.assertIs(cm.unraisable.object, C) + self.assertEqual(str(cm.unraisable.exc_value), "boom!") + self.assert_events([]) + + def test_two_watchers(self): + class C1: pass + class C2: pass + with self.watcher() as wid1: + with self.watcher(kind=self.WRAP) as wid2: + self.assertNotEqual(wid1, wid2) + self.watch(wid1, C1) + self.watch(wid2, C2) + C1.foo = "bar" + C2.hmm = "baz" + self.assert_events([C1, [C2]]) + + def test_watch_non_type(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): + self.watch(wid, 1) + + def test_watch_out_of_range_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.watch(-1, C) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.watch(self.TYPE_MAX_WATCHERS, C) + + def test_watch_unassigned_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.watch(1, C) + + def test_unwatch_non_type(self): + with self.watcher() as wid: + with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): + self.unwatch(wid, 1) + + def test_unwatch_out_of_range_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.unwatch(-1, C) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.unwatch(self.TYPE_MAX_WATCHERS, C) + + def test_unwatch_unassigned_watcher_id(self): + class C: pass + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.unwatch(1, C) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID -1"): + self.clear_watcher(-1) + with self.assertRaisesRegex(ValueError, r"Invalid type watcher ID 8"): + self.clear_watcher(self.TYPE_MAX_WATCHERS) + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"No type watcher set for ID 1"): + self.clear_watcher(1) + + def test_no_more_ids_available(self): + contexts = [self.watcher() for i in range(self.TYPE_MAX_WATCHERS)] + with ExitStack() as stack: + for ctx in contexts: + stack.enter_context(ctx) + with self.assertRaisesRegex(RuntimeError, r"no more type watcher IDs"): + self.add_watcher() + + +class TestFuncWatchers(unittest.TestCase): + @contextmanager + def add_watcher(self, func): + wid = _testcapi.add_func_watcher(func) + try: + yield + finally: + _testcapi.clear_func_watcher(wid) + + def test_func_events_dispatched(self): + events = [] + def watcher(*args): + events.append(args) + + with self.add_watcher(watcher): + def myfunc(): + pass + self.assertIn((_testcapi.PYFUNC_EVENT_CREATE, myfunc, None), events) + myfunc_id = id(myfunc) + + new_code = self.test_func_events_dispatched.__code__ + myfunc.__code__ = new_code + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_CODE, myfunc, new_code), events) + + new_defaults = (123,) + myfunc.__defaults__ = new_defaults + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_DEFAULTS, myfunc, new_defaults), events) + + new_defaults = (456,) + 
_testcapi.set_func_defaults_via_capi(myfunc, new_defaults) + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_DEFAULTS, myfunc, new_defaults), events) + + new_kwdefaults = {"self": 123} + myfunc.__kwdefaults__ = new_kwdefaults + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_KWDEFAULTS, myfunc, new_kwdefaults), events) + + new_kwdefaults = {"self": 456} + _testcapi.set_func_kwdefaults_via_capi(myfunc, new_kwdefaults) + self.assertIn((_testcapi.PYFUNC_EVENT_MODIFY_KWDEFAULTS, myfunc, new_kwdefaults), events) + + # Clear events reference to func + events = [] + del myfunc + self.assertIn((_testcapi.PYFUNC_EVENT_DESTROY, myfunc_id, None), events) + + def test_multiple_watchers(self): + events0 = [] + def first_watcher(*args): + events0.append(args) + + events1 = [] + def second_watcher(*args): + events1.append(args) + + with self.add_watcher(first_watcher): + with self.add_watcher(second_watcher): + def myfunc(): + pass + + event = (_testcapi.PYFUNC_EVENT_CREATE, myfunc, None) + self.assertIn(event, events0) + self.assertIn(event, events1) + + def test_watcher_raises_error(self): + class MyError(Exception): + pass + + def watcher(*args): + raise MyError("testing 123") + + with self.add_watcher(watcher): + with catch_unraisable_exception() as cm: + def myfunc(): + pass + + self.assertIs(cm.unraisable.object, myfunc) + self.assertIsInstance(cm.unraisable.exc_value, MyError) + + def test_clear_out_of_range_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"invalid func watcher ID -1"): + _testcapi.clear_func_watcher(-1) + with self.assertRaisesRegex(ValueError, r"invalid func watcher ID 8"): + _testcapi.clear_func_watcher(8) # FUNC_MAX_WATCHERS = 8 + + def test_clear_unassigned_watcher_id(self): + with self.assertRaisesRegex(ValueError, r"no func watcher set for ID 1"): + _testcapi.clear_func_watcher(1) + + def test_allocate_too_many_watchers(self): + with self.assertRaisesRegex(RuntimeError, r"no more func watcher IDs"): + _testcapi.allocate_too_many_func_watchers() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_check_c_globals.py b/Lib/test/test_check_c_globals.py index 898807a5e69259..670be52422f799 100644 --- a/Lib/test/test_check_c_globals.py +++ b/Lib/test/test_check_c_globals.py @@ -2,6 +2,11 @@ import test.test_tools from test.support.warnings_helper import save_restore_warnings_filters + +# TODO: gh-92584: c-analyzer uses distutils which was removed in Python 3.12 +raise unittest.SkipTest("distutils has been removed in Python 3.12") + + test.test_tools.skip_if_missing('c-analyzer') with test.test_tools.imports_under_tool('c-analyzer'): # gh-95349: Save/restore warnings filters to leave them unchanged. diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index 4aa9691a4829d1..4abf739cf52ca3 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -3,7 +3,7 @@ # Licensed to the PSF under a contributor agreement. 
from test import support, test_tools -from test.support import os_helper +from test.support import import_helper, os_helper from unittest import TestCase import collections import inspect @@ -153,7 +153,7 @@ def test_right_only(self): def test_have_left_options_but_required_is_empty(self): def fn(): clinic.permute_optional_groups(['a'], [], []) - self.assertRaises(AssertionError, fn) + self.assertRaises(ValueError, fn) class ClinicLinearFormatTest(TestCase): @@ -730,6 +730,15 @@ def test_parameters_not_permitted_after_slash_for_now(self): x: int """) + def test_parameters_no_more_than_one_vararg(self): + s = self.parse_function_should_fail(""" +module foo +foo.bar + *vararg1: object + *vararg2: object +""") + self.assertEqual(s, "Error on line 0:\nToo many var args\n") + def test_function_not_at_column_0(self): function = self.parse_function(""" module foo @@ -820,5 +829,461 @@ def test_external(self): self.assertEqual(new_mtime_ns, old_mtime_ns) +ac_tester = import_helper.import_module('_testclinic') + + +class ClinicFunctionalTest(unittest.TestCase): + locals().update((name, getattr(ac_tester, name)) + for name in dir(ac_tester) if name.startswith('test_')) + + def test_objects_converter(self): + with self.assertRaises(TypeError): + ac_tester.objects_converter() + self.assertEqual(ac_tester.objects_converter(1, 2), (1, 2)) + self.assertEqual(ac_tester.objects_converter([], 'whatever class'), ([], 'whatever class')) + self.assertEqual(ac_tester.objects_converter(1), (1, None)) + + def test_bytes_object_converter(self): + with self.assertRaises(TypeError): + ac_tester.bytes_object_converter(1) + self.assertEqual(ac_tester.bytes_object_converter(b'BytesObject'), (b'BytesObject',)) + + def test_byte_array_object_converter(self): + with self.assertRaises(TypeError): + ac_tester.byte_array_object_converter(1) + byte_arr = bytearray(b'ByteArrayObject') + self.assertEqual(ac_tester.byte_array_object_converter(byte_arr), (byte_arr,)) + + def test_unicode_converter(self): + with self.assertRaises(TypeError): + ac_tester.unicode_converter(1) + self.assertEqual(ac_tester.unicode_converter('unicode'), ('unicode',)) + + def test_bool_converter(self): + with self.assertRaises(TypeError): + ac_tester.bool_converter(False, False, 'not a int') + self.assertEqual(ac_tester.bool_converter(), (True, True, True)) + self.assertEqual(ac_tester.bool_converter('', [], 5), (False, False, True)) + self.assertEqual(ac_tester.bool_converter(('not empty',), {1: 2}, 0), (True, True, False)) + + def test_char_converter(self): + with self.assertRaises(TypeError): + ac_tester.char_converter(1) + with self.assertRaises(TypeError): + ac_tester.char_converter(b'ab') + chars = [b'A', b'\a', b'\b', b'\t', b'\n', b'\v', b'\f', b'\r', b'"', b"'", b'?', b'\\', b'\000', b'\377'] + expected = tuple(ord(c) for c in chars) + self.assertEqual(ac_tester.char_converter(), expected) + chars = [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0', b'a', b'b', b'c', b'd'] + expected = tuple(ord(c) for c in chars) + self.assertEqual(ac_tester.char_converter(*chars), expected) + + def test_unsigned_char_converter(self): + from _testcapi import UCHAR_MAX + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(UCHAR_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_char_converter(0, UCHAR_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_char_converter([]) + 
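# Why the wrap-around assertions below expect (0, 0, 0) and (0, 0, 123):
# judging by the expected results, the third parameter of these converters
# apparently uses a bitwise (masking) conversion, so out-of-range values are
# reduced modulo 2**8 for an unsigned char (and analogously for the wider
# unsigned types further down). A quick arithmetic check, illustrative only:
UCHAR_MAX_SKETCH = 2**8 - 1
assert (UCHAR_MAX_SKETCH + 1) & 0xFF == 0                 # 256 wraps to 0
assert ((UCHAR_MAX_SKETCH + 1) * 3 + 123) & 0xFF == 123   # 891 wraps to 123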
self.assertEqual(ac_tester.unsigned_char_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_char_converter(0, 0, UCHAR_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_char_converter(0, 0, (UCHAR_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_short_converter(self): + from _testcapi import SHRT_MIN, SHRT_MAX + with self.assertRaises(OverflowError): + ac_tester.short_converter(SHRT_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.short_converter(SHRT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.short_converter([]) + self.assertEqual(ac_tester.short_converter(-1234), (-1234,)) + self.assertEqual(ac_tester.short_converter(4321), (4321,)) + + def test_unsigned_short_converter(self): + from _testcapi import USHRT_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_short_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_short_converter(USHRT_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_short_converter(0, USHRT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_short_converter([]) + self.assertEqual(ac_tester.unsigned_short_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_short_converter(0, 0, USHRT_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_short_converter(0, 0, (USHRT_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_int_converter(self): + from _testcapi import INT_MIN, INT_MAX + with self.assertRaises(OverflowError): + ac_tester.int_converter(INT_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.int_converter(INT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.int_converter(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.int_converter([]) + self.assertEqual(ac_tester.int_converter(), (12, 34, 45)) + self.assertEqual(ac_tester.int_converter(1, 2, '3'), (1, 2, ord('3'))) + + def test_unsigned_int_converter(self): + from _testcapi import UINT_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_int_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_int_converter(UINT_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_int_converter(0, UINT_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_int_converter([]) + self.assertEqual(ac_tester.unsigned_int_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_int_converter(0, 0, UINT_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_int_converter(0, 0, (UINT_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_long_converter(self): + from _testcapi import LONG_MIN, LONG_MAX + with self.assertRaises(OverflowError): + ac_tester.long_converter(LONG_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.long_converter(LONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.long_converter([]) + self.assertEqual(ac_tester.long_converter(), (12,)) + self.assertEqual(ac_tester.long_converter(-1234), (-1234,)) + + def test_unsigned_long_converter(self): + from _testcapi import ULONG_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_long_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_converter(ULONG_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_converter(0, ULONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_long_converter([]) + self.assertEqual(ac_tester.unsigned_long_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_long_converter(0, 0, ULONG_MAX + 1), (0, 0, 0)) + 
self.assertEqual(ac_tester.unsigned_long_converter(0, 0, (ULONG_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_long_long_converter(self): + from _testcapi import LLONG_MIN, LLONG_MAX + with self.assertRaises(OverflowError): + ac_tester.long_long_converter(LLONG_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.long_long_converter(LLONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.long_long_converter([]) + self.assertEqual(ac_tester.long_long_converter(), (12,)) + self.assertEqual(ac_tester.long_long_converter(-1234), (-1234,)) + + def test_unsigned_long_long_converter(self): + from _testcapi import ULLONG_MAX + with self.assertRaises(ValueError): + ac_tester.unsigned_long_long_converter(-1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_long_converter(ULLONG_MAX + 1) + with self.assertRaises(OverflowError): + ac_tester.unsigned_long_long_converter(0, ULLONG_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.unsigned_long_long_converter([]) + self.assertEqual(ac_tester.unsigned_long_long_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.unsigned_long_long_converter(0, 0, ULLONG_MAX + 1), (0, 0, 0)) + self.assertEqual(ac_tester.unsigned_long_long_converter(0, 0, (ULLONG_MAX + 1) * 3 + 123), (0, 0, 123)) + + def test_py_ssize_t_converter(self): + from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX + with self.assertRaises(OverflowError): + ac_tester.py_ssize_t_converter(PY_SSIZE_T_MIN - 1) + with self.assertRaises(OverflowError): + ac_tester.py_ssize_t_converter(PY_SSIZE_T_MAX + 1) + with self.assertRaises(TypeError): + ac_tester.py_ssize_t_converter([]) + self.assertEqual(ac_tester.py_ssize_t_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.py_ssize_t_converter(1, 2, None), (1, 2, 56)) + + def test_slice_index_converter(self): + from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX + with self.assertRaises(TypeError): + ac_tester.slice_index_converter([]) + self.assertEqual(ac_tester.slice_index_converter(), (12, 34, 56)) + self.assertEqual(ac_tester.slice_index_converter(1, 2, None), (1, 2, 56)) + self.assertEqual(ac_tester.slice_index_converter(PY_SSIZE_T_MAX, PY_SSIZE_T_MAX + 1, PY_SSIZE_T_MAX + 1234), + (PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX)) + self.assertEqual(ac_tester.slice_index_converter(PY_SSIZE_T_MIN, PY_SSIZE_T_MIN - 1, PY_SSIZE_T_MIN - 1234), + (PY_SSIZE_T_MIN, PY_SSIZE_T_MIN, PY_SSIZE_T_MIN)) + + def test_size_t_converter(self): + with self.assertRaises(ValueError): + ac_tester.size_t_converter(-1) + with self.assertRaises(TypeError): + ac_tester.size_t_converter([]) + self.assertEqual(ac_tester.size_t_converter(), (12,)) + + def test_float_converter(self): + with self.assertRaises(TypeError): + ac_tester.float_converter([]) + self.assertEqual(ac_tester.float_converter(), (12.5,)) + self.assertEqual(ac_tester.float_converter(-0.5), (-0.5,)) + + def test_double_converter(self): + with self.assertRaises(TypeError): + ac_tester.double_converter([]) + self.assertEqual(ac_tester.double_converter(), (12.5,)) + self.assertEqual(ac_tester.double_converter(-0.5), (-0.5,)) + + def test_py_complex_converter(self): + with self.assertRaises(TypeError): + ac_tester.py_complex_converter([]) + self.assertEqual(ac_tester.py_complex_converter(complex(1, 2)), (complex(1, 2),)) + self.assertEqual(ac_tester.py_complex_converter(complex('-1-2j')), (complex('-1-2j'),)) + self.assertEqual(ac_tester.py_complex_converter(-0.5), (-0.5,)) + self.assertEqual(ac_tester.py_complex_converter(10), (10,)) + + def 
test_str_converter(self): + with self.assertRaises(TypeError): + ac_tester.str_converter(1) + with self.assertRaises(TypeError): + ac_tester.str_converter('a', 'b', 'c') + with self.assertRaises(ValueError): + ac_tester.str_converter('a', b'b\0b', 'c') + self.assertEqual(ac_tester.str_converter('a', b'b', 'c'), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter('a', b'b', b'c'), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter('a', b'b', 'c\0c'), ('a', 'b', 'c\0c')) + + def test_str_converter_encoding(self): + with self.assertRaises(TypeError): + ac_tester.str_converter_encoding(1) + self.assertEqual(ac_tester.str_converter_encoding('a', 'b', 'c'), ('a', 'b', 'c')) + with self.assertRaises(TypeError): + ac_tester.str_converter_encoding('a', b'b\0b', 'c') + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', bytearray([ord('c')])), ('a', 'b', 'c')) + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', bytearray([ord('c'), 0, ord('c')])), + ('a', 'b', 'c\x00c')) + self.assertEqual(ac_tester.str_converter_encoding('a', b'b', b'c\x00c'), ('a', 'b', 'c\x00c')) + + def test_py_buffer_converter(self): + with self.assertRaises(TypeError): + ac_tester.py_buffer_converter('a', 'b') + self.assertEqual(ac_tester.py_buffer_converter('abc', bytearray([1, 2, 3])), (b'abc', b'\x01\x02\x03')) + + def test_keywords(self): + self.assertEqual(ac_tester.keywords(1, 2), (1, 2)) + self.assertEqual(ac_tester.keywords(1, b=2), (1, 2)) + self.assertEqual(ac_tester.keywords(a=1, b=2), (1, 2)) + + def test_keywords_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.keywords_kwonly(1, 2) + self.assertEqual(ac_tester.keywords_kwonly(1, b=2), (1, 2)) + self.assertEqual(ac_tester.keywords_kwonly(a=1, b=2), (1, 2)) + + def test_keywords_opt(self): + self.assertEqual(ac_tester.keywords_opt(1), (1, None, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2, 3), (1, 2, 3)) + self.assertEqual(ac_tester.keywords_opt(1, b=2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_opt(1, 2, c=3), (1, 2, 3)) + self.assertEqual(ac_tester.keywords_opt(a=1, c=3), (1, None, 3)) + self.assertEqual(ac_tester.keywords_opt(a=1, b=2, c=3), (1, 2, 3)) + + def test_keywords_opt_kwonly(self): + self.assertEqual(ac_tester.keywords_opt_kwonly(1), (1, None, None, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.keywords_opt_kwonly(1, 2, 3) + self.assertEqual(ac_tester.keywords_opt_kwonly(1, b=2), (1, 2, None, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(a=1, c=3), (1, None, 3, None)) + self.assertEqual(ac_tester.keywords_opt_kwonly(a=1, b=2, c=3, d=4), (1, 2, 3, 4)) + + def test_keywords_kwonly_opt(self): + self.assertEqual(ac_tester.keywords_kwonly_opt(1), (1, None, None)) + with self.assertRaises(TypeError): + ac_tester.keywords_kwonly_opt(1, 2) + self.assertEqual(ac_tester.keywords_kwonly_opt(1, b=2), (1, 2, None)) + self.assertEqual(ac_tester.keywords_kwonly_opt(a=1, c=3), (1, None, 3)) + self.assertEqual(ac_tester.keywords_kwonly_opt(a=1, b=2, c=3), (1, 2, 3)) + + def test_posonly_keywords(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords(a=1, b=2) + self.assertEqual(ac_tester.posonly_keywords(1, 2), (1, 2)) + self.assertEqual(ac_tester.posonly_keywords(1, b=2), (1, 2)) + 
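The keyword/positional tests above exercise calling conventions that correspond to ordinary Python signatures; roughly (an illustrative sketch of the behaviour the tests expect, not the generated C code):

    def keywords(a, b): return (a, b)
    def keywords_kwonly(a, *, b): return (a, b)
    def posonly_keywords(a, /, b): return (a, b)

    assert keywords(1, b=2) == (1, 2)
    assert posonly_keywords(1, b=2) == (1, 2)
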
+ def test_posonly_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(1) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(1, 2) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly(a=1, b=2) + self.assertEqual(ac_tester.posonly_kwonly(1, b=2), (1, 2)) + + def test_posonly_keywords_kwonly(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly(a=1, b=2, c=3) + self.assertEqual(ac_tester.posonly_keywords_kwonly(1, 2, c=3), (1, 2, 3)) + self.assertEqual(ac_tester.posonly_keywords_kwonly(1, b=2, c=3), (1, 2, 3)) + + def test_posonly_keywords_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt(1) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, 3, 4), (1, 2, 3, 4)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, b=2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt(1, 2, c=3), (1, 2, 3, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt(a=1, b=2, c=3, d=4) + self.assertEqual(ac_tester.posonly_keywords_opt(1, b=2, c=3, d=4), (1, 2, 3, 4)) + + def test_posonly_opt_keywords_opt(self): + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, 3, 4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt(a=1, b=2, c=3, d=4) + + def test_posonly_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly_opt(1) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly_opt(1, 2) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2), (1, 2, None, None)) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_kwonly_opt(1, b=2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_kwonly_opt(a=1, b=2, c=3, d=4) + + def test_posonly_opt_kwonly_opt(self): + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_kwonly_opt(1, 2, 3) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_kwonly_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + + def test_posonly_keywords_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, 2) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, b=2) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(1, 2, 3) 
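Likewise, the positional-only/keyword-only combinations with optional parameters tested above behave like this plain-Python signature (a sketch; the None defaults are inferred from the tuples the tests expect):

    def posonly_kwonly_opt(a, /, *, b, c=None, d=None):
        return (a, b, c, d)

    assert posonly_kwonly_opt(1, b=2) == (1, 2, None, None)
    assert posonly_kwonly_opt(1, b=2, c=3, d=4) == (1, 2, 3, 4)
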
+ with self.assertRaises(TypeError): + ac_tester.posonly_keywords_kwonly_opt(a=1, b=2, c=3) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, b=2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_kwonly_opt(1, 2, c=3, d=4, e=5), (1, 2, 3, 4, 5)) + + def test_posonly_keywords_opt_kwonly_opt(self): + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(1) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2), (1, 2, None, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, b=2), (1, 2, None, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, 4) + with self.assertRaises(TypeError): + ac_tester.posonly_keywords_opt_kwonly_opt(a=1, b=2) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, b=2, c=3), (1, 2, 3, None, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4, None)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, 3, d=4, e=5), (1, 2, 3, 4, 5)) + self.assertEqual(ac_tester.posonly_keywords_opt_kwonly_opt(1, 2, c=3, d=4, e=5), (1, 2, 3, 4, 5)) + + def test_posonly_opt_keywords_opt_kwonly_opt(self): + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1), (1, None, None, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2), (1, 2, None, None)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, b=2) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, c=3), (1, 2, 3, None)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3, d=4), (1, 2, 3, 4)) + self.assertEqual(ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, c=3, d=4), (1, 2, 3, 4)) + with self.assertRaises(TypeError): + ac_tester.posonly_opt_keywords_opt_kwonly_opt(1, 2, 3, 4) + + def test_keyword_only_parameter(self): + with self.assertRaises(TypeError): + ac_tester.keyword_only_parameter() + with self.assertRaises(TypeError): + ac_tester.keyword_only_parameter(1) + self.assertEqual(ac_tester.keyword_only_parameter(a=1), (1,)) + + def test_posonly_vararg(self): + with self.assertRaises(TypeError): + ac_tester.posonly_vararg() + self.assertEqual(ac_tester.posonly_vararg(1, 2), (1, 2, ())) + self.assertEqual(ac_tester.posonly_vararg(1, b=2), (1, 2, ())) + self.assertEqual(ac_tester.posonly_vararg(1, 2, 3, 4), (1, 2, (3, 4))) + + def test_vararg_and_posonly(self): + with self.assertRaises(TypeError): + ac_tester.vararg_and_posonly() + with self.assertRaises(TypeError): + ac_tester.vararg_and_posonly(1, b=2) + self.assertEqual(ac_tester.vararg_and_posonly(1, 2, 3, 4), (1, (2, 3, 4))) + + def test_vararg(self): + with self.assertRaises(TypeError): + ac_tester.vararg() + with self.assertRaises(TypeError): + ac_tester.vararg(1, b=2) + self.assertEqual(ac_tester.vararg(1, 2, 3, 4), (1, (2, 3, 4))) + + def test_vararg_with_default(self): + with self.assertRaises(TypeError): + ac_tester.vararg_with_default() + 
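For the calls exercised above and continued below, the vararg functions behave like a positional-only argument followed by a normal argument and *args (again only an illustrative pure-Python equivalent):

    def posonly_vararg(a, /, b, *args):
        return (a, b, args)

    assert posonly_vararg(1, 2) == (1, 2, ())
    assert posonly_vararg(1, b=2) == (1, 2, ())
    assert posonly_vararg(1, 2, 3, 4) == (1, 2, (3, 4))
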
self.assertEqual(ac_tester.vararg_with_default(1, b=False), (1, (), False)) + self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4), (1, (2, 3, 4), False)) + self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4, b=True), (1, (2, 3, 4), True)) + + def test_vararg_with_only_defaults(self): + self.assertEqual(ac_tester.vararg_with_only_defaults(), ((), None)) + self.assertEqual(ac_tester.vararg_with_only_defaults(b=2), ((), 2)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, b=2), ((1, ), 2)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4), ((1, 2, 3, 4), None)) + self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4, b=5), ((1, 2, 3, 4), 5)) + + def test_gh_32092_oob(self): + ac_tester.gh_32092_oob(1, 2, 3, 4, kw1=5, kw2=6) + + def test_gh_32092_kw_pass(self): + ac_tester.gh_32092_kw_pass(1, 2, 3) + + def test_gh_99233_refcount(self): + arg = '*A unique string is not referenced by anywhere else.*' + arg_refcount_origin = sys.getrefcount(arg) + ac_tester.gh_99233_refcount(arg) + arg_refcount_after = sys.getrefcount(arg) + self.assertEqual(arg_refcount_origin, arg_refcount_after) + + def test_gh_99240_double_free(self): + expected_error = r'gh_99240_double_free\(\) argument 2 must be encoded string without null bytes, not str' + with self.assertRaisesRegex(TypeError, expected_error): + ac_tester.gh_99240_double_free('a', '\0b') + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_cmath.py b/Lib/test/test_cmath.py index 4bdec6d2d83848..9fa08dc4ff3fa7 100644 --- a/Lib/test/test_cmath.py +++ b/Lib/test/test_cmath.py @@ -192,14 +192,7 @@ def test_user_object(self): # end up being passed to the cmath functions # usual case: new-style class implementing __complex__ - class MyComplex(object): - def __init__(self, value): - self.value = value - def __complex__(self): - return self.value - - # old-style class implementing __complex__ - class MyComplexOS: + class MyComplex: def __init__(self, value): self.value = value def __complex__(self): @@ -208,18 +201,13 @@ def __complex__(self): # classes for which __complex__ raises an exception class SomeException(Exception): pass - class MyComplexException(object): - def __complex__(self): - raise SomeException - class MyComplexExceptionOS: + class MyComplexException: def __complex__(self): raise SomeException # some classes not providing __float__ or __complex__ class NeitherComplexNorFloat(object): pass - class NeitherComplexNorFloatOS: - pass class Index: def __int__(self): return 2 def __index__(self): return 2 @@ -228,48 +216,32 @@ def __int__(self): return 2 # other possible combinations of __float__ and __complex__ # that should work - class FloatAndComplex(object): + class FloatAndComplex: def __float__(self): return flt_arg def __complex__(self): return cx_arg - class FloatAndComplexOS: - def __float__(self): - return flt_arg - def __complex__(self): - return cx_arg - class JustFloat(object): - def __float__(self): - return flt_arg - class JustFloatOS: + class JustFloat: def __float__(self): return flt_arg for f in self.test_functions: # usual usage self.assertEqual(f(MyComplex(cx_arg)), f(cx_arg)) - self.assertEqual(f(MyComplexOS(cx_arg)), f(cx_arg)) # other combinations of __float__ and __complex__ self.assertEqual(f(FloatAndComplex()), f(cx_arg)) - self.assertEqual(f(FloatAndComplexOS()), f(cx_arg)) self.assertEqual(f(JustFloat()), f(flt_arg)) - self.assertEqual(f(JustFloatOS()), f(flt_arg)) self.assertEqual(f(Index()), f(int(Index()))) # TypeError should be raised for classes 
not providing # either __complex__ or __float__, even if they provide - # __int__ or __index__. An old-style class - # currently raises AttributeError instead of a TypeError; - # this could be considered a bug. + # __int__ or __index__: self.assertRaises(TypeError, f, NeitherComplexNorFloat()) self.assertRaises(TypeError, f, MyInt()) - self.assertRaises(Exception, f, NeitherComplexNorFloatOS()) # non-complex return value from __complex__ -> TypeError for bad_complex in non_complexes: self.assertRaises(TypeError, f, MyComplex(bad_complex)) - self.assertRaises(TypeError, f, MyComplexOS(bad_complex)) # exceptions in __complex__ should be propagated correctly self.assertRaises(SomeException, f, MyComplexException()) - self.assertRaises(SomeException, f, MyComplexExceptionOS()) def test_input_type(self): # ints should be acceptable inputs to all cmath diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 57f3648eb7017c..e3add0c1ee926c 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -708,7 +708,8 @@ def test_decoder_state(self): "spamspam", self.spambe) def test_bug691291(self): - # Files are always opened in binary mode, even if no binary mode was + # If encoding is not None, then + # files are always opened in binary mode, even if no binary mode was # specified. This means that no automatic conversion of '\n' is done # on reading and writing. s1 = 'Hello\r\nworld\r\n' @@ -1551,6 +1552,12 @@ def test_builtin_encode(self): self.assertEqual("pyth\xf6n.org".encode("idna"), b"xn--pythn-mua.org") self.assertEqual("pyth\xf6n.org.".encode("idna"), b"xn--pythn-mua.org.") + def test_builtin_decode_length_limit(self): + with self.assertRaisesRegex(UnicodeError, "way too long"): + (b"xn--016c"+b"a"*1100).decode("idna") + with self.assertRaisesRegex(UnicodeError, "too long"): + (b"xn--016c"+b"a"*70).decode("idna") + def test_stream(self): r = codecs.getreader("idna")(io.BytesIO(b"abc")) r.read(3) diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 133096d25a44bc..d7b51be642e46f 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -310,8 +310,8 @@ def test_filename(self): def test_warning(self): # Test that the warning is only returned once. 
with warnings_helper.check_warnings( - (".*literal", SyntaxWarning), - (".*invalid", DeprecationWarning), + ('"is" with a literal', SyntaxWarning), + ("invalid escape sequence", SyntaxWarning), ) as w: compile_command(r"'\e' is 0") self.assertEqual(len(w.warnings), 2) @@ -321,9 +321,9 @@ def test_warning(self): warnings.simplefilter('error', SyntaxWarning) compile_command('1 is 1', symbol='exec') - # Check DeprecationWarning treated as an SyntaxError + # Check SyntaxWarning treated as an SyntaxError with warnings.catch_warnings(), self.assertRaises(SyntaxError): - warnings.simplefilter('error', DeprecationWarning) + warnings.simplefilter('error', SyntaxWarning) compile_command(r"'\e'", symbol='exec') def test_incomplete_warning(self): @@ -337,7 +337,7 @@ def test_invalid_warning(self): warnings.simplefilter('always') self.assertInvalid("'\\e' 1") self.assertEqual(len(w), 1) - self.assertEqual(w[0].category, DeprecationWarning) + self.assertEqual(w[0].category, SyntaxWarning) self.assertRegex(str(w[0].message), 'invalid escape sequence') self.assertEqual(w[0].filename, '') diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index 1e398d6c3c7a3f..bfe18c7fc50330 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -810,6 +810,8 @@ def throw(self, typ, val=None, tb=None): def __await__(self): yield + self.validate_abstract_methods(Awaitable, '__await__') + non_samples = [None, int(), gen(), object()] for x in non_samples: self.assertNotIsInstance(x, Awaitable) @@ -860,6 +862,8 @@ def throw(self, typ, val=None, tb=None): def __await__(self): yield + self.validate_abstract_methods(Coroutine, '__await__', 'send', 'throw') + non_samples = [None, int(), gen(), object(), Bar()] for x in non_samples: self.assertNotIsInstance(x, Coroutine) @@ -1602,6 +1606,7 @@ def __len__(self): containers = [ seq, ItemsView({1: nan, 2: obj}), + KeysView({1: nan, 2: obj}), ValuesView({1: nan, 2: obj}) ] for container in containers: @@ -1865,6 +1870,8 @@ def test_MutableMapping_subclass(self): mymap['red'] = 5 self.assertIsInstance(mymap.keys(), Set) self.assertIsInstance(mymap.keys(), KeysView) + self.assertIsInstance(mymap.values(), Collection) + self.assertIsInstance(mymap.values(), ValuesView) self.assertIsInstance(mymap.items(), Set) self.assertIsInstance(mymap.items(), ItemsView) @@ -1940,6 +1947,7 @@ def test_ByteString(self): self.assertFalse(issubclass(sample, ByteString)) self.assertNotIsInstance(memoryview(b""), ByteString) self.assertFalse(issubclass(memoryview, ByteString)) + self.validate_abstract_methods(ByteString, '__getitem__', '__len__') def test_MutableSequence(self): for sample in [tuple, str, bytes]: diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 8bf8470ff16feb..998ce57927f1a9 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -494,9 +494,8 @@ def test_compile_filename(self): code = compile('pass', filename, 'exec') self.assertEqual(code.co_filename, 'file.py') for filename in bytearray(b'file.py'), memoryview(b'file.py'): - with self.assertWarns(DeprecationWarning): - code = compile('pass', filename, 'exec') - self.assertEqual(code.co_filename, 'file.py') + with self.assertRaises(TypeError): + compile('pass', filename, 'exec') self.assertRaises(TypeError, compile, 'pass', list(b'file.py'), 'exec') @support.cpython_only @@ -671,7 +670,7 @@ def test_merge_code_attrs(self): self.assertIs(f1.__code__.co_linetable, f2.__code__.co_linetable) @support.cpython_only - def test_strip_unused_consts(self): + def 
test_remove_unused_consts(self): def f(): "docstring" if True: @@ -680,7 +679,41 @@ def f(): return "unused" self.assertEqual(f.__code__.co_consts, - ("docstring", True, "used")) + ("docstring", "used")) + + @support.cpython_only + def test_remove_unused_consts_no_docstring(self): + # the first item (None for no docstring in this case) is + # always retained. + def f(): + if True: + return "used" + else: + return "unused" + + self.assertEqual(f.__code__.co_consts, + (None, "used")) + + @support.cpython_only + def test_remove_unused_consts_extended_args(self): + N = 1000 + code = ["def f():\n"] + code.append("\ts = ''\n") + code.append("\tfor i in range(1):\n") + for i in range(N): + code.append(f"\t\tif True: s += 't{i}'\n") + code.append(f"\t\tif False: s += 'f{i}'\n") + code.append("\treturn s\n") + + code = "".join(code) + g = {} + eval(compile(code, "file.py", "exec"), g) + exec(code, g) + f = g['f'] + expected = tuple([None, '', 1] + [f't{i}' for i in range(N)]) + self.assertEqual(f.__code__.co_consts, expected) + expected = "".join(expected[3:]) + self.assertEqual(expected, f()) # Stripping unused constants is not a strict requirement for the # Python semantics, it's a more an implementation detail. @@ -1113,6 +1146,17 @@ def test_compare_positions(self): with self.subTest(source): self.assertEqual(actual_positions, expected_positions) + def test_if_expression_expression_empty_block(self): + # See regression in gh-99708 + exprs = [ + "assert (False if 1 else True)", + "def f():\n\tif not (False if 1 else True): raise AssertionError", + "def f():\n\tif not (False if 1 else True): return 12", + ] + for expr in exprs: + with self.subTest(expr=expr): + compile(expr, "", "exec") + @requires_debug_ranges() class TestSourcePositions(unittest.TestCase): @@ -1131,7 +1175,7 @@ def check_positions_against_ast(self, snippet): class SourceOffsetVisitor(ast.NodeVisitor): def generic_visit(self, node): super().generic_visit(node) - if not isinstance(node, ast.expr) and not isinstance(node, ast.stmt): + if not isinstance(node, (ast.expr, ast.stmt, ast.pattern)): return lines.add(node.lineno) end_lines.add(node.end_lineno) @@ -1208,6 +1252,301 @@ def test_multiline_expression(self): self.assertOpcodeSourcePositionIs(compiled_code, 'CALL', line=1, end_line=3, column=0, end_column=1) + def test_multiline_boolean_expression(self): + snippet = """\ +if (a or + (b and not c) or + not ( + d > 0)): + x = 42 +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + # jump if a is true: + self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_TRUE', + line=1, end_line=1, column=4, end_column=5, occurrence=1) + # jump if b is false: + self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_FALSE', + line=2, end_line=2, column=5, end_column=6, occurrence=1) + # jump if c is false: + self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_FALSE', + line=2, end_line=2, column=15, end_column=16, occurrence=2) + # compare d and 0 + self.assertOpcodeSourcePositionIs(compiled_code, 'COMPARE_OP', + line=4, end_line=4, column=8, end_column=13, occurrence=1) + # jump if comparison it True + self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_TRUE', + line=4, end_line=4, column=8, end_column=13, occurrence=2) + + def test_multiline_assert(self): + snippet = """\ +assert (a > 0 and + bb > 0 and + ccc == 4), "error msg" +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'LOAD_ASSERTION_ERROR', + line=1, end_line=3, 
column=0, end_column=30, occurrence=1) + # The "error msg": + self.assertOpcodeSourcePositionIs(compiled_code, 'LOAD_CONST', + line=3, end_line=3, column=19, end_column=30, occurrence=4) + self.assertOpcodeSourcePositionIs(compiled_code, 'CALL', + line=1, end_line=3, column=0, end_column=30, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RAISE_VARARGS', + line=1, end_line=3, column=0, end_column=30, occurrence=1) + + def test_multiline_generator_expression(self): + snippet = """\ +((x, + 2*x) + for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)) +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + compiled_code = compiled_code.co_consts[0] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'YIELD_VALUE', + line=1, end_line=2, column=1, end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=1, end_line=2, column=1, end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=1, end_line=6, column=0, end_column=32, occurrence=1) + + def test_multiline_async_generator_expression(self): + snippet = """\ +((x, + 2*x) + async for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)) +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + compiled_code = compiled_code.co_consts[0] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'YIELD_VALUE', + line=1, end_line=2, column=1, end_column=8, occurrence=2) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=1, end_line=6, column=0, end_column=32, occurrence=1) + + def test_multiline_list_comprehension(self): + snippet = """\ +[(x, + 2*x) + for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)] +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + compiled_code = compiled_code.co_consts[0] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'LIST_APPEND', + line=1, end_line=2, column=1, end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=1, end_line=2, column=1, end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=1, end_line=6, column=0, end_column=32, occurrence=1) + + def test_multiline_async_list_comprehension(self): + snippet = """\ +async def f(): + [(x, + 2*x) + async for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)] +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + g = {} + eval(compiled_code, g) + compiled_code = g['f'].__code__.co_consts[1] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'LIST_APPEND', + line=2, end_line=3, column=5, end_column=12, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=2, end_line=3, column=5, end_column=12, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=2, end_line=7, column=4, end_column=36, occurrence=1) + + def test_multiline_set_comprehension(self): + snippet = """\ +{(x, + 2*x) + for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)} +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + compiled_code = compiled_code.co_consts[0] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'SET_ADD', + line=1, end_line=2, column=1, 
end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=1, end_line=2, column=1, end_column=8, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=1, end_line=6, column=0, end_column=32, occurrence=1) + + def test_multiline_async_set_comprehension(self): + snippet = """\ +async def f(): + {(x, + 2*x) + async for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)} +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + g = {} + eval(compiled_code, g) + compiled_code = g['f'].__code__.co_consts[1] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'SET_ADD', + line=2, end_line=3, column=5, end_column=12, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=2, end_line=3, column=5, end_column=12, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=2, end_line=7, column=4, end_column=36, occurrence=1) + + def test_multiline_dict_comprehension(self): + snippet = """\ +{x: + 2*x + for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)} +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + compiled_code = compiled_code.co_consts[0] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'MAP_ADD', + line=1, end_line=2, column=1, end_column=7, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=1, end_line=2, column=1, end_column=7, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=1, end_line=6, column=0, end_column=32, occurrence=1) + + def test_multiline_async_dict_comprehension(self): + snippet = """\ +async def f(): + {x: + 2*x + async for x + in [1,2,3] if (x > 0 + and x < 100 + and x != 50)} +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + g = {} + eval(compiled_code, g) + compiled_code = g['f'].__code__.co_consts[1] + self.assertIsInstance(compiled_code, types.CodeType) + self.assertOpcodeSourcePositionIs(compiled_code, 'MAP_ADD', + line=2, end_line=3, column=5, end_column=11, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', + line=2, end_line=3, column=5, end_column=11, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + line=2, end_line=7, column=4, end_column=36, occurrence=1) + + def test_matchcase_sequence(self): + snippet = """\ +match x: + case a, b: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_SEQUENCE', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_SEQUENCE', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=13, occurrence=2) + + def test_matchcase_sequence_wildcard(self): + snippet = """\ +match x: + case a, *b, c: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_SEQUENCE', + line=2, end_line=2, column=9, end_column=17, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_EX', + line=2, end_line=2, column=9, end_column=17, 
occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=2) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=17, occurrence=3) + + def test_matchcase_mapping(self): + snippet = """\ +match x: + case {"a" : a, "b": b}: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_MAPPING', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_KEYS', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=26, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=26, occurrence=2) + + def test_matchcase_mapping_wildcard(self): + snippet = """\ +match x: + case {"a" : a, "b": b, **c}: + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_MAPPING', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_KEYS', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=31, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=31, occurrence=2) + + def test_matchcase_class(self): + snippet = """\ +match x: + case C(a, b): + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'UNPACK_SEQUENCE', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=16, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'STORE_NAME', + line=2, end_line=2, column=9, end_column=16, occurrence=2) + + def test_matchcase_or(self): + snippet = """\ +match x: + case C(1) | C(2): + pass +""" + compiled_code, _ = self.check_positions_against_ast(snippet) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=9, end_column=13, occurrence=1) + self.assertOpcodeSourcePositionIs(compiled_code, 'MATCH_CLASS', + line=2, end_line=2, column=16, end_column=20, occurrence=2) + def test_very_long_line_end_offset(self): # Make sure we get the correct column offset for offsets # too large to store in a byte. 
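The assertOpcodeSourcePositionIs checks above rely on the fine-grained position table recorded since 3.11; the same data can be inspected directly through dis (a sketch, assuming a CPython build containing these changes; the columns in the comment mirror the ones asserted for MATCH_SEQUENCE above):

    import dis

    code = compile("match x:\n    case a, b:\n        pass", "<snippet>", "exec")
    for instr in dis.get_instructions(code):
        if instr.opname == "MATCH_SEQUENCE":
            # Expected on such a build:
            # Positions(lineno=2, end_lineno=2, col_offset=9, end_col_offset=13)
            print(instr.positions)
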
@@ -1445,6 +1784,13 @@ def test_stack_3050(self): # This raised on 3.10.0 to 3.10.5 compile(code, "", "single") + def test_stack_3050_2(self): + M = 3050 + args = ", ".join(f"arg{i}:type{i}" for i in range(M)) + code = f"def f({args}):\n pass" + # This raised on 3.10.0 to 3.10.5 + compile(code, "", "single") + class TestStackSizeStability(unittest.TestCase): # Check that repeating certain snippets doesn't increase the stack size diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py new file mode 100644 index 00000000000000..f2e14c1e628c01 --- /dev/null +++ b/Lib/test/test_compiler_codegen.py @@ -0,0 +1,50 @@ + +from test.support.bytecode_helper import CodegenTestCase + +# Tests for the code-generation stage of the compiler. +# Examine the un-optimized code generated from the AST. + +class IsolatedCodeGenTests(CodegenTestCase): + + def codegen_test(self, snippet, expected_insts): + import ast + a = ast.parse(snippet, "my_file.py", "exec"); + insts = self.generate_code(a) + self.assertInstructionsMatch(insts, expected_insts) + + def test_if_expression(self): + snippet = "42 if True else 24" + false_lbl = self.Label() + expected = [ + ('RESUME', 0, 0), + ('LOAD_CONST', 0, 1), + ('POP_JUMP_IF_FALSE', false_lbl := self.Label(), 1), + ('LOAD_CONST', 1, 1), + ('JUMP', exit_lbl := self.Label()), + false_lbl, + ('LOAD_CONST', 2, 1), + exit_lbl, + ('POP_TOP', None), + ] + self.codegen_test(snippet, expected) + + def test_for_loop(self): + snippet = "for x in l:\n\tprint(x)" + false_lbl = self.Label() + expected = [ + ('RESUME', 0, 0), + ('LOAD_NAME', 0, 1), + ('GET_ITER', None, 1), + loop_lbl := self.Label(), + ('FOR_ITER', exit_lbl := self.Label(), 1), + ('STORE_NAME', None, 1), + ('PUSH_NULL', None, 2), + ('LOAD_NAME', None, 2), + ('LOAD_NAME', None, 2), + ('CALL', None, 2), + ('POP_TOP', None), + ('JUMP', loop_lbl), + exit_lbl, + ('END_FOR', None), + ] + self.codegen_test(snippet, expected) diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index e046577b935e92..51ba151505fb54 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -306,15 +306,10 @@ def test_conjugate(self): self.assertClose(complex(5.3, 9.8).conjugate(), 5.3-9.8j) def test_constructor(self): - class OS: + class NS: def __init__(self, value): self.value = value def __complex__(self): return self.value - class NS(object): - def __init__(self, value): self.value = value - def __complex__(self): return self.value - self.assertEqual(complex(OS(1+10j)), 1+10j) self.assertEqual(complex(NS(1+10j)), 1+10j) - self.assertRaises(TypeError, complex, OS(None)) self.assertRaises(TypeError, complex, NS(None)) self.assertRaises(TypeError, complex, {}) self.assertRaises(TypeError, complex, NS(1.5)) diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index f4d91c16868986..cc95a319d35d78 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -88,9 +88,7 @@ def __getattribute__(self, name): # Type-specific _copy_xxx() methods def test_copy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass @@ -100,7 +98,7 @@ class WithMetaclass(metaclass=abc.ABCMeta): 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, b"world", bytes(range(256)), range(10), slice(1, 10, 2), - NewStyle, Classic, max, WithMetaclass, property()] + NewStyle, max, WithMetaclass, property()] for x in tests: self.assertIs(copy.copy(x), x) @@ -350,15 +348,13 @@ def __getattribute__(self, name): # Type-specific _deepcopy_xxx() methods def 
test_deepcopy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass tests = [None, 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, - NewStyle, range(10), Classic, max, property()] + NewStyle, range(10), max, property()] for x in tests: self.assertIs(copy.deepcopy(x), x) diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py index 9a2279d353ffa4..f91c9cc47741b5 100644 --- a/Lib/test/test_coroutines.py +++ b/Lib/test/test_coroutines.py @@ -1287,7 +1287,7 @@ async def __aexit__(self, *exc): async def func(): async with CM(): - assert (1, ) == 1 + self.assertEqual((1, ), 1) with self.assertRaises(AssertionError): run_async(func()) diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index a2b00430c7dd60..d64bff13a44e87 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -362,6 +362,11 @@ def test_read_quoting(self): self._read_test(['1,@,3,@,5'], [['1', ',3,', '5']], quotechar='@') self._read_test(['1,\0,3,\0,5'], [['1', ',3,', '5']], quotechar='\0') + def test_read_skipinitialspace(self): + self._read_test(['no space, space, spaces,\ttab'], + [['no space', 'space', 'spaces', '\ttab']], + skipinitialspace=True) + def test_read_bigfield(self): # This exercises the buffer realloc functionality and field size # limits. diff --git a/Lib/test/test_ctypes/test_struct_fields.py b/Lib/test/test_ctypes/test_struct_fields.py index ee8415f3e630c2..fefeaea1496a3e 100644 --- a/Lib/test/test_ctypes/test_struct_fields.py +++ b/Lib/test/test_ctypes/test_struct_fields.py @@ -54,6 +54,15 @@ class X(Structure): x.char = b'a\0b\0' self.assertEqual(bytes(x), b'a\x00###') + def test_gh99275(self): + class BrokenStructure(Structure): + def __init_subclass__(cls, **kwargs): + cls._fields_ = [] # This line will fail, `stgdict` is not ready + + with self.assertRaisesRegex(TypeError, + 'ctypes state is not initialized'): + class Subclass(BrokenStructure): ... + # __set__ and __get__ should raise a TypeError in case their self # argument is not a ctype instance. 
def test___set__(self): diff --git a/Lib/test/test_ctypes/test_structures.py b/Lib/test/test_ctypes/test_structures.py index 13c0470ba2238c..df39dc7f50d3f7 100644 --- a/Lib/test/test_ctypes/test_structures.py +++ b/Lib/test/test_ctypes/test_structures.py @@ -332,13 +332,13 @@ class Person(Structure): cls, msg = self.get_except(Person, b"Someone", (1, 2)) self.assertEqual(cls, RuntimeError) self.assertEqual(msg, - "(Phone) : " + "(Phone) TypeError: " "expected bytes, int found") cls, msg = self.get_except(Person, b"Someone", (b"a", b"b", b"c")) self.assertEqual(cls, RuntimeError) self.assertEqual(msg, - "(Phone) : too many initializers") + "(Phone) TypeError: too many initializers") def test_huge_field_name(self): # issue12881: segfault with large structure field names diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 328dcdcb0bce78..a09f36c3dac1ce 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -12,9 +12,9 @@ import weakref import unittest from unittest.mock import Mock -from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol +from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol, DefaultDict from typing import get_type_hints -from collections import deque, OrderedDict, namedtuple +from collections import deque, OrderedDict, namedtuple, defaultdict from functools import total_ordering import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation. @@ -257,6 +257,14 @@ class C: c = C('foo') self.assertEqual(c.object, 'foo') + def test_field_named_BUILTINS_frozen(self): + # gh-96151 + @dataclass(frozen=True) + class C: + BUILTINS: int + c = C(5) + self.assertEqual(c.BUILTINS, 5) + def test_field_named_like_builtin(self): # Attribute names can shadow built-in names # since code generation is used. @@ -1677,6 +1685,23 @@ class C: self.assertIsNot(d['f'], t) self.assertEqual(d['f'].my_a(), 6) + def test_helper_asdict_defaultdict(self): + # Ensure asdict() does not throw exceptions when a + # defaultdict is a member of a dataclass + + @dataclass + class C: + mp: DefaultDict[str, List] + + + dd = defaultdict(list) + dd["x"].append(12) + c = C(mp=dd) + d = asdict(c) + + assert d == {"mp": {"x": [12]}} + assert d["mp"] is not c.mp # make sure defaultdict is copied + def test_helper_astuple(self): # Basic tests for astuple(), it should return a new tuple. @dataclass @@ -3945,7 +3970,7 @@ class Date(A): day: 'int' self.assertTrue(inspect.isabstract(Date)) - msg = 'class Date without an implementation for abstract method foo' + msg = "class Date without an implementation for abstract method 'foo'" self.assertRaisesRegex(TypeError, msg, Date) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 037c859e97d41b..cbc020d1d3904a 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -1317,6 +1317,15 @@ class X(object): with self.assertRaisesRegex(AttributeError, "'X' object has no attribute 'a'"): X().a + # Test string subclass in `__slots__`, see gh-98783 + class SubStr(str): + pass + class X(object): + __slots__ = (SubStr('x'),) + X().x = 1 + with self.assertRaisesRegex(AttributeError, "'X' object has no attribute 'a'"): + X().a + def test_slots_special(self): # Testing __dict__ and __weakref__ in __slots__... 
class D(object): @@ -3252,12 +3261,8 @@ def __get__(self, object, otype): if otype: otype = otype.__name__ return 'object=%s; type=%s' % (object, otype) - class OldClass: + class NewClass: __doc__ = DocDescr() - class NewClass(object): - __doc__ = DocDescr() - self.assertEqual(OldClass.__doc__, 'object=None; type=OldClass') - self.assertEqual(OldClass().__doc__, 'object=OldClass instance; type=OldClass') self.assertEqual(NewClass.__doc__, 'object=None; type=NewClass') self.assertEqual(NewClass().__doc__, 'object=NewClass instance; type=NewClass') @@ -3589,6 +3594,16 @@ def __repr__(self): self.assertEqual(o.__str__(), '41') self.assertEqual(o.__repr__(), 'A repr') + def test_repr_with_module_str_subclass(self): + # gh-98783 + class StrSub(str): + pass + class Some: + pass + Some.__module__ = StrSub('example') + self.assertIsInstance(repr(Some), str) # should not crash + self.assertIsInstance(repr(Some()), str) # should not crash + def test_keyword_arguments(self): # Testing keyword arguments to __init__, __call__... def f(a): return a diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py index 7c48d800cd88bd..924f4a6829e19c 100644 --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -338,6 +338,9 @@ def test_abc_registry(self): self.assertIsInstance(d.values(), collections.abc.ValuesView) self.assertIsInstance(d.values(), collections.abc.MappingView) self.assertIsInstance(d.values(), collections.abc.Sized) + self.assertIsInstance(d.values(), collections.abc.Collection) + self.assertIsInstance(d.values(), collections.abc.Iterable) + self.assertIsInstance(d.values(), collections.abc.Container) self.assertIsInstance(d.items(), collections.abc.ItemsView) self.assertIsInstance(d.items(), collections.abc.MappingView) diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index fc2862c61baadb..950af3ceb24fea 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -148,7 +148,8 @@ def bug708901(): %3d JUMP_BACKWARD 4 (to 30) -%3d >> LOAD_CONST 0 (None) +%3d >> END_FOR + LOAD_CONST 0 (None) RETURN_VALUE """ % (bug708901.__code__.co_firstlineno, bug708901.__code__.co_firstlineno + 1, @@ -167,13 +168,13 @@ def bug1333982(x=[]): %3d RESUME 0 %3d LOAD_ASSERTION_ERROR - LOAD_CONST 2 ( at 0x..., file "%s", line %d>) + LOAD_CONST 1 ( at 0x..., file "%s", line %d>) MAKE_FUNCTION 0 LOAD_FAST 0 (x) GET_ITER CALL 0 -%3d LOAD_CONST 3 (1) +%3d LOAD_CONST 2 (1) %3d BINARY_OP 0 (+) CALL 0 @@ -542,8 +543,10 @@ async def _asyncwith(c): >> COPY 3 POP_EXCEPT RERAISE 1 + >> STOPITERATION_ERROR + RERAISE 1 ExceptionTable: -6 rows +12 rows """ % (_asyncwith.__code__.co_firstlineno, _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 2, @@ -699,7 +702,8 @@ def foo(x): BINARY_OP 0 (+) LIST_APPEND 2 JUMP_BACKWARD 9 (to 8) - >> RETURN_VALUE + >> END_FOR + RETURN_VALUE """ % (dis_nested_1, __file__, _h.__code__.co_firstlineno + 3, @@ -711,7 +715,7 @@ def load_test(x, y=0): return a, b dis_load_test_quickened_code = """\ -%3d 0 RESUME_QUICK 0 +%3d 0 RESUME 0 %3d 2 LOAD_FAST__LOAD_FAST 0 (x) 4 LOAD_FAST 1 (y) @@ -731,13 +735,13 @@ def loop_test(): load_test(i) dis_loop_test_quickened_code = """\ -%3d RESUME_QUICK 0 +%3d RESUME 0 %3d BUILD_LIST 0 LOAD_CONST 1 ((1, 2, 3)) LIST_EXTEND 1 LOAD_CONST 2 (3) - BINARY_OP_ADAPTIVE 5 (*) + BINARY_OP 5 (*) GET_ITER >> FOR_ITER_LIST 15 (to 50) STORE_FAST 0 (i) @@ -746,9 +750,10 @@ def loop_test(): LOAD_FAST 0 (i) CALL_PY_WITH_DEFAULTS 1 POP_TOP - JUMP_BACKWARD_QUICK 17 (to 16) + JUMP_BACKWARD 17 (to 16) -%3d >> LOAD_CONST 0 
(None) +%3d >> END_FOR + LOAD_CONST 0 (None) RETURN_VALUE """ % (loop_test.__code__.co_firstlineno, loop_test.__code__.co_firstlineno + 1, @@ -771,7 +776,7 @@ def extended_arg_quick(): """% (extended_arg_quick.__code__.co_firstlineno, extended_arg_quick.__code__.co_firstlineno + 1,) -QUICKENING_WARMUP_DELAY = 8 +ADAPTIVE_WARMUP_DELAY = 2 class DisTestBase(unittest.TestCase): "Common utilities for DisTests and TestDisTraceback" @@ -1076,7 +1081,7 @@ def check(expected, **kwargs): check(dis_nested_2) @staticmethod - def code_quicken(f, times=QUICKENING_WARMUP_DELAY): + def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY): for _ in range(times): f() @@ -1089,7 +1094,7 @@ def test_super_instructions(self): @cpython_only def test_binary_specialize(self): binary_op_quicken = """\ - 0 0 RESUME_QUICK 0 + 0 0 RESUME 0 1 2 LOAD_NAME 0 (a) 4 LOAD_NAME 1 (b) @@ -1107,7 +1112,7 @@ def test_binary_specialize(self): self.do_disassembly_compare(got, binary_op_quicken % "BINARY_OP_ADD_UNICODE 0 (+)", True) binary_subscr_quicken = """\ - 0 0 RESUME_QUICK 0 + 0 0 RESUME 0 1 2 LOAD_NAME 0 (a) 4 LOAD_CONST 0 (0) @@ -1127,7 +1132,7 @@ def test_binary_specialize(self): @cpython_only def test_load_attr_specialize(self): load_attr_quicken = """\ - 0 0 RESUME_QUICK 0 + 0 0 RESUME 0 1 2 LOAD_CONST 0 ('a') 4 LOAD_ATTR_SLOT 0 (__class__) @@ -1141,7 +1146,7 @@ def test_load_attr_specialize(self): @cpython_only def test_call_specialize(self): call_quicken = """\ - 0 RESUME_QUICK 0 + 0 RESUME 0 1 PUSH_NULL LOAD_NAME 0 (str) @@ -1187,7 +1192,7 @@ def test_show_caches(self): for quickened in (False, True): for adaptive in (False, True): with self.subTest(f"{quickened=}, {adaptive=}"): - if quickened and adaptive: + if adaptive: pattern = r"^(\w+: \d+)?$" else: pattern = r"^(\w+: 0)?$" @@ -1195,11 +1200,10 @@ def test_show_caches(self): for cache in caches: self.assertRegex(cache, pattern) total_caches = 23 - empty_caches = 8 if adaptive and quickened else total_caches + empty_caches = 8 self.assertEqual(caches.count(""), empty_caches) self.assertEqual(len(caches), total_caches) - class DisWithFileTests(DisTests): # Run the tests again, using the file arg instead of print @@ -1212,7 +1216,7 @@ def get_disassembly(self, func, lasti=-1, wrapper=True, **kwargs): return output.getvalue() -if sys.flags.optimize: +if dis.code_info.__doc__ is None: code_info_consts = "0: None" else: code_info_consts = "0: 'Formatted details of methods, functions, or code.'" @@ -1442,9 +1446,9 @@ def jumpy(): # End fodder for opinfo generation tests expected_outer_line = 1 _line_offset = outer.__code__.co_firstlineno - 1 -code_object_f = outer.__code__.co_consts[3] +code_object_f = outer.__code__.co_consts[1] expected_f_line = code_object_f.co_firstlineno - _line_offset -code_object_inner = code_object_f.co_consts[3] +code_object_inner = code_object_f.co_consts[1] expected_inner_line = code_object_inner.co_firstlineno - _line_offset expected_jumpy_line = 1 @@ -1481,21 +1485,21 @@ def _prepare_test_cases(): Instruction(opname='MAKE_CELL', opcode=135, arg=0, argval='a', argrepr='a', offset=0, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_CELL', opcode=135, arg=1, argval='b', argrepr='b', offset=2, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=4, starts_line=1, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=(3, 4), argrepr='(3, 4)', offset=6, starts_line=2, is_jump_target=False, 
positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=(3, 4), argrepr='(3, 4)', offset=6, starts_line=2, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=0, argval='a', argrepr='a', offset=8, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=1, argval='b', argrepr='b', offset=10, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=14, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=code_object_f, argrepr=repr(code_object_f), offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_FUNCTION', opcode=132, arg=9, argval=9, argrepr='defaults, closure', offset=16, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=20, starts_line=7, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=0, argval='a', argrepr='a', offset=32, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=1, argval='b', argrepr='b', offset=34, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=38, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=1, argrepr='1', offset=38, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Hello world!', argrepr="'Hello world!'", offset=44, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='Hello world!', argrepr="'Hello world!'", offset=44, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=7, argval=7, argrepr='', offset=46, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=56, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=58, starts_line=8, is_jump_target=False, positions=None), @@ -1507,13 +1511,13 @@ def _prepare_test_cases(): Instruction(opname='MAKE_CELL', opcode=135, arg=0, argval='c', argrepr='c', offset=2, starts_line=None, is_jump_target=False, positions=None), 
Instruction(opname='MAKE_CELL', opcode=135, arg=1, argval='d', argrepr='d', offset=4, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=6, starts_line=2, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval=(5, 6), argrepr='(5, 6)', offset=8, starts_line=3, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=(5, 6), argrepr='(5, 6)', offset=8, starts_line=3, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=3, argval='a', argrepr='a', offset=10, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=4, argval='b', argrepr='b', offset=12, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=0, argval='c', argrepr='c', offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CLOSURE', opcode=136, arg=1, argval='d', argrepr='d', offset=16, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='MAKE_FUNCTION', opcode=132, arg=9, argval=9, argrepr='defaults, closure', offset=22, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=24, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=26, starts_line=5, is_jump_target=False, positions=None), @@ -1566,97 +1570,98 @@ def _prepare_test_cases(): Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=88, argrepr='to 88', offset=84, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='JUMP_BACKWARD', opcode=140, arg=30, argval=28, argrepr='to 28', offset=86, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=88, starts_line=8, is_jump_target=True, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=13, argval=118, argrepr='to 118', offset=90, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=92, starts_line=10, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=104, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=106, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=116, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=118, starts_line=11, 
is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=35, argval=192, argrepr='to 192', offset=120, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=122, starts_line=12, is_jump_target=True, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=134, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=136, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=146, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=148, starts_line=13, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=150, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=152, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=156, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=158, starts_line=14, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=160, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=162, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=172, argrepr='to 172', offset=168, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=27, argval=118, argrepr='to 118', offset=170, starts_line=15, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=172, starts_line=16, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=174, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=176, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=186, argrepr='to 186', offset=182, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=16, argval=218, argrepr='to 218', offset=184, starts_line=17, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=186, starts_line=11, is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=192, argrepr='to 192', offset=188, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=35, argval=122, argrepr='to 122', offset=190, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=192, starts_line=19, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, 
arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=204, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=206, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=216, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=218, starts_line=20, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=220, starts_line=21, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=222, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=224, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=228, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=230, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=232, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=234, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=236, starts_line=26, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=248, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=250, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=260, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=262, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=264, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=14, argval=120, argrepr='to 120', offset=90, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=92, starts_line=3, is_jump_target=True, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=94, starts_line=10, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=106, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=108, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=118, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST_CHECK', opcode=127, 
arg=0, argval='i', argrepr='i', offset=120, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=35, argval=194, argrepr='to 194', offset=122, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=124, starts_line=12, is_jump_target=True, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=136, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=138, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=148, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=150, starts_line=13, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=152, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=154, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=158, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=160, starts_line=14, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=162, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=164, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=174, argrepr='to 174', offset=170, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=27, argval=120, argrepr='to 120', offset=172, starts_line=15, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=174, starts_line=16, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=176, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=178, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=188, argrepr='to 188', offset=184, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=16, argval=220, argrepr='to 220', offset=186, starts_line=17, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=188, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=194, argrepr='to 194', offset=190, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=35, argval=124, argrepr='to 124', offset=192, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=194, starts_line=19, is_jump_target=True, 
positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=206, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=208, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=220, starts_line=20, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=222, starts_line=21, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=224, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=226, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=230, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=232, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=234, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=236, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=238, starts_line=26, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=250, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=252, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=262, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=264, starts_line=25, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=266, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=268, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=278, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=280, starts_line=28, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=292, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=294, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=304, starts_line=None, is_jump_target=False, positions=None), - 
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=306, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=308, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=310, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=312, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=318, argrepr='to 318', offset=314, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=316, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=318, starts_line=None, is_jump_target=True, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=320, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=322, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=268, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=270, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=280, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=282, starts_line=28, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=294, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=296, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=306, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=308, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=310, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=312, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=314, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=320, argrepr='to 320', offset=316, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=318, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=320, starts_line=None, is_jump_target=True, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, 
argrepr='', offset=322, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=324, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=24, argval=280, argrepr='to 280', offset=326, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=328, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=330, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=332, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=334, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=336, starts_line=22, is_jump_target=False, positions=None), - Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=348, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=16, argval=384, argrepr='to 384', offset=350, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=352, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=354, starts_line=23, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=366, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=368, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=378, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=380, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=52, argval=280, argrepr='to 280', offset=382, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=384, starts_line=22, is_jump_target=True, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=386, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=388, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=390, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=392, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=394, starts_line=28, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", 
argrepr='"OK, now we\'re done"', offset=406, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=408, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=418, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=420, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=422, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=424, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=426, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=326, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=24, argval=282, argrepr='to 282', offset=328, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=330, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=332, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=334, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=336, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=338, starts_line=22, is_jump_target=False, positions=None), + Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=350, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=16, argval=386, argrepr='to 386', offset=352, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=354, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=356, starts_line=23, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=368, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=370, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=380, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=382, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=52, argval=282, argrepr='to 282', offset=384, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=386, 
starts_line=22, is_jump_target=True, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=388, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=390, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=392, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=394, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=396, starts_line=28, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=408, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=410, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=420, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=422, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=424, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=426, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=428, starts_line=None, is_jump_target=False, positions=None), ] # One last piece of inspect fodder to check the default line number handling diff --git a/Lib/test/test_distutils.py b/Lib/test/test_distutils.py deleted file mode 100644 index 28320fb5c0bfd1..00000000000000 --- a/Lib/test/test_distutils.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for distutils. - -The tests for distutils are defined in the distutils.tests package; -the test_suite() function there returns a test suite that's ready to -be run. 
-""" - -import unittest -from test import support -from test.support import warnings_helper - -with warnings_helper.check_warnings( - ("The distutils package is deprecated", DeprecationWarning), quiet=True): - - import distutils.tests - - -def load_tests(*_): - # used by unittest - return distutils.tests.test_suite() - - -def tearDownModule(): - support.reap_children() - -if support.check_sanitizer(address=True): - raise unittest.SkipTest("Exposes ASAN flakiness in GitHub CI") - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index c5aeb9459848e4..2dda7ccf7bf80c 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -340,33 +340,43 @@ def test_finalize_structseq(self): out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) self.assertEqual(out, 'Tests passed\n' * INIT_LOOPS) - def test_quickened_static_code_gets_unquickened_at_Py_FINALIZE(self): - # https://github.com/python/cpython/issues/92031 + def test_simple_initialization_api(self): + # _testembed now uses Py_InitializeFromConfig by default + # This case specifically checks Py_Initialize(Ex) still works + out, err = self.run_embedded_interpreter("test_repeated_simple_init") + self.assertEqual(out, 'Finalized\n' * INIT_LOOPS) - # Do these imports outside of the code string to avoid using - # importlib too much from within the code string, so that - # _handle_fromlist doesn't get quickened until we intend it to. - from dis import _all_opmap - resume = _all_opmap["RESUME"] - resume_quick = _all_opmap["RESUME_QUICK"] - from test.test_dis import QUICKENING_WARMUP_DELAY + def test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE(self): + # https://github.com/python/cpython/issues/92031 - code = textwrap.dedent(f"""\ + code = textwrap.dedent("""\ + import dis import importlib._bootstrap + import opcode + import test.test_dis + + def is_specialized(f): + for instruction in dis.get_instructions(f, adaptive=True): + opname = instruction.opname + if ( + opname in opcode._specialized_instructions + # Exclude superinstructions: + and "__" not in opname + ): + return True + return False + func = importlib._bootstrap._handle_fromlist - code = func.__code__ - # Assert initially unquickened. - # Use sets to account for byte order. 
- if set(code._co_code_adaptive[:2]) != set([{resume}, 0]): - raise AssertionError() + # "copy" the code to un-specialize it: + func.__code__ = func.__code__.replace() + + assert not is_specialized(func), "specialized instructions found" - for i in range({QUICKENING_WARMUP_DELAY}): + for i in range(test.test_dis.ADAPTIVE_WARMUP_DELAY): func(importlib._bootstrap, ["x"], lambda *args: None) - # Assert quickening worked - if set(code._co_code_adaptive[:2]) != set([{resume_quick}, 0]): - raise AssertionError() + assert is_specialized(func), "no specialized instructions found" print("Tests passed") """) @@ -496,7 +506,6 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'check_hash_pycs_mode': 'default', 'pathconfig_warnings': 1, '_init_main': 1, - '_isolated_interpreter': 0, 'use_frozen_modules': not support.Py_DEBUG, 'safe_path': 0, '_is_python_build': IGNORE_CONFIG, @@ -881,8 +890,6 @@ def test_init_from_config(self): 'check_hash_pycs_mode': 'always', 'pathconfig_warnings': 0, - - '_isolated_interpreter': 1, } self.check_all_configs("test_init_from_config", config, preconfig, api=API_COMPAT) @@ -1481,17 +1488,11 @@ def test_init_pyvenv_cfg(self): if not MS_WINDOWS: paths[-1] = lib_dynload else: - # Include DLLs directory as well - paths.insert(1, '.\\DLLs') - for index, path in enumerate(paths): - if index == 0: - # Because we copy the DLLs into tmpdir as well, the zip file - # entry in sys.path will be there. For a regular venv, it will - # usually be in the home directory. - paths[index] = os.path.join(tmpdir, os.path.basename(path)) - else: - paths[index] = os.path.join(pyvenv_home, os.path.basename(path)) - paths[-1] = pyvenv_home + paths = [ + os.path.join(tmpdir, os.path.basename(paths[0])), + pyvenv_home, + os.path.join(pyvenv_home, "Lib"), + ] executable = self.test_exe base_executable = os.path.join(pyvenv_home, os.path.basename(executable)) @@ -1508,12 +1509,12 @@ def test_init_pyvenv_cfg(self): config['base_prefix'] = pyvenv_home config['prefix'] = pyvenv_home config['stdlib_dir'] = os.path.join(pyvenv_home, 'Lib') - config['use_frozen_modules'] = not support.Py_DEBUG + config['use_frozen_modules'] = int(not support.Py_DEBUG) else: # cannot reliably assume stdlib_dir here because it # depends too much on our build. But it ought to be found config['stdlib_dir'] = self.IGNORE_CONFIG - config['use_frozen_modules'] = not support.Py_DEBUG + config['use_frozen_modules'] = int(not support.Py_DEBUG) env = self.copy_paths_by_env(config) self.check_all_configs("test_init_compat_config", config, @@ -1650,6 +1651,26 @@ def test_init_use_frozen_modules(self): self.check_all_configs("test_init_use_frozen_modules", config, api=API_PYTHON, env=env) + def test_init_main_interpreter_settings(self): + THREADS = 1<<10 + DAEMON_THREADS = 1<<11 + FORK = 1<<15 + EXEC = 1<<16 + expected = { + # All optional features should be enabled. 
+ 'feature_flags': FORK | EXEC | THREADS | DAEMON_THREADS, + } + out, err = self.run_embedded_interpreter( + 'test_init_main_interpreter_settings', + ) + self.assertEqual(err, '') + try: + out = json.loads(out) + except json.JSONDecodeError: + self.fail(f'fail to decode stdout: {out!r}') + + self.assertEqual(out, expected) + class SetConfigTests(unittest.TestCase): def test_set_config(self): diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index f50017d916f5e3..b6082cf02b18d7 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -14,7 +14,7 @@ from enum import Enum, IntEnum, StrEnum, EnumType, Flag, IntFlag, unique, auto from enum import STRICT, CONFORM, EJECT, KEEP, _simple_enum, _test_simple_enum from enum import verify, UNIQUE, CONTINUOUS, NAMED_FLAGS, ReprEnum -from enum import member, nonmember +from enum import member, nonmember, _iter_bits_lsb from io import StringIO from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL from test import support @@ -174,6 +174,10 @@ def test_is_private(self): for name in self.sunder_names + self.dunder_names + self.random_names: self.assertFalse(enum._is_private('MyEnum', name), '%r is a private name?') + def test_iter_bits_lsb(self): + self.assertEqual(list(_iter_bits_lsb(7)), [1, 2, 4]) + self.assertRaisesRegex(ValueError, '-8 is not a positive integer', list, _iter_bits_lsb(-8)) + # for subclassing tests @@ -1317,6 +1321,21 @@ def test_programmatic_function_type_from_subclass_with_start(self): self.assertIn(e, MinorEnum) self.assertIs(type(e), MinorEnum) + def test_programmatic_function_is_value_call(self): + class TwoPart(Enum): + ONE = 1, 1.0 + TWO = 2, 2.0 + THREE = 3, 3.0 + self.assertRaisesRegex(ValueError, '1 is not a valid .*TwoPart', TwoPart, 1) + self.assertIs(TwoPart((1, 1.0)), TwoPart.ONE) + self.assertIs(TwoPart(1, 1.0), TwoPart.ONE) + class ThreePart(Enum): + ONE = 1, 1.0, 'one' + TWO = 2, 2.0, 'two' + THREE = 3, 3.0, 'three' + self.assertIs(ThreePart((3, 3.0, 'three')), ThreePart.THREE) + self.assertIs(ThreePart(3, 3.0, 'three'), ThreePart.THREE) + def test_intenum_from_bytes(self): self.assertIs(IntStooges.from_bytes(b'\x00\x03', 'big'), IntStooges.MOE) with self.assertRaises(ValueError): @@ -1459,7 +1478,7 @@ class MoreColor(Color): class EvenMoreColor(Color, IntEnum): chartruese = 7 # - with self.assertRaisesRegex(TypeError, " cannot extend "): + with self.assertRaisesRegex(ValueError, r"\(.Foo., \(.pink., .black.\)\) is not a valid .*Color"): Color('Foo', ('pink', 'black')) def test_exclude_methods(self): @@ -3960,6 +3979,16 @@ class Sillier(IntEnum): triple = 3 value = 4 + def test_negative_alias(self): + @verify(NAMED_FLAGS) + class Color(Flag): + RED = 1 + GREEN = 2 + BLUE = 4 + WHITE = -1 + # no error means success + + class TestInternals(unittest.TestCase): sunder_names = '_bad_', '_good_', '_what_ho_' @@ -4116,6 +4145,50 @@ class Dupes(Enum): third = auto() self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes)) + def test_multiple_auto_on_line(self): + class Huh(Enum): + ONE = auto() + TWO = auto(), auto() + THREE = auto(), auto(), auto() + self.assertEqual(Huh.ONE.value, 1) + self.assertEqual(Huh.TWO.value, (2, 3)) + self.assertEqual(Huh.THREE.value, (4, 5, 6)) + # + class Hah(Enum): + def __new__(cls, value, abbr=None): + member = object.__new__(cls) + member._value_ = value + member.abbr = abbr or value[:3].lower() + return member + def _generate_next_value_(name, start, count, last): + return name + # + MONDAY = auto() + TUESDAY = auto() + WEDNESDAY = auto(), 'WED' + 
THURSDAY = auto(), 'Thu' + FRIDAY = auto() + self.assertEqual(Hah.MONDAY.value, 'MONDAY') + self.assertEqual(Hah.MONDAY.abbr, 'mon') + self.assertEqual(Hah.TUESDAY.value, 'TUESDAY') + self.assertEqual(Hah.TUESDAY.abbr, 'tue') + self.assertEqual(Hah.WEDNESDAY.value, 'WEDNESDAY') + self.assertEqual(Hah.WEDNESDAY.abbr, 'WED') + self.assertEqual(Hah.THURSDAY.value, 'THURSDAY') + self.assertEqual(Hah.THURSDAY.abbr, 'Thu') + self.assertEqual(Hah.FRIDAY.value, 'FRIDAY') + self.assertEqual(Hah.FRIDAY.abbr, 'fri') + # + class Huh(Enum): + def _generate_next_value_(name, start, count, last): + return count+1 + ONE = auto() + TWO = auto(), auto() + THREE = auto(), auto(), auto() + self.assertEqual(Huh.ONE.value, 1) + self.assertEqual(Huh.TWO.value, (2, 2)) + self.assertEqual(Huh.THREE.value, (3, 3, 3)) + class TestEnumTypeSubclassing(unittest.TestCase): pass @@ -4123,7 +4196,7 @@ class TestEnumTypeSubclassing(unittest.TestCase): Help on class Color in module %s: class Color(enum.Enum) - | Color(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None) + | Color(value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None) | | Method resolution order: | Color @@ -4179,7 +4252,7 @@ class Color(enum.Enum) Help on class Color in module %s: class Color(enum.Enum) - | Color(value, names=None, *, module=None, qualname=None, type=None, start=1) + | Color(value, names=None, *values, module=None, qualname=None, type=None, start=1) | | Method resolution order: | Color @@ -4332,10 +4405,16 @@ class SimpleColor: CYAN = 1 MAGENTA = 2 YELLOW = 3 + @bltns.property + def zeroth(self): + return 'zeroed %s' % self.name class CheckedColor(Enum): CYAN = 1 MAGENTA = 2 YELLOW = 3 + @bltns.property + def zeroth(self): + return 'zeroed %s' % self.name self.assertTrue(_test_simple_enum(CheckedColor, SimpleColor) is None) SimpleColor.MAGENTA._value_ = 9 self.assertRaisesRegex( diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py index dbe8eff32924ed..9de72dbd5a3264 100644 --- a/Lib/test/test_except_star.py +++ b/Lib/test/test_except_star.py @@ -1000,5 +1000,204 @@ def test_exc_info_restored(self): self.assertEqual(sys.exc_info(), (None, None, None)) +class TestExceptStar_WeirdLeafExceptions(ExceptStarTest): + # Test that except* works when leaf exceptions are + # unhashable or have a bad custom __eq__ + + class UnhashableExc(ValueError): + __hash__ = None + + class AlwaysEqualExc(ValueError): + def __eq__(self, other): + return True + + class NeverEqualExc(ValueError): + def __eq__(self, other): + return False + + class BrokenEqualExc(ValueError): + def __eq__(self, other): + raise RuntimeError() + + def setUp(self): + self.bad_types = [self.UnhashableExc, + self.AlwaysEqualExc, + self.NeverEqualExc, + self.BrokenEqualExc] + + def except_type(self, eg, type): + match, rest = None, None + try: + try: + raise eg + except* type as e: + match = e + except Exception as e: + rest = e + return match, rest + + def test_catch_unhashable_leaf_exception(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, Bad) + self.assertExceptionIsLike( + match, ExceptionGroup("eg", [Bad(2)])) + self.assertExceptionIsLike( + rest, ExceptionGroup("eg", [TypeError(1)])) + + def test_propagate_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, TypeError) + 
self.assertExceptionIsLike( + match, ExceptionGroup("eg", [TypeError(1)])) + self.assertExceptionIsLike( + rest, ExceptionGroup("eg", [Bad(2)])) + + def test_catch_nothing_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, OSError) + self.assertIsNone(match) + self.assertExceptionIsLike(rest, eg) + + def test_catch_everything_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup("eg", [TypeError(1), Bad(2)]) + match, rest = self.except_type(eg, Exception) + self.assertExceptionIsLike(match, eg) + self.assertIsNone(rest) + + def test_reraise_unhashable_leaf(self): + for Bad in self.bad_types: + with self.subTest(Bad): + eg = ExceptionGroup( + "eg", [TypeError(1), Bad(2), ValueError(3)]) + + try: + try: + raise eg + except* TypeError: + pass + except* Bad: + raise + except Exception as e: + exc = e + + self.assertExceptionIsLike( + exc, ExceptionGroup("eg", [Bad(2), ValueError(3)])) + + +class TestExceptStar_WeirdExceptionGroupSubclass(ExceptStarTest): + # Test that except* works with exception groups that are + # unhashable or have a bad custom __eq__ + + class UnhashableEG(ExceptionGroup): + __hash__ = None + + def derive(self, excs): + return type(self)(self.message, excs) + + class AlwaysEqualEG(ExceptionGroup): + def __eq__(self, other): + return True + + def derive(self, excs): + return type(self)(self.message, excs) + + class NeverEqualEG(ExceptionGroup): + def __eq__(self, other): + return False + + def derive(self, excs): + return type(self)(self.message, excs) + + class BrokenEqualEG(ExceptionGroup): + def __eq__(self, other): + raise RuntimeError() + + def derive(self, excs): + return type(self)(self.message, excs) + + def setUp(self): + self.bad_types = [self.UnhashableEG, + self.AlwaysEqualEG, + self.NeverEqualEG, + self.BrokenEqualEG] + + def except_type(self, eg, type): + match, rest = None, None + try: + try: + raise eg + except* type as e: + match = e + except Exception as e: + rest = e + return match, rest + + def test_catch_some_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, TypeError) + self.assertExceptionIsLike(match, BadEG("eg", [TypeError(1)])) + self.assertExceptionIsLike(rest, + BadEG("eg", [BadEG("nested", [ValueError(2)])])) + + def test_catch_none_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, OSError) + self.assertIsNone(match) + self.assertExceptionIsLike(rest, eg) + + def test_catch_all_unhashable_exception_group_subclass(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), + BadEG("nested", [ValueError(2)])]) + + match, rest = self.except_type(eg, Exception) + self.assertExceptionIsLike(match, eg) + self.assertIsNone(rest) + + def test_reraise_unhashable_eg(self): + for BadEG in self.bad_types: + with self.subTest(BadEG): + + eg = BadEG("eg", + [TypeError(1), ValueError(2), + BadEG("nested", [ValueError(3), OSError(4)])]) + + try: + try: + raise eg + except* ValueError: + pass + except* OSError: + raise + except Exception as e: + exc = e + + self.assertExceptionIsLike( + exc, BadEG("eg", [TypeError(1), + BadEG("nested", 
[OSError(4)])])) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_exception_group.py b/Lib/test/test_exception_group.py index aa28e16bedfa68..b11524e778e665 100644 --- a/Lib/test/test_exception_group.py +++ b/Lib/test/test_exception_group.py @@ -78,16 +78,30 @@ def test_BEG_wraps_BaseException__creates_BEG(self): beg = BaseExceptionGroup("beg", [ValueError(1), KeyboardInterrupt(2)]) self.assertIs(type(beg), BaseExceptionGroup) - def test_EG_subclass_wraps_anything(self): + def test_EG_subclass_wraps_non_base_exceptions(self): class MyEG(ExceptionGroup): pass self.assertIs( type(MyEG("eg", [ValueError(12), TypeError(42)])), MyEG) - self.assertIs( - type(MyEG("eg", [ValueError(12), KeyboardInterrupt(42)])), - MyEG) + + def test_EG_subclass_does_not_wrap_base_exceptions(self): + class MyEG(ExceptionGroup): + pass + + msg = "Cannot nest BaseExceptions in 'MyEG'" + with self.assertRaisesRegex(TypeError, msg): + MyEG("eg", [ValueError(12), KeyboardInterrupt(42)]) + + def test_BEG_and_E_subclass_does_not_wrap_base_exceptions(self): + class MyEG(BaseExceptionGroup, ValueError): + pass + + msg = "Cannot nest BaseExceptions in 'MyEG'" + with self.assertRaisesRegex(TypeError, msg): + MyEG("eg", [ValueError(12), KeyboardInterrupt(42)]) + def test_BEG_subclass_wraps_anything(self): class MyBEG(BaseExceptionGroup): diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 5dda2fbfac374c..a7db22007dedce 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -1,9 +1,14 @@ +import gc import re import sys import textwrap import types import unittest import weakref +try: + import _testcapi +except ImportError: + _testcapi = None from test import support from test.support.script_helper import assert_python_ok @@ -261,5 +266,106 @@ def gen(): """) assert_python_ok("-c", code) + @support.cpython_only + def test_sneaky_frame_object(self): + + def trace(frame, event, arg): + """ + Don't actually do anything, just force a frame object to be created. + """ + + def callback(phase, info): + """ + Yo dawg, I heard you like frames, so I'm allocating a frame while + you're allocating a frame, so you can have a frame while you have a + frame! + """ + nonlocal sneaky_frame_object + sneaky_frame_object = sys._getframe().f_back.f_back + # We're done here: + gc.callbacks.remove(callback) + + def f(): + while True: + yield + + old_threshold = gc.get_threshold() + old_callbacks = gc.callbacks[:] + old_enabled = gc.isenabled() + old_trace = sys.gettrace() + try: + # Stop the GC for a second while we set things up: + gc.disable() + # Create a paused generator: + g = f() + next(g) + # Move all objects to the oldest generation, and tell the GC to run + # on the *very next* allocation: + gc.collect() + gc.set_threshold(1, 0, 0) + # Okay, so here's the nightmare scenario: + # - We're tracing the resumption of a generator, which creates a new + # frame object. + # - The allocation of this frame object triggers a collection + # *before* the frame object is actually created. + # - During the collection, we request the exact same frame object. + # This test does it with a GC callback, but in real code it would + # likely be a trace function, weakref callback, or finalizer. + # - The collection finishes, and the original frame object is + # created. We now have two frame objects fighting over ownership + # of the same interpreter frame! 
+ sys.settrace(trace) + gc.callbacks.append(callback) + sneaky_frame_object = None + gc.enable() + next(g) + # g.gi_frame should be the frame object from the callback (the + # one that was *requested* second, but *created* first): + self.assertIs(g.gi_frame, sneaky_frame_object) + finally: + gc.set_threshold(*old_threshold) + gc.callbacks[:] = old_callbacks + sys.settrace(old_trace) + if old_enabled: + gc.enable() + + +@unittest.skipIf(_testcapi is None, 'need _testcapi') +class TestCAPI(unittest.TestCase): + def getframe(self): + return sys._getframe() + + def test_frame_getters(self): + frame = self.getframe() + self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) + self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) + self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) + self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) + + def test_getvar(self): + current_frame = sys._getframe() + x = 1 + self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) + self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) + with self.assertRaises(NameError): + _testcapi.frame_getvar(current_frame, "y") + with self.assertRaises(NameError): + _testcapi.frame_getvarstring(current_frame, b"y") + + # wrong name type + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, b'x') + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, 123) + + def getgenframe(self): + yield sys._getframe() + + def test_frame_get_generator(self): + gen = self.getgenframe() + frame = next(gen) + self.assertIs(gen, _testcapi.frame_getgenerator(frame)) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index bf3a5b0bbccdfb..318f38a6ed5b14 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -776,7 +776,7 @@ def test_backslashes_in_string_part(self): self.assertEqual(f'2\x203', '2 3') self.assertEqual(f'\x203', ' 3') - with self.assertWarns(DeprecationWarning): # invalid escape sequence + with self.assertWarns(SyntaxWarning): # invalid escape sequence value = eval(r"f'\{6*7}'") self.assertEqual(value, '\\42') self.assertEqual(f'\\{6*7}', '\\42') diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index 082a90d46baedc..544228e3bab47b 100644 --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -21,13 +21,11 @@ from test.support import threading_helper from test.support import socket_helper from test.support import warnings_helper +from test.support import asynchat +from test.support import asyncore from test.support.socket_helper import HOST, HOSTv6 -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') - - support.requires_working_socket(module=True) TIMEOUT = support.LOOPBACK_TIMEOUT @@ -984,11 +982,11 @@ def test_context(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - self.assertRaises(ValueError, ftplib.FTP_TLS, keyfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, keyfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, certfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE, + self.assertRaises(TypeError, ftplib.FTP_TLS, certfile=CERTFILE, keyfile=CERTFILE, context=ctx) self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT) diff --git 
a/Lib/test/test_functools.py b/Lib/test/test_functools.py index a4b098a2a5b8ed..730ab1f595f22c 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -17,6 +17,7 @@ import gc from weakref import proxy import contextlib +from inspect import Signature from test.support import import_helper from test.support import threading_helper @@ -941,6 +942,10 @@ def mycmp(x, y): self.assertRaises(TypeError, hash, k) self.assertNotIsInstance(k, collections.abc.Hashable) + def test_cmp_to_signature(self): + self.assertEqual(str(Signature.from_callable(self.cmp_to_key)), + '(mycmp)') + @unittest.skipUnless(c_functools, 'requires the C _functools module') class TestCmpToKeyC(TestCmpToKey, unittest.TestCase): @@ -1853,6 +1858,13 @@ def test_staticmethod(x): for ref in refs: self.assertIsNone(ref()) + def test_common_signatures(self): + def orig(): ... + lru = self.module.lru_cache(1)(orig) + + self.assertEqual(str(Signature.from_callable(lru.cache_info)), '()') + self.assertEqual(str(Signature.from_callable(lru.cache_clear)), '()') + @py_functools.lru_cache() def py_cached_func(x, y): diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index 189cbdc4365b79..b8b591a1bcf2c6 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -60,7 +60,7 @@ def test_badfuture6(self): def test_badfuture7(self): with self.assertRaises(SyntaxError) as cm: from test import badsyntax_future7 - self.check_syntax_error(cm.exception, "badsyntax_future7", 3, 53) + self.check_syntax_error(cm.exception, "badsyntax_future7", 3, 54) def test_badfuture8(self): with self.assertRaises(SyntaxError) as cm: diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 087f72768fa4bf..db7cb9ace6e5f3 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -542,48 +542,6 @@ def __getattr__(self, someattribute): self.assertEqual(gc.collect(), 2) self.assertEqual(len(gc.garbage), garbagelen) - def test_boom_new(self): - # boom__new and boom2_new are exactly like boom and boom2, except use - # new-style classes. 
- - class Boom_New(object): - def __getattr__(self, someattribute): - del self.attr - raise AttributeError - - a = Boom_New() - b = Boom_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - - def test_boom2_new(self): - class Boom2_New(object): - def __init__(self): - self.x = 0 - - def __getattr__(self, someattribute): - self.x += 1 - if self.x > 1: - del self.attr - raise AttributeError - - a = Boom2_New() - b = Boom2_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - def test_get_referents(self): alist = [1, 3, 5] got = gc.get_referents(alist) diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 42cc20c4676640..492b77a954d865 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -306,6 +306,26 @@ def gen(): self.assertEqual(next(g), "done") self.assertEqual(sys.exc_info(), (None, None, None)) + def test_nested_gen_except_loop(self): + def gen(): + for i in range(100): + self.assertIsInstance(sys.exception(), TypeError) + yield "doing" + + def outer(): + try: + raise TypeError + except: + for x in gen(): + yield x + + try: + raise ValueError + except Exception: + for x in outer(): + self.assertEqual(x, "doing") + self.assertEqual(sys.exception(), None) + def test_except_throw_exception_context(self): def gen(): try: diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 1afb7ea4f85d0e..6d0a556b1f7fe2 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -2,6 +2,7 @@ import unittest import pickle +from array import array import copy from collections import ( defaultdict, deque, OrderedDict, Counter, UserDict, UserList @@ -124,7 +125,8 @@ class BaseTest(unittest.TestCase): ShareableList, Future, _WorkItem, Morsel, - DictReader, DictWriter] + DictReader, DictWriter, + array] if ctypes is not None: generic_types.extend((ctypes.Array, ctypes.LibraryLoader)) if ValueProxy is not None: @@ -205,23 +207,11 @@ class MyList(list): self.assertEqual(repr(list[str]), 'list[str]') self.assertEqual(repr(list[()]), 'list[()]') self.assertEqual(repr(tuple[int, ...]), 'tuple[int, ...]') - x1 = tuple[ - tuple( # Effectively the same as starring; TODO - tuple[int] - ) - ] + x1 = tuple[*tuple[int]] self.assertEqual(repr(x1), 'tuple[*tuple[int]]') - x2 = tuple[ - tuple( # Ditto TODO - tuple[int, str] - ) - ] + x2 = tuple[*tuple[int, str]] self.assertEqual(repr(x2), 'tuple[*tuple[int, str]]') - x3 = tuple[ - tuple( # Ditto TODO - tuple[int, ...] 
- ) - ] + x3 = tuple[*tuple[int, ...]] self.assertEqual(repr(x3), 'tuple[*tuple[int, ...]]') self.assertTrue(repr(MyList[int]).endswith('.BaseTest.test_repr.<locals>.MyList[int]')) self.assertEqual(repr(list[str]()), '[]') # instances should keep their normal repr @@ -275,42 +265,24 @@ def test_parameters(self): self.assertEqual(L5.__args__, (Callable[[K, V], K],)) self.assertEqual(L5.__parameters__, (K, V)) - T1 = tuple[ - tuple( # Ditto TODO - tuple[int] - ) - ] + T1 = tuple[*tuple[int]] self.assertEqual( T1.__args__, - tuple( # Ditto TODO - tuple[int] - ) + (*tuple[int],), ) self.assertEqual(T1.__parameters__, ()) - T2 = tuple[ - tuple( # Ditto TODO - tuple[T] - ) - ] + T2 = tuple[*tuple[T]] self.assertEqual( T2.__args__, - tuple( # Ditto TODO - tuple[T] - ) + (*tuple[T],), ) self.assertEqual(T2.__parameters__, (T,)) - T4 = tuple[ - tuple( # Ditto TODO - tuple[int, str] - ) - ] + T4 = tuple[*tuple[int, str]] self.assertEqual( T4.__args__, - tuple( # Ditto TODO - tuple[int, str] - ) + (*tuple[int, str],), ) self.assertEqual(T4.__parameters__, ()) @@ -345,18 +317,7 @@ def test_equality(self): self.assertEqual(list[int], list[int]) self.assertEqual(dict[str, int], dict[str, int]) self.assertEqual((*tuple[int],)[0], (*tuple[int],)[0]) - self.assertEqual( - tuple[ - tuple( # Effectively the same as starring; TODO - tuple[int] - ) - ], - tuple[ - tuple( # Ditto TODO - tuple[int] - ) - ] - ) + self.assertEqual(tuple[*tuple[int]], tuple[*tuple[int]]) self.assertNotEqual(dict[str, int], dict[str, str]) self.assertNotEqual(list, list[int]) self.assertNotEqual(list[int], list) diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py index 1e46cb5780c942..bdcf4a37191682 100644 --- a/Lib/test/test_getpath.py +++ b/Lib/test/test_getpath.py @@ -238,6 +238,29 @@ def test_buildtree_pythonhome_win32(self): actual = getpath(ns, expected) self.assertEqual(expected, actual) + def test_no_dlls_win32(self): + "Test a layout on Windows with no DLLs directory." 
+ ns = MockNTNamespace( + argv0=r"C:\Python\python.exe", + real_executable=r"C:\Python\python.exe", + ) + ns.add_known_xfile(r"C:\Python\python.exe") + ns.add_known_file(r"C:\Python\Lib\os.py") + expected = dict( + executable=r"C:\Python\python.exe", + base_executable=r"C:\Python\python.exe", + prefix=r"C:\Python", + exec_prefix=r"C:\Python", + module_search_paths_set=1, + module_search_paths=[ + r"C:\Python\python98.zip", + r"C:\Python\Lib", + r"C:\Python", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + def test_normal_posix(self): "Test a 'standard' install layout on *nix" ns = MockPosixNamespace( @@ -359,6 +382,70 @@ def test_venv_changed_name_posix(self): actual = getpath(ns, expected) self.assertEqual(expected, actual) + def test_venv_non_installed_zip_path_posix(self): + "Test a venv created from non-installed python has correct zip path.""" + ns = MockPosixNamespace( + argv0="/venv/bin/python", + PREFIX="/usr", + ENV_PATH="/venv/bin:/usr/bin", + ) + ns.add_known_xfile("/path/to/non-installed/bin/python") + ns.add_known_xfile("/venv/bin/python") + ns.add_known_link("/venv/bin/python", + "/path/to/non-installed/bin/python") + ns.add_known_file("/path/to/non-installed/lib/python9.8/os.py") + ns.add_known_dir("/path/to/non-installed/lib/python9.8/lib-dynload") + ns.add_known_file("/venv/pyvenv.cfg", [ + r"home = /path/to/non-installed" + ]) + expected = dict( + executable="/venv/bin/python", + prefix="/path/to/non-installed", + exec_prefix="/path/to/non-installed", + base_executable="/path/to/non-installed/bin/python", + base_prefix="/path/to/non-installed", + base_exec_prefix="/path/to/non-installed", + module_search_paths_set=1, + module_search_paths=[ + "/path/to/non-installed/lib/python98.zip", + "/path/to/non-installed/lib/python9.8", + "/path/to/non-installed/lib/python9.8/lib-dynload", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + + def test_venv_changed_name_copy_posix(self): + "Test a venv --copies layout on *nix that lacks a distributed 'python'" + ns = MockPosixNamespace( + argv0="python", + PREFIX="/usr", + ENV_PATH="/venv/bin:/usr/bin", + ) + ns.add_known_xfile("/usr/bin/python9") + ns.add_known_xfile("/venv/bin/python") + ns.add_known_file("/usr/lib/python9.8/os.py") + ns.add_known_dir("/usr/lib/python9.8/lib-dynload") + ns.add_known_file("/venv/pyvenv.cfg", [ + r"home = /usr/bin" + ]) + expected = dict( + executable="/venv/bin/python", + prefix="/usr", + exec_prefix="/usr", + base_executable="/usr/bin/python9", + base_prefix="/usr", + base_exec_prefix="/usr", + module_search_paths_set=1, + module_search_paths=[ + "/usr/lib/python98.zip", + "/usr/lib/python9.8", + "/usr/lib/python9.8/lib-dynload", + ], + ) + actual = getpath(ns, expected) + self.assertEqual(expected, actual) + def test_symlink_normal_posix(self): "Test a 'standard' install layout via symlink on *nix" ns = MockPosixNamespace( diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index b3d94e0a21cb6a..620a5b19109a8c 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1978,7 +1978,7 @@ def test_local_unknown_cert(self): self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') def test_local_good_hostname(self): - # The (valid) cert validates the HTTP hostname + # The (valid) cert validates the HTTPS hostname import ssl server = self.make_server(CERT_localhost) context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1991,7 +1991,7 @@ def test_local_good_hostname(self): 
self.assertEqual(resp.status, 404) def test_local_bad_hostname(self): - # The (valid) cert doesn't validate the HTTP hostname + # The (valid) cert doesn't validate the HTTPS hostname import ssl server = self.make_server(CERT_fakehostname) context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1999,38 +1999,21 @@ def test_local_bad_hostname(self): h = client.HTTPSConnection('localhost', server.port, context=context) with self.assertRaises(ssl.CertificateError): h.request('GET', '/') - # Same with explicit check_hostname=True - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=True) + + # Same with explicit context.check_hostname=True + context.check_hostname = True + h = client.HTTPSConnection('localhost', server.port, context=context) with self.assertRaises(ssl.CertificateError): h.request('GET', '/') - # With check_hostname=False, the mismatching is ignored - context.check_hostname = False - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=False) - h.request('GET', '/nonexistent') - resp = h.getresponse() - resp.close() - h.close() - self.assertEqual(resp.status, 404) - # The context's check_hostname setting is used if one isn't passed to - # HTTPSConnection. + + # With context.check_hostname=False, the mismatching is ignored context.check_hostname = False h = client.HTTPSConnection('localhost', server.port, context=context) h.request('GET', '/nonexistent') resp = h.getresponse() - self.assertEqual(resp.status, 404) resp.close() h.close() - # Passing check_hostname to HTTPSConnection should override the - # context's setting. - with warnings_helper.check_warnings(('', DeprecationWarning)): - h = client.HTTPSConnection('localhost', server.port, - context=context, check_hostname=True) - with self.assertRaises(ssl.CertificateError): - h.request('GET', '/') + self.assertEqual(resp.status, 404) @unittest.skipIf(not hasattr(client, 'HTTPSConnection'), 'http.client.HTTPSConnection not available') @@ -2066,11 +2049,9 @@ def test_tls13_pha(self): self.assertIs(h._context, context) self.assertFalse(h._context.post_handshake_auth) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', 'key_file, cert_file and check_hostname are deprecated', - DeprecationWarning) - h = client.HTTPSConnection('localhost', 443, context=context, - cert_file=CERT_localhost) + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT, cert_file=CERT_localhost) + context.post_handshake_auth = True + h = client.HTTPSConnection('localhost', 443, context=context) self.assertTrue(h._context.post_handshake_auth) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index b554bc0c79960d..7626d9572e1e96 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -573,15 +573,6 @@ def test_ssl_verified(self): ssl_context=ssl_context) client.shutdown() - # Mock the private method _connect(), so mark the test as specific - # to CPython stdlib - @cpython_only - def test_certfile_arg_warn(self): - with warnings_helper.check_warnings(('', DeprecationWarning)): - with mock.patch.object(self.imap_class, 'open'): - with mock.patch.object(self.imap_class, '_connect'): - self.imap_class('localhost', 143, certfile=CERTFILE) - class ThreadedNetworkedTests(unittest.TestCase): server_class = socketserver.TCPServer imap_class = imaplib.IMAP4 @@ -1070,18 +1061,6 @@ def test_logout(self): rs = _server.logout() 
self.assertEqual(rs[0], 'BYE', rs) - def test_ssl_context_certfile_exclusive(self): - with socket_helper.transient_internet(self.host): - self.assertRaises( - ValueError, self.imap_class, self.host, self.port, - certfile=CERTFILE, ssl_context=self.create_ssl_context()) - - def test_ssl_context_keyfile_exclusive(self): - with socket_helper.transient_internet(self.host): - self.assertRaises( - ValueError, self.imap_class, self.host, self.port, - keyfile=CERTFILE, ssl_context=self.create_ssl_context()) - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py index 35b6afa91ebd4f..80abc720c3251a 100644 --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -1,3 +1,4 @@ +import gc import importlib import importlib.util import os @@ -378,6 +379,69 @@ def test_find_and_load_checked_pyc(self): mod = imp.load_module('mymod', file, path, description) self.assertEqual(mod.x, 42) + def test_issue98354(self): + # _imp.create_builtin should raise TypeError + # if 'name' attribute of 'spec' argument is not a 'str' instance + + create_builtin = support.get_attribute(_imp, "create_builtin") + + class FakeSpec: + def __init__(self, name): + self.name = self + spec = FakeSpec("time") + with self.assertRaises(TypeError): + create_builtin(spec) + + class FakeSpec2: + name = [1, 2, 3, 4] + spec = FakeSpec2() + with self.assertRaises(TypeError): + create_builtin(spec) + + import builtins + class UnicodeSubclass(str): + pass + class GoodSpec: + name = UnicodeSubclass("builtins") + spec = GoodSpec() + bltin = create_builtin(spec) + self.assertEqual(bltin, builtins) + + class UnicodeSubclassFakeSpec(str): + def __init__(self, name): + self.name = self + spec = UnicodeSubclassFakeSpec("builtins") + bltin = create_builtin(spec) + self.assertEqual(bltin, builtins) + + @support.cpython_only + def test_create_builtin_subinterp(self): + # gh-99578: create_builtin() behavior changes after the creation of the + # first sub-interpreter. Test both code paths, before and after the + # creation of a sub-interpreter. Previously, create_builtin() had + # a reference leak after the creation of the first sub-interpreter. 
+ + import builtins + create_builtin = support.get_attribute(_imp, "create_builtin") + class Spec: + name = "builtins" + spec = Spec() + + def check_get_builtins(): + refcnt = sys.getrefcount(builtins) + mod = _imp.create_builtin(spec) + self.assertIs(mod, builtins) + self.assertEqual(sys.getrefcount(builtins), refcnt + 1) + # Check that a GC collection doesn't crash + gc.collect() + + check_get_builtins() + + ret = support.run_in_subinterp("import builtins") + self.assertEqual(ret, 0) + + check_get_builtins() + class ReloadTests(unittest.TestCase): diff --git a/Lib/test/test_importlib/extension/test_case_sensitivity.py b/Lib/test/test_importlib/extension/test_case_sensitivity.py index 366e565cf4b7aa..0bb74fff5fcf96 100644 --- a/Lib/test/test_importlib/extension/test_case_sensitivity.py +++ b/Lib/test/test_importlib/extension/test_case_sensitivity.py @@ -8,7 +8,7 @@ machinery = util.import_importlib('importlib.machinery') -@unittest.skipIf(util.EXTENSIONS.filename is None, '_testcapi not available') +@unittest.skipIf(util.EXTENSIONS.filename is None, f'{util.EXTENSIONS.name} not available') @util.case_insensitive_tests class ExtensionModuleCaseSensitivityTest(util.CASEOKTestBase): diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index 8570c6bc90cd07..d69192b56bacb6 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -13,9 +13,9 @@ from test.support.script_helper import assert_python_failure -class LoaderTests(abc.LoaderTests): +class LoaderTests: - """Test load_module() for extension modules.""" + """Test ExtensionFileLoader.""" def setUp(self): if not self.machinery.EXTENSION_SUFFIXES: @@ -32,15 +32,6 @@ def load_module(self, fullname): warnings.simplefilter("ignore", DeprecationWarning) return self.loader.load_module(fullname) - def test_load_module_API(self): - # Test the default argument for load_module(). - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - self.loader.load_module() - self.loader.load_module(None) - with self.assertRaises(ImportError): - self.load_module('XXX') - def test_equality(self): other = self.machinery.ExtensionFileLoader(util.EXTENSIONS.name, util.EXTENSIONS.file_path) @@ -51,6 +42,15 @@ def test_inequality(self): util.EXTENSIONS.file_path) self.assertNotEqual(self.loader, other) + def test_load_module_API(self): + # Test the default argument for load_module(). + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + self.loader.load_module() + self.loader.load_module(None) + with self.assertRaises(ImportError): + self.load_module('XXX') + def test_module(self): with util.uncache(util.EXTENSIONS.name): module = self.load_module(util.EXTENSIONS.name) @@ -68,12 +68,6 @@ def test_module(self): # No extension module in a package available for testing. test_lacking_parent = None - def test_module_reuse(self): - with util.uncache(util.EXTENSIONS.name): - module1 = self.load_module(util.EXTENSIONS.name) - module2 = self.load_module(util.EXTENSIONS.name) - self.assertIs(module1, module2) - # No easy way to trigger a failure after a successful import. 
test_state_after_failure = None @@ -83,6 +77,12 @@ def test_unloadable(self): self.load_module(name) self.assertEqual(cm.exception.name, name) + def test_module_reuse(self): + with util.uncache(util.EXTENSIONS.name): + module1 = self.load_module(util.EXTENSIONS.name) + module2 = self.load_module(util.EXTENSIONS.name) + self.assertIs(module1, module2) + def test_is_package(self): self.assertFalse(self.loader.is_package(util.EXTENSIONS.name)) for suffix in self.machinery.EXTENSION_SUFFIXES: @@ -90,10 +90,93 @@ def test_is_package(self): loader = self.machinery.ExtensionFileLoader('pkg', path) self.assertTrue(loader.is_package('pkg')) + (Frozen_LoaderTests, Source_LoaderTests ) = util.test_both(LoaderTests, machinery=machinery) + +class SinglePhaseExtensionModuleTests(abc.LoaderTests): + # Test loading extension modules without multi-phase initialization. + + def setUp(self): + if not self.machinery.EXTENSION_SUFFIXES: + raise unittest.SkipTest("Requires dynamic loading support.") + self.name = '_testsinglephase' + if self.name in sys.builtin_module_names: + raise unittest.SkipTest( + f"{self.name} is a builtin module" + ) + finder = self.machinery.FileFinder(None) + self.spec = importlib.util.find_spec(self.name) + assert self.spec + self.loader = self.machinery.ExtensionFileLoader( + self.name, self.spec.origin) + + def load_module(self): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return self.loader.load_module(self.name) + + def load_module_by_name(self, fullname): + # Load a module from the test extension by name. + origin = self.spec.origin + loader = self.machinery.ExtensionFileLoader(fullname, origin) + spec = importlib.util.spec_from_loader(fullname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + return module + + def test_module(self): + # Test loading an extension module. + with util.uncache(self.name): + module = self.load_module() + for attr, value in [('__name__', self.name), + ('__file__', self.spec.origin), + ('__package__', '')]: + self.assertEqual(getattr(module, attr), value) + with self.assertRaises(AttributeError): + module.__path__ + self.assertIs(module, sys.modules[self.name]) + self.assertIsInstance(module.__loader__, + self.machinery.ExtensionFileLoader) + + # No extension module as __init__ available for testing. + test_package = None + + # No extension module in a package available for testing. + test_lacking_parent = None + + # No easy way to trigger a failure after a successful import. + test_state_after_failure = None + + def test_unloadable(self): + name = 'asdfjkl;' + with self.assertRaises(ImportError) as cm: + self.load_module_by_name(name) + self.assertEqual(cm.exception.name, name) + + def test_unloadable_nonascii(self): + # Test behavior with nonexistent module with non-ASCII name. + name = 'fo\xf3' + with self.assertRaises(ImportError) as cm: + self.load_module_by_name(name) + self.assertEqual(cm.exception.name, name) + + # It may make sense to add the equivalent to + # the following MultiPhaseExtensionModuleTests tests: + # + # * test_nonmodule + # * test_nonmodule_with_methods + # * test_bad_modules + # * test_nonascii + + +(Frozen_SinglePhaseExtensionModuleTests, + Source_SinglePhaseExtensionModuleTests + ) = util.test_both(SinglePhaseExtensionModuleTests, machinery=machinery) + + class MultiPhaseExtensionModuleTests(abc.LoaderTests): # Test loading extension modules with multi-phase initialization (PEP 489). 
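
# A minimal illustrative sketch, not part of the patch above (assumption: the
# '_testsinglephase' test extension referenced by these tests is built as a
# shared object).  It shows what the load_module_by_name() helper added in
# SinglePhaseExtensionModuleTests boils down to: driving importlib's machinery
# by hand to load an extension module from an explicit name/path pair.
import importlib.util
from importlib.machinery import ExtensionFileLoader

def load_extension(fullname, origin):
    # The loader resolves the extension's PyInit_<name> entry point from
    # `fullname`, so the name must match the compiled init symbol.
    loader = ExtensionFileLoader(fullname, origin)
    spec = importlib.util.spec_from_loader(fullname, loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    return module

if __name__ == "__main__":
    spec = importlib.util.find_spec("_testsinglephase")
    # Only attempt the load when the module really is a file-based extension.
    if spec is not None and isinstance(spec.loader, ExtensionFileLoader):
        mod = load_extension("_testsinglephase", spec.origin)
        print(mod.__name__, mod.__file__)
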
diff --git a/Lib/test/test_importlib/import_/test_helpers.py b/Lib/test/test_importlib/import_/test_helpers.py new file mode 100644 index 00000000000000..550f88d1d7a651 --- /dev/null +++ b/Lib/test/test_importlib/import_/test_helpers.py @@ -0,0 +1,184 @@ +"""Tests for helper functions used by import.c .""" + +from importlib import _bootstrap_external, machinery +import os.path +from types import ModuleType, SimpleNamespace +import unittest +import warnings + +from .. import util + + +class FixUpModuleTests: + + def test_no_loader_but_spec(self): + loader = object() + name = "hello" + path = "hello.py" + spec = machinery.ModuleSpec(name, loader) + ns = {"__spec__": spec} + _bootstrap_external._fix_up_module(ns, name, path) + + expected = {"__spec__": spec, "__loader__": loader, "__file__": path, + "__cached__": None} + self.assertEqual(ns, expected) + + def test_no_loader_no_spec_but_sourceless(self): + name = "hello" + path = "hello.py" + ns = {} + _bootstrap_external._fix_up_module(ns, name, path, path) + + expected = {"__file__": path, "__cached__": path} + + for key, val in expected.items(): + with self.subTest(f"{key}: {val}"): + self.assertEqual(ns[key], val) + + spec = ns["__spec__"] + self.assertIsInstance(spec, machinery.ModuleSpec) + self.assertEqual(spec.name, name) + self.assertEqual(spec.origin, os.path.abspath(path)) + self.assertEqual(spec.cached, os.path.abspath(path)) + self.assertIsInstance(spec.loader, machinery.SourcelessFileLoader) + self.assertEqual(spec.loader.name, name) + self.assertEqual(spec.loader.path, path) + self.assertEqual(spec.loader, ns["__loader__"]) + + def test_no_loader_no_spec_but_source(self): + name = "hello" + path = "hello.py" + ns = {} + _bootstrap_external._fix_up_module(ns, name, path) + + expected = {"__file__": path, "__cached__": None} + + for key, val in expected.items(): + with self.subTest(f"{key}: {val}"): + self.assertEqual(ns[key], val) + + spec = ns["__spec__"] + self.assertIsInstance(spec, machinery.ModuleSpec) + self.assertEqual(spec.name, name) + self.assertEqual(spec.origin, os.path.abspath(path)) + self.assertIsInstance(spec.loader, machinery.SourceFileLoader) + self.assertEqual(spec.loader.name, name) + self.assertEqual(spec.loader.path, path) + self.assertEqual(spec.loader, ns["__loader__"]) + + +FrozenFixUpModuleTests, SourceFixUpModuleTests = util.test_both(FixUpModuleTests) + + +class TestBlessMyLoader(unittest.TestCase): + # GH#86298 is part of the migration away from module attributes and toward + # __spec__ attributes. There are several cases to test here. This will + # have to change in Python 3.14 when we actually remove/ignore __loader__ + # in favor of requiring __spec__.loader. + + def test_gh86298_no_loader_and_no_spec(self): + bar = ModuleType('bar') + del bar.__loader__ + del bar.__spec__ + # 2022-10-06(warsaw): For backward compatibility with the + # implementation in _warnings.c, this can't raise an + # AttributeError. See _bless_my_loader() in _bootstrap_external.py + # If working with a module: + ## self.assertRaises( + ## AttributeError, _bootstrap_external._bless_my_loader, + ## bar.__dict__) + self.assertIsNone(_bootstrap_external._bless_my_loader(bar.__dict__)) + + def test_gh86298_loader_is_none_and_no_spec(self): + bar = ModuleType('bar') + bar.__loader__ = None + del bar.__spec__ + # 2022-10-06(warsaw): For backward compatibility with the + # implementation in _warnings.c, this can't raise an + # AttributeError. 
See _bless_my_loader() in _bootstrap_external.py + # If working with a module: + ## self.assertRaises( + ## AttributeError, _bootstrap_external._bless_my_loader, + ## bar.__dict__) + self.assertIsNone(_bootstrap_external._bless_my_loader(bar.__dict__)) + + def test_gh86298_no_loader_and_spec_is_none(self): + bar = ModuleType('bar') + del bar.__loader__ + bar.__spec__ = None + self.assertRaises( + ValueError, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_loader_is_none_and_spec_is_none(self): + bar = ModuleType('bar') + bar.__loader__ = None + bar.__spec__ = None + self.assertRaises( + ValueError, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_loader_is_none_and_spec_loader_is_none(self): + bar = ModuleType('bar') + bar.__loader__ = None + bar.__spec__ = SimpleNamespace(loader=None) + self.assertRaises( + ValueError, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_no_spec(self): + bar = ModuleType('bar') + bar.__loader__ = object() + del bar.__spec__ + with warnings.catch_warnings(): + self.assertWarns( + DeprecationWarning, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_spec_is_none(self): + bar = ModuleType('bar') + bar.__loader__ = object() + bar.__spec__ = None + with warnings.catch_warnings(): + self.assertWarns( + DeprecationWarning, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_no_spec_loader(self): + bar = ModuleType('bar') + bar.__loader__ = object() + bar.__spec__ = SimpleNamespace() + with warnings.catch_warnings(): + self.assertWarns( + DeprecationWarning, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_loader_and_spec_loader_disagree(self): + bar = ModuleType('bar') + bar.__loader__ = object() + bar.__spec__ = SimpleNamespace(loader=object()) + with warnings.catch_warnings(): + self.assertWarns( + DeprecationWarning, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_no_loader_and_no_spec_loader(self): + bar = ModuleType('bar') + del bar.__loader__ + bar.__spec__ = SimpleNamespace() + self.assertRaises( + AttributeError, + _bootstrap_external._bless_my_loader, bar.__dict__) + + def test_gh86298_no_loader_with_spec_loader_okay(self): + bar = ModuleType('bar') + del bar.__loader__ + loader = object() + bar.__spec__ = SimpleNamespace(loader=loader) + self.assertEqual( + _bootstrap_external._bless_my_loader(bar.__dict__), + loader) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_importlib/resources/test_reader.py b/Lib/test/test_importlib/resources/test_reader.py index 9d20c976b82505..4fd9e6bbe4281c 100644 --- a/Lib/test/test_importlib/resources/test_reader.py +++ b/Lib/test/test_importlib/resources/test_reader.py @@ -75,6 +75,11 @@ def test_join_path(self): str(path.joinpath('imaginary'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'imaginary'), ) + self.assertEqual(path.joinpath(), path) + + def test_join_path_compound(self): + path = MultiplexedPath(self.folder) + assert not path.joinpath('imaginary/foo.py').exists() def test_repr(self): self.assertEqual( diff --git a/Lib/test/test_importlib/resources/test_resource.py b/Lib/test/test_importlib/resources/test_resource.py index 1d6df0cc843173..f7e3abbdc805a7 100644 --- a/Lib/test/test_importlib/resources/test_resource.py +++ b/Lib/test/test_importlib/resources/test_resource.py @@ -111,6 +111,14 @@ def test_submodule_contents_by_name(self): {'__init__.py', 'binary.file'}, ) + def 
test_as_file_directory(self): + with resources.as_file(resources.files('ziptestdata')) as data: + assert data.name == 'ziptestdata' + assert data.is_dir() + assert data.joinpath('subdirectory').is_dir() + assert len(list(data.iterdir())) + assert not data.parent.exists() + class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): ZIP_MODULE = zipdata02 # type: ignore diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 8641b6cc683052..3c9149c4e45a92 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -771,13 +771,7 @@ def verify_code(self, code_object): class SourceOnlyLoaderTests(SourceLoaderTestHarness): - - """Test importlib.abc.SourceLoader for source-only loading. - - Reload testing is subsumed by the tests for - importlib.util.module_for_loader. - - """ + """Test importlib.abc.SourceLoader for source-only loading.""" def test_get_source(self): # Verify the source code is returned as a string. diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index d9d067c4b23d66..30b68b6ae7d86e 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -1,8 +1,6 @@ import re -import json import pickle import unittest -import warnings import importlib.metadata try: @@ -260,14 +258,6 @@ def test_hashable(self): """EntryPoints should be hashable""" hash(self.ep) - def test_json_dump(self): - """ - json should not expect to be able to dump an EntryPoint - """ - with self.assertRaises(Exception): - with warnings.catch_warnings(record=True): - json.dumps(self.ep) - def test_module(self): assert self.ep.module == 'value' diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py index 69c78e9820c044..71c47e62d27124 100644 --- a/Lib/test/test_importlib/test_metadata_api.py +++ b/Lib/test/test_importlib/test_metadata_api.py @@ -124,62 +124,6 @@ def test_entry_points_missing_name(self): def test_entry_points_missing_group(self): assert entry_points(group='missing') == () - def test_entry_points_dict_construction(self): - """ - Prior versions of entry_points() returned simple lists and - allowed casting those lists into maps by name using ``dict()``. - Capture this now deprecated use-case. - """ - with suppress_known_deprecation() as caught: - eps = dict(entry_points(group='entries')) - - assert 'main' in eps - assert eps['main'] == entry_points(group='entries')['main'] - - # check warning - expected = next(iter(caught)) - assert expected.category is DeprecationWarning - assert "Construction of dict of EntryPoints is deprecated" in str(expected) - - def test_entry_points_by_index(self): - """ - Prior versions of Distribution.entry_points would return a - tuple that allowed access by index. - Capture this now deprecated use-case - See python/importlib_metadata#300 and bpo-44246. - """ - eps = distribution('distinfo-pkg').entry_points - with suppress_known_deprecation() as caught: - eps[0] - - # check warning - expected = next(iter(caught)) - assert expected.category is DeprecationWarning - assert "Accessing entry points by index is deprecated" in str(expected) - - def test_entry_points_groups_getitem(self): - """ - Prior versions of entry_points() returned a dict. Ensure - that callers using '.__getitem__()' are supported but warned to - migrate. 
- """ - with suppress_known_deprecation(): - entry_points()['entries'] == entry_points(group='entries') - - with self.assertRaises(KeyError): - entry_points()['missing'] - - def test_entry_points_groups_get(self): - """ - Prior versions of entry_points() returned a dict. Ensure - that callers using '.get()' are supported but warned to - migrate. - """ - with suppress_known_deprecation(): - entry_points().get('missing', 'default') == 'default' - entry_points().get('entries', 'default') == entry_points()['entries'] - entry_points().get('missing', ()) == () - def test_entry_points_allows_no_attributes(self): ep = entry_points().select(group='entries', name='main') with self.assertRaises(AttributeError): diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index f451f7547b35bb..65428c3d3ead9b 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -79,6 +79,10 @@ def test_cant_import_other(self): with self.assertRaises(ImportError): import foo.two + def test_simple_repr(self): + import foo.one + assert repr(foo).startswith(" C -> B -> D -> A # NOTE: `time` is already loaded and therefore doesn't threaten to deadlock. diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index e70971e9d3bc84..08a615ecf5288b 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -121,184 +121,6 @@ def test___cached__(self): util=importlib_util) -class ModuleForLoaderTests: - - """Tests for importlib.util.module_for_loader.""" - - @classmethod - def module_for_loader(cls, func): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - return cls.util.module_for_loader(func) - - def test_warning(self): - # Should raise a PendingDeprecationWarning when used. - with warnings.catch_warnings(): - warnings.simplefilter('error', DeprecationWarning) - with self.assertRaises(DeprecationWarning): - func = self.util.module_for_loader(lambda x: x) - - def return_module(self, name): - fxn = self.module_for_loader(lambda self, module: module) - return fxn(self, name) - - def raise_exception(self, name): - def to_wrap(self, module): - raise ImportError - fxn = self.module_for_loader(to_wrap) - try: - fxn(self, name) - except ImportError: - pass - - def test_new_module(self): - # Test that when no module exists in sys.modules a new module is - # created. - module_name = 'a.b.c' - with util.uncache(module_name): - module = self.return_module(module_name) - self.assertIn(module_name, sys.modules) - self.assertIsInstance(module, types.ModuleType) - self.assertEqual(module.__name__, module_name) - - def test_reload(self): - # Test that a module is reused if already in sys.modules. - class FakeLoader: - def is_package(self, name): - return True - @self.module_for_loader - def load_module(self, module): - return module - name = 'a.b.c' - module = types.ModuleType('a.b.c') - module.__loader__ = 42 - module.__package__ = 42 - with util.uncache(name): - sys.modules[name] = module - loader = FakeLoader() - returned_module = loader.load_module(name) - self.assertIs(returned_module, sys.modules[name]) - self.assertEqual(module.__loader__, loader) - self.assertEqual(module.__package__, name) - - def test_new_module_failure(self): - # Test that a module is removed from sys.modules if added but an - # exception is raised. 
- name = 'a.b.c' - with util.uncache(name): - self.raise_exception(name) - self.assertNotIn(name, sys.modules) - - def test_reload_failure(self): - # Test that a failure on reload leaves the module in-place. - name = 'a.b.c' - module = types.ModuleType(name) - with util.uncache(name): - sys.modules[name] = module - self.raise_exception(name) - self.assertIs(module, sys.modules[name]) - - def test_decorator_attrs(self): - def fxn(self, module): pass - wrapped = self.module_for_loader(fxn) - self.assertEqual(wrapped.__name__, fxn.__name__) - self.assertEqual(wrapped.__qualname__, fxn.__qualname__) - - def test_false_module(self): - # If for some odd reason a module is considered false, still return it - # from sys.modules. - class FalseModule(types.ModuleType): - def __bool__(self): return False - - name = 'mod' - module = FalseModule(name) - with util.uncache(name): - self.assertFalse(module) - sys.modules[name] = module - given = self.return_module(name) - self.assertIs(given, module) - - def test_attributes_set(self): - # __name__, __loader__, and __package__ should be set (when - # is_package() is defined; undefined implicitly tested elsewhere). - class FakeLoader: - def __init__(self, is_package): - self._pkg = is_package - def is_package(self, name): - return self._pkg - @self.module_for_loader - def load_module(self, module): - return module - - name = 'pkg.mod' - with util.uncache(name): - loader = FakeLoader(False) - module = loader.load_module(name) - self.assertEqual(module.__name__, name) - self.assertIs(module.__loader__, loader) - self.assertEqual(module.__package__, 'pkg') - - name = 'pkg.sub' - with util.uncache(name): - loader = FakeLoader(True) - module = loader.load_module(name) - self.assertEqual(module.__name__, name) - self.assertIs(module.__loader__, loader) - self.assertEqual(module.__package__, name) - - -(Frozen_ModuleForLoaderTests, - Source_ModuleForLoaderTests - ) = util.test_both(ModuleForLoaderTests, util=importlib_util) - - -class SetLoaderTests: - - """Tests importlib.util.set_loader().""" - - @property - def DummyLoader(self): - # Set DummyLoader on the class lazily. 
- class DummyLoader: - @self.util.set_loader - def load_module(self, module): - return self.module - self.__class__.DummyLoader = DummyLoader - return DummyLoader - - def test_no_attribute(self): - loader = self.DummyLoader() - loader.module = types.ModuleType('blah') - try: - del loader.module.__loader__ - except AttributeError: - pass - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.assertEqual(loader, loader.load_module('blah').__loader__) - - def test_attribute_is_None(self): - loader = self.DummyLoader() - loader.module = types.ModuleType('blah') - loader.module.__loader__ = None - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.assertEqual(loader, loader.load_module('blah').__loader__) - - def test_not_reset(self): - loader = self.DummyLoader() - loader.module = types.ModuleType('blah') - loader.module.__loader__ = 42 - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.assertEqual(42, loader.load_module('blah').__loader__) - - -(Frozen_SetLoaderTests, - Source_SetLoaderTests - ) = util.test_both(SetLoaderTests, util=importlib_util) - - class ResolveNameTests: """Tests importlib.util.resolve_name().""" diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index c07ac2a64c289f..9032fd18d3f95b 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -27,7 +27,7 @@ EXTENSIONS.ext = None EXTENSIONS.filename = None EXTENSIONS.file_path = None -EXTENSIONS.name = '_testcapi' +EXTENSIONS.name = '_testsinglephase' def _extension_details(): global EXTENSIONS @@ -298,7 +298,7 @@ def writes_bytecode_files(fxn): """Decorator to protect sys.dont_write_bytecode from mutation and to skip tests that require it to be set to False.""" if sys.dont_write_bytecode: - return lambda *args, **kwargs: None + return unittest.skip("relies on writing bytecode")(fxn) @functools.wraps(fxn) def wrapper(*args, **kwargs): original = sys.dont_write_bytecode diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index be9f29e04ae110..3f5c299ce681c5 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -1421,6 +1421,13 @@ def wrapper(a, b): self.assertEqual(inspect.get_annotations(isa.MyClassWithLocalAnnotations, eval_str=True), {'x': int}) +class TestFormatAnnotation(unittest.TestCase): + def test_typing_replacement(self): + from test.typinganndata.ann_module9 import ann, ann1 + self.assertEqual(inspect.formatannotation(ann), 'Union[List[str], int]') + self.assertEqual(inspect.formatannotation(ann1), 'Union[List[testModule.typing.A], int]') + + class TestIsDataDescriptor(unittest.TestCase): def test_custom_descriptors(self): @@ -2953,8 +2960,6 @@ def foo(a): pass self.assertEqual(str(inspect.signature(foo)), '(a)') def test_signature_on_decorated(self): - import functools - def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs) -> int: @@ -2966,6 +2971,8 @@ class Foo: def bar(self, a, b): pass + bar = decorator(Foo().bar) + self.assertEqual(self.signature(Foo.bar), ((('self', ..., ..., "positional_or_keyword"), ('a', ..., ..., "positional_or_keyword"), @@ -2984,6 +2991,11 @@ def bar(self, a, b): # from "func" to "wrapper", hence no # return_annotation + self.assertEqual(self.signature(bar), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword")), + ...)) + # Test that we handle method wrappers correctly def decorator(func): @functools.wraps(func) @@ -3891,7 
+3903,8 @@ def test(foo, *, bar): self.call(test, 1, bar=2, spam='ham') with self.assertRaisesRegex(TypeError, - "missing a required argument: 'bar'"): + "missing a required keyword-only " + "argument: 'bar'"): self.call(test, 1) def test(foo, *, bar, **bin): @@ -4358,8 +4371,11 @@ def test_details(self): 'unittest', '--details') output = out.decode() # Just a quick sanity check on the output + self.assertIn(module.__spec__.name, output) self.assertIn(module.__name__, output) + self.assertIn(module.__spec__.origin, output) self.assertIn(module.__file__, output) + self.assertIn(module.__spec__.cached, output) self.assertIn(module.__cached__, output) self.assertEqual(err, b'') diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index 625c388cd947b9..334fea0774be51 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -2,10 +2,16 @@ import time import unittest +from unittest import mock from test import support from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) +try: + import _pylong +except ImportError: + _pylong = None + L = [ ('0', 0), ('1', 1), @@ -795,5 +801,85 @@ class IntSubclassStrDigitLimitsTests(IntStrDigitLimitsTests): int_class = IntSubclass +class PyLongModuleTests(unittest.TestCase): + # Tests of the functions in _pylong.py. Those get used when the + # number of digits in the input values are large enough. + + def setUp(self): + super().setUp() + self._previous_limit = sys.get_int_max_str_digits() + sys.set_int_max_str_digits(0) + + def tearDown(self): + sys.set_int_max_str_digits(self._previous_limit) + super().tearDown() + + def test_pylong_int_to_decimal(self): + n = (1 << 100_000) - 1 + suffix = '9883109375' + s = str(n) + assert s[-10:] == suffix + s = str(-n) + assert s[-10:] == suffix + s = '%d' % n + assert s[-10:] == suffix + s = b'%d' % n + assert s[-10:] == suffix.encode('ascii') + + def test_pylong_int_divmod(self): + n = (1 << 100_000) + a, b = divmod(n*3 + 1, n) + assert a == 3 and b == 1 + + def test_pylong_str_to_int(self): + v1 = 1 << 100_000 + s = str(v1) + v2 = int(s) + assert v1 == v2 + v3 = int(' -' + s) + assert -v1 == v3 + v4 = int(' +' + s + ' ') + assert v1 == v4 + with self.assertRaises(ValueError) as err: + int(s + 'z') + with self.assertRaises(ValueError) as err: + int(s + '_') + with self.assertRaises(ValueError) as err: + int('_' + s) + + @support.cpython_only # tests implementation details of CPython. + @unittest.skipUnless(_pylong, "_pylong module required") + @mock.patch.object(_pylong, "int_to_decimal_string") + def test_pylong_misbehavior_error_path_to_str( + self, mock_int_to_str): + with support.adjust_int_max_str_digits(20_000): + big_value = int('7'*19_999) + mock_int_to_str.return_value = None # not a str + with self.assertRaises(TypeError) as ctx: + str(big_value) + self.assertIn('_pylong.int_to_decimal_string did not', + str(ctx.exception)) + mock_int_to_str.side_effect = RuntimeError("testABC") + with self.assertRaises(RuntimeError): + str(big_value) + + @support.cpython_only # tests implementation details of CPython. 
+ @unittest.skipUnless(_pylong, "_pylong module required") + @mock.patch.object(_pylong, "int_from_string") + def test_pylong_misbehavior_error_path_from_str( + self, mock_int_from_str): + big_value = '7'*19_999 + with support.adjust_int_max_str_digits(20_000): + mock_int_from_str.return_value = b'not an int' + with self.assertRaises(TypeError) as ctx: + int(big_value) + self.assertIn('_pylong.int_from_string did not', + str(ctx.exception)) + + mock_int_from_str.side_effect = RuntimeError("test123") + with self.assertRaises(RuntimeError): + int(big_value) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 24c93b969ea2b7..c927f15aafef72 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -888,6 +888,14 @@ def badopener(fname, flags): open('non-existent', 'r', opener=badopener) self.assertEqual(str(cm.exception), 'opener returned -2') + def test_opener_invalid_fd(self): + # Check that OSError is raised with error code EBADF if the + # opener returns an invalid file descriptor (see gh-82212). + fd = os_helper.make_bad_fd() + with self.assertRaises(OSError) as cm: + self.open('foo', opener=lambda name, flags: fd) + self.assertEqual(cm.exception.errno, errno.EBADF) + def test_fileio_closefd(self): # Issue #4841 with self.open(__file__, 'rb') as f1, \ @@ -1531,11 +1539,25 @@ def test_no_extraneous_read(self): def test_read_on_closed(self): # Issue #23796 - b = io.BufferedReader(io.BytesIO(b"12")) + b = self.BufferedReader(self.BytesIO(b"12")) b.read(1) b.close() - self.assertRaises(ValueError, b.peek) - self.assertRaises(ValueError, b.read1, 1) + with self.subTest('peek'): + self.assertRaises(ValueError, b.peek) + with self.subTest('read1'): + self.assertRaises(ValueError, b.read1, 1) + with self.subTest('read'): + self.assertRaises(ValueError, b.read) + with self.subTest('readinto'): + self.assertRaises(ValueError, b.readinto, bytearray()) + with self.subTest('readinto1'): + self.assertRaises(ValueError, b.readinto1, bytearray()) + with self.subTest('flush'): + self.assertRaises(ValueError, b.flush) + with self.subTest('truncate'): + self.assertRaises(ValueError, b.truncate) + with self.subTest('seek'): + self.assertRaises(ValueError, b.seek, 0) def test_truncate_on_read_only(self): rawio = self.MockFileIO(b"abc") @@ -1593,10 +1615,10 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedReader"): - self.tp(io.BytesIO(), 1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) def test_bad_readinto_value(self): - rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio = self.tp(self.BytesIO(b"12")) rawio.readinto = lambda buf: -1 bufio = self.tp(rawio) with self.assertRaises(OSError) as cm: @@ -1604,7 +1626,7 @@ def test_bad_readinto_value(self): self.assertIsNone(cm.exception.__cause__) def test_bad_readinto_type(self): - rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio = self.tp(self.BytesIO(b"12")) rawio.readinto = lambda buf: b'' bufio = self.tp(rawio) with self.assertRaises(OSError) as cm: @@ -1747,7 +1769,7 @@ def test_write_non_blocking(self): self.assertTrue(s.startswith(b"01234567A"), s) def test_write_and_rewind(self): - raw = io.BytesIO() + raw = self.BytesIO() bufio = self.tp(raw, 4) self.assertEqual(bufio.write(b"abcdef"), 6) self.assertEqual(bufio.tell(), 6) @@ -1957,7 +1979,7 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedWriter"): - self.tp(io.BytesIO(), 
1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) class PyBufferedWriterTest(BufferedWriterTest): @@ -2433,7 +2455,7 @@ def test_garbage_collection(self): def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedRandom"): - self.tp(io.BytesIO(), 1024, 1024, 1024) + self.tp(self.BytesIO(), 1024, 1024, 1024) class PyBufferedRandomTest(BufferedRandomTest): @@ -3465,7 +3487,7 @@ def test_illegal_encoder(self): # encode() is invalid shouldn't cause an assertion failure. rot13 = codecs.lookup("rot13") with support.swap_attr(rot13, '_is_text_encoding', True): - t = io.TextIOWrapper(io.BytesIO(b'foo'), encoding="rot13") + t = self.TextIOWrapper(self.BytesIO(b'foo'), encoding="rot13") self.assertRaises(TypeError, t.write, 'bar') def test_illegal_decoder(self): @@ -3571,7 +3593,7 @@ def seekable(self): return True t = self.TextIOWrapper(F(), encoding='utf-8') def test_reconfigure_locale(self): - wrapper = io.TextIOWrapper(io.BytesIO(b"test")) + wrapper = self.TextIOWrapper(self.BytesIO(b"test")) wrapper.reconfigure(encoding="locale") def test_reconfigure_encoding_read(self): @@ -3741,7 +3763,7 @@ def test_garbage_collection(self): # all data to disk. # The Python version has __del__, so it ends in gc.garbage instead. with warnings_helper.check_warnings(('', ResourceWarning)): - rawio = io.FileIO(os_helper.TESTFN, "wb") + rawio = self.FileIO(os_helper.TESTFN, "wb") b = self.BufferedWriter(rawio) t = self.TextIOWrapper(b, encoding="ascii") t.write("456def") diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index f469bfe185e65b..a0a740fba8e8e3 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -159,6 +159,44 @@ def test_accumulate(self): with self.assertRaises(TypeError): list(accumulate([10, 20], 100)) + def test_batched(self): + self.assertEqual(list(batched('ABCDEFG', 3)), + [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]) + self.assertEqual(list(batched('ABCDEFG', 2)), + [['A', 'B'], ['C', 'D'], ['E', 'F'], ['G']]) + self.assertEqual(list(batched('ABCDEFG', 1)), + [['A'], ['B'], ['C'], ['D'], ['E'], ['F'], ['G']]) + + with self.assertRaises(TypeError): # Too few arguments + list(batched('ABCDEFG')) + with self.assertRaises(TypeError): + list(batched('ABCDEFG', 3, None)) # Too many arguments + with self.assertRaises(TypeError): + list(batched(None, 3)) # Non-iterable input + with self.assertRaises(TypeError): + list(batched('ABCDEFG', 'hello')) # n is a string + with self.assertRaises(ValueError): + list(batched('ABCDEFG', 0)) # n is zero + with self.assertRaises(ValueError): + list(batched('ABCDEFG', -1)) # n is negative + + data = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + for n in range(1, 6): + for i in range(len(data)): + s = data[:i] + batches = list(batched(s, n)) + with self.subTest(s=s, n=n, batches=batches): + # Order is preserved and no data is lost + self.assertEqual(''.join(chain(*batches)), s) + # Each batch is an exact list + self.assertTrue(all(type(batch) is list for batch in batches)) + # All but the last batch is of size n + if batches: + last_batch = batches.pop() + self.assertTrue(all(len(batch) == n for batch in batches)) + self.assertTrue(len(last_batch) <= n) + batches.append(last_batch) + def test_chain(self): def chain2(*iterables): @@ -1737,6 +1775,31 @@ def test_takewhile(self): class TestPurePythonRoughEquivalents(unittest.TestCase): + def test_batched_recipe(self): + def batched_recipe(iterable, n): + "Batch data into lists of length n. The last batch may be shorter." 
+ # batched('ABCDEFG', 3) --> ABC DEF G + if n < 1: + raise ValueError('n must be at least one') + it = iter(iterable) + while (batch := list(islice(it, n))): + yield batch + + for iterable, n in product( + ['', 'a', 'ab', 'abc', 'abcd', 'abcde', 'abcdef', 'abcdefg', None], + [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, None]): + with self.subTest(iterable=iterable, n=n): + try: + e1, r1 = None, list(batched(iterable, n)) + except Exception as e: + e1, r1 = type(e), None + try: + e2, r2 = None, list(batched_recipe(iterable, n)) + except Exception as e: + e2, r2 = type(e), None + self.assertEqual(r1, r2) + self.assertEqual(e1, e2) + @staticmethod def islice(iterable, *args): s = slice(*args) @@ -1788,6 +1851,10 @@ def test_accumulate(self): a = [] self.makecycle(accumulate([1,2,a,3]), a) + def test_batched(self): + a = [] + self.makecycle(batched([1,2,a,3], 2), a) + def test_chain(self): a = [] self.makecycle(chain(a), a) @@ -1945,6 +2012,20 @@ def __iter__(self): def __next__(self): 3 // 0 +class E2: + 'Test propagation of exceptions after two iterations' + def __init__(self, seqn): + self.seqn = seqn + self.i = 0 + def __iter__(self): + return self + def __next__(self): + if self.i == 2: + raise ZeroDivisionError + v = self.seqn[self.i] + self.i += 1 + return v + class S: 'Test immediate stop' def __init__(self, seqn): @@ -1972,6 +2053,19 @@ def test_accumulate(self): self.assertRaises(TypeError, accumulate, N(s)) self.assertRaises(ZeroDivisionError, list, accumulate(E(s))) + def test_batched(self): + s = 'abcde' + r = [['a', 'b'], ['c', 'd'], ['e']] + n = 2 + for g in (G, I, Ig, L, R): + with self.subTest(g=g): + self.assertEqual(list(batched(g(s), n)), r) + self.assertEqual(list(batched(S(s), 2)), []) + self.assertRaises(TypeError, batched, X(s), 2) + self.assertRaises(TypeError, batched, N(s), 2) + self.assertRaises(ZeroDivisionError, list, batched(E(s), 2)) + self.assertRaises(ZeroDivisionError, list, batched(E2(s), 4)) + def test_chain(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py index be0cd90c7906f5..47152d4a3c00e6 100644 --- a/Lib/test/test_launcher.py +++ b/Lib/test/test_launcher.py @@ -173,7 +173,7 @@ def find_py(cls): errors="ignore", ) as p: p.stdin.close() - version = next(p.stdout).splitlines()[0].rpartition(" ")[2] + version = next(p.stdout, "\n").splitlines()[0].rpartition(" ")[2] p.stdout.read() p.wait(10) if not sys.version.startswith(version): @@ -369,6 +369,13 @@ def test_filter_to_company(self): self.assertEqual(company, data["env.company"]) self.assertEqual("3.100", data["env.tag"]) + def test_filter_to_company_with_default(self): + company = "PythonTestSuite" + data = self.run_py([f"-V:{company}/"], env=dict(PY_PYTHON="3.0")) + self.assertEqual("X.Y.exe", data["LaunchCommand"]) + self.assertEqual(company, data["env.company"]) + self.assertEqual("3.100", data["env.tag"]) + def test_filter_to_tag(self): company = "PythonTestSuite" data = self.run_py([f"-V:3.100"]) @@ -460,6 +467,15 @@ def test_py3_default_env(self): self.assertEqual("3.100-arm64", data["SearchInfo.tag"]) self.assertEqual("X.Y-arm64.exe -X fake_arg_for_test -arg", data["stdout"].strip()) + def test_py_default_short_argv0(self): + with self.py_ini(TEST_PY_COMMANDS): + for argv0 in ['"py.exe"', 'py.exe', '"py"', 'py']: + with self.subTest(argv0): + data = self.run_py(["--version"], argv=f'{argv0} --version') + self.assertEqual("PythonTestSuite", data["SearchInfo.company"]) + 
self.assertEqual("3.100", data["SearchInfo.tag"]) + self.assertEqual(f'X.Y.exe --version', data["stdout"].strip()) + def test_py_default_in_list(self): data = self.run_py(["-0"], env=TEST_PY_ENV) default = None @@ -509,6 +525,14 @@ def test_py_shebang(self): self.assertEqual("3.100", data["SearchInfo.tag"]) self.assertEqual(f"X.Y.exe -prearg {script} -postarg", data["stdout"].strip()) + def test_python_shebang(self): + with self.py_ini(TEST_PY_COMMANDS): + with self.script("#! python -prearg") as script: + data = self.run_py([script, "-postarg"]) + self.assertEqual("PythonTestSuite", data["SearchInfo.company"]) + self.assertEqual("3.100", data["SearchInfo.tag"]) + self.assertEqual(f"X.Y.exe -prearg {script} -postarg", data["stdout"].strip()) + def test_py2_shebang(self): with self.py_ini(TEST_PY_COMMANDS): with self.script("#! /usr/bin/python2 -prearg") as script: @@ -610,3 +634,42 @@ def test_install(self): self.assertIn("winget.exe", cmd) # Both command lines include the store ID self.assertIn("9PJPW5LDXLZ5", cmd) + + def test_literal_shebang_absolute(self): + with self.script(f"#! C:/some_random_app -witharg") as script: + data = self.run_py([script]) + self.assertEqual( + f"C:\\some_random_app -witharg {script}", + data["stdout"].strip(), + ) + + def test_literal_shebang_relative(self): + with self.script(f"#! ..\\some_random_app -witharg") as script: + data = self.run_py([script]) + self.assertEqual( + f"{script.parent.parent}\\some_random_app -witharg {script}", + data["stdout"].strip(), + ) + + def test_literal_shebang_quoted(self): + with self.script(f'#! "some random app" -witharg') as script: + data = self.run_py([script]) + self.assertEqual( + f'"{script.parent}\\some random app" -witharg {script}', + data["stdout"].strip(), + ) + + with self.script(f'#! some" random "app -witharg') as script: + data = self.run_py([script]) + self.assertEqual( + f'"{script.parent}\\some random app" -witharg {script}', + data["stdout"].strip(), + ) + + def test_literal_shebang_quoted_escape(self): + with self.script(f'#! 
some\\" random "app -witharg') as script: + data = self.run_py([script]) + self.assertEqual( + f'"{script.parent}\\some\\ random app" -witharg {script}', + data["stdout"].strip(), + ) diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index 9aa6dd1095668b..2969c6e2f98a23 100644 --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -103,7 +103,7 @@ def test_list_resize_overflow(self): del lst[1:] self.assertEqual(len(lst), 1) - size = ((2 ** (tuple.__itemsize__ * 8) - 1) // 2) + size = sys.maxsize with self.assertRaises((MemoryError, OverflowError)): lst * size with self.assertRaises((MemoryError, OverflowError)): diff --git a/Lib/test/test_lltrace.py b/Lib/test/test_lltrace.py index 7cf89846f8a727..747666e256700e 100644 --- a/Lib/test/test_lltrace.py +++ b/Lib/test/test_lltrace.py @@ -8,7 +8,7 @@ def example(): x = [] - for i in range(1): + for i in range(0): x.append(i) x = "this is" y = "an example" @@ -75,7 +75,7 @@ def test_lltrace_different_module(self): self.assertIn('this is an example', stdout) # check that offsets match the output of dis.dis() - instr_map = {i.offset: i for i in dis.get_instructions(example)} + instr_map = {i.offset: i for i in dis.get_instructions(example, adaptive=True)} for line in stdout.splitlines(): offset, colon, opname_oparg = line.partition(":") if not colon: diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index d70bfd6b09e13d..072056d3722106 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -47,6 +47,7 @@ from test.support import socket_helper from test.support import threading_helper from test.support import warnings_helper +from test.support import asyncore from test.support.logging_helper import TestHandler import textwrap import threading @@ -64,9 +65,6 @@ with warnings.catch_warnings(): from . 
import smtpd -asyncore = warnings_helper.import_deprecated('asyncore') - - try: import win32evtlog, win32evtlogutil, pywintypes except ImportError: diff --git a/Lib/test/test_long.py b/Lib/test/test_long.py index b6407b5a7c881e..77b37ca1fa4afe 100644 --- a/Lib/test/test_long.py +++ b/Lib/test/test_long.py @@ -1334,6 +1334,12 @@ def equivalent_python(n, length, byteorder, signed=False): b'\xff\xff\xff\xff\xff') self.assertRaises(OverflowError, (1).to_bytes, 0, 'big') + # gh-98783 + class SubStr(str): + pass + self.assertEqual((0).to_bytes(1, SubStr('big')), b'\x00') + self.assertEqual((0).to_bytes(0, SubStr('little')), b'') + def test_from_bytes(self): def check(tests, byteorder, signed=False): def equivalent_python(byte_array, byteorder, signed=False): @@ -1534,6 +1540,12 @@ def __bytes__(self): self.assertRaises(TypeError, int.from_bytes, MissingBytes()) self.assertRaises(ZeroDivisionError, int.from_bytes, RaisingBytes()) + # gh-98783 + class SubStr(str): + pass + self.assertEqual(int.from_bytes(b'', SubStr('big')), 0) + self.assertEqual(int.from_bytes(b'\x00', SubStr('little')), 0) + @support.cpython_only def test_from_bytes_small(self): # bpo-46361 diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index fe4f368bed42f4..54c5a324897d23 100644 --- a/Lib/test/test_marshal.py +++ b/Lib/test/test_marshal.py @@ -260,6 +260,8 @@ def test_recursion_limit(self): #if os.name == 'nt' and support.Py_DEBUG: if os.name == 'nt': MAX_MARSHAL_STACK_DEPTH = 1000 + elif sys.platform == 'wasi': + MAX_MARSHAL_STACK_DEPTH = 1500 else: MAX_MARSHAL_STACK_DEPTH = 2000 for i in range(MAX_MARSHAL_STACK_DEPTH - 2): diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py index cfaf3b3ea26a7e..bf0d0a56e6ac8b 100644 --- a/Lib/test/test_math.py +++ b/Lib/test/test_math.py @@ -1006,6 +1006,11 @@ class T(tuple): self.assertEqual(math.dist(p, q), 5*scale) self.assertEqual(math.dist(q, p), 5*scale) + def test_math_dist_leak(self): + # gh-98897: Check for error handling does not leak memory + with self.assertRaises(ValueError): + math.dist([1, 2], [3, 4, 5]) + def testIsqrt(self): # Test a variety of inputs, large and small. 
test_values = ( diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index d51946322c8056..336648273b6cf1 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -856,6 +856,23 @@ def test_nt_helpers(self): self.assertIsInstance(b_final_path, bytes) self.assertGreater(len(b_final_path), 0) + @unittest.skipIf(sys.platform != 'win32', "Can only test junctions with creation on win32.") + def test_isjunction(self): + with os_helper.temp_dir() as d: + with os_helper.change_cwd(d): + os.mkdir('tmpdir') + + import _winapi + try: + _winapi.CreateJunction('tmpdir', 'testjunc') + except OSError: + raise unittest.SkipTest('creating the test junction failed') + + self.assertTrue(ntpath.isjunction('testjunc')) + self.assertFalse(ntpath.isjunction('tmpdir')) + self.assertPathEqual(ntpath.realpath('testjunc'), ntpath.realpath('tmpdir')) + + class NtCommonTest(test_genericpath.CommonTest, unittest.TestCase): pathmodule = ntpath attributes = ['relpath'] diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 5c032d59b13f16..e39b7260624899 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -177,6 +177,73 @@ def f(): for _ in range(1025): self.assertFalse(f()) + def test_load_shadowing_slot_should_raise_type_error(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + __slots__ = ("shadowed",) + shadowing = Class.slot + + def f(o): + o.shadowing + + o = Sneaky() + o.shadowed = 42 + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_store_shadowing_slot_should_raise_type_error(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + __slots__ = ("shadowed",) + shadowing = Class.slot + + def f(o): + o.shadowing = 42 + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_load_borrowed_slot_should_not_crash(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + borrowed = Class.slot + + def f(o): + o.borrowed + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + + def test_store_borrowed_slot_should_not_crash(self): + class Class: + __slots__ = ("slot",) + + class Sneaky: + borrowed = Class.slot + + def f(o): + o.borrowed = 42 + + o = Sneaky() + + for _ in range(1025): + with self.assertRaises(TypeError): + f(o) + class TestLoadMethodCache(unittest.TestCase): def test_descriptor_added_after_optimization(self): diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 2dafb78ebe0ec2..94db8bb7737acd 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -3797,8 +3797,6 @@ class Str(str): else: encoded = os.fsencode(os_helper.TESTFN) self.bytes_filenames.append(encoded) - self.bytes_filenames.append(bytearray(encoded)) - self.bytes_filenames.append(memoryview(encoded)) self.filenames = self.bytes_filenames + self.unicode_filenames @@ -3810,21 +3808,10 @@ def test_oserror_filename(self): (self.filenames, os.rmdir,), (self.filenames, os.stat,), (self.filenames, os.unlink,), + (self.filenames, os.listdir,), + (self.filenames, os.rename, "dst"), + (self.filenames, os.replace, "dst"), ] - if sys.platform == "win32": - funcs.extend(( - (self.bytes_filenames, os.rename, b"dst"), - (self.bytes_filenames, os.replace, b"dst"), - (self.unicode_filenames, os.rename, "dst"), - (self.unicode_filenames, os.replace, "dst"), - (self.unicode_filenames, os.listdir, ), - )) - else: - funcs.extend(( - (self.filenames, os.listdir,), - (self.filenames, os.rename, "dst"), - (self.filenames, os.replace, "dst"), - )) if 
os_helper.can_chmod(): funcs.append((self.filenames, os.chmod, 0o777)) if hasattr(os, "chown"): @@ -3840,11 +3827,7 @@ def test_oserror_filename(self): if hasattr(os, "chroot"): funcs.append((self.filenames, os.chroot,)) if hasattr(os, "link"): - if sys.platform == "win32": - funcs.append((self.bytes_filenames, os.link, b"dst")) - funcs.append((self.unicode_filenames, os.link, "dst")) - else: - funcs.append((self.filenames, os.link, "dst")) + funcs.append((self.filenames, os.link, "dst")) if hasattr(os, "listxattr"): funcs.extend(( (self.filenames, os.listxattr,), @@ -3857,21 +3840,16 @@ def test_oserror_filename(self): if hasattr(os, "readlink"): funcs.append((self.filenames, os.readlink,)) - for filenames, func, *func_args in funcs: for name in filenames: try: - if isinstance(name, (str, bytes)): - func(name, *func_args) - else: - with self.assertWarnsRegex(DeprecationWarning, 'should be'): - func(name, *func_args) + func(name, *func_args) except OSError as err: self.assertIs(err.filename, name, str(func)) except UnicodeDecodeError: pass else: - self.fail("No exception thrown by {}".format(func)) + self.fail(f"No exception thrown by {func}") class CPUCountTests(unittest.TestCase): def test_cpu_count(self): @@ -4180,6 +4158,8 @@ def check_entry(self, entry, name, is_dir, is_file, is_symlink): self.assertEqual(entry.is_file(follow_symlinks=False), stat.S_ISREG(entry_lstat.st_mode)) + self.assertEqual(entry.is_junction(), os.path.isjunction(entry.path)) + self.assert_stat_equal(entry.stat(), entry_stat, os.name == 'nt' and not is_symlink) @@ -4228,6 +4208,21 @@ def test_attributes(self): entry = entries['symlink_file.txt'] self.check_entry(entry, 'symlink_file.txt', False, True, True) + @unittest.skipIf(sys.platform != 'win32', "Can only test junctions with creation on win32.") + def test_attributes_junctions(self): + dirname = os.path.join(self.path, "tgtdir") + os.mkdir(dirname) + + import _winapi + try: + _winapi.CreateJunction(dirname, os.path.join(self.path, "srcjunc")) + except OSError: + raise unittest.SkipTest('creating the test junction failed') + + entries = self.get_entries(['srcjunc', 'tgtdir']) + self.assertEqual(entries['srcjunc'].is_junction(), True) + self.assertEqual(entries['tgtdir'].is_junction(), False) + def get_entry(self, name): path = self.bytes_path if isinstance(name, bytes) else self.path entries = list(os.scandir(path)) @@ -4350,16 +4345,8 @@ def test_bytes_like(self): for cls in bytearray, memoryview: path_bytes = cls(os.fsencode(self.path)) - with self.assertWarns(DeprecationWarning): - entries = list(os.scandir(path_bytes)) - self.assertEqual(len(entries), 1, entries) - entry = entries[0] - - self.assertEqual(entry.name, b'file.txt') - self.assertEqual(entry.path, - os.fsencode(os.path.join(self.path, 'file.txt'))) - self.assertIs(type(entry.name), bytes) - self.assertIs(type(entry.path), bytes) + with self.assertRaises(TypeError): + os.scandir(path_bytes) @unittest.skipUnless(os.listdir in os.supports_fd, 'fd support for listdir required for this test.') diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index f324177ff85573..1d01d3cbd91d14 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -640,13 +640,29 @@ def test_relative_to_common(self): self.assertEqual(p.relative_to('a/'), P('b')) self.assertEqual(p.relative_to(P('a/b')), P()) self.assertEqual(p.relative_to('a/b'), P()) + self.assertEqual(p.relative_to(P(), walk_up=True), P('a/b')) + self.assertEqual(p.relative_to('', walk_up=True), P('a/b')) + 
self.assertEqual(p.relative_to(P('a'), walk_up=True), P('b')) + self.assertEqual(p.relative_to('a', walk_up=True), P('b')) + self.assertEqual(p.relative_to('a/', walk_up=True), P('b')) + self.assertEqual(p.relative_to(P('a/b'), walk_up=True), P()) + self.assertEqual(p.relative_to('a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('a/c'), walk_up=True), P('../b')) + self.assertEqual(p.relative_to('a/c', walk_up=True), P('../b')) + self.assertEqual(p.relative_to(P('a/b/c'), walk_up=True), P('..')) + self.assertEqual(p.relative_to('a/b/c', walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('c'), walk_up=True), P('../a/b')) + self.assertEqual(p.relative_to('c', walk_up=True), P('../a/b')) # With several args. self.assertEqual(p.relative_to('a', 'b'), P()) + self.assertEqual(p.relative_to('a', 'b', walk_up=True), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('c')) self.assertRaises(ValueError, p.relative_to, P('a/b/c')) self.assertRaises(ValueError, p.relative_to, P('a/c')) self.assertRaises(ValueError, p.relative_to, P('/a')) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/a'), walk_up=True) p = P('/a/b') self.assertEqual(p.relative_to(P('/')), P('a/b')) self.assertEqual(p.relative_to('/'), P('a/b')) @@ -655,6 +671,19 @@ def test_relative_to_common(self): self.assertEqual(p.relative_to('/a/'), P('b')) self.assertEqual(p.relative_to(P('/a/b')), P()) self.assertEqual(p.relative_to('/a/b'), P()) + self.assertEqual(p.relative_to(P('/'), walk_up=True), P('a/b')) + self.assertEqual(p.relative_to('/', walk_up=True), P('a/b')) + self.assertEqual(p.relative_to(P('/a'), walk_up=True), P('b')) + self.assertEqual(p.relative_to('/a', walk_up=True), P('b')) + self.assertEqual(p.relative_to('/a/', walk_up=True), P('b')) + self.assertEqual(p.relative_to(P('/a/b'), walk_up=True), P()) + self.assertEqual(p.relative_to('/a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('/a/c'), walk_up=True), P('../b')) + self.assertEqual(p.relative_to('/a/c', walk_up=True), P('../b')) + self.assertEqual(p.relative_to(P('/a/b/c'), walk_up=True), P('..')) + self.assertEqual(p.relative_to('/a/b/c', walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('/c'), walk_up=True), P('../a/b')) + self.assertEqual(p.relative_to('/c', walk_up=True), P('../a/b')) # Unrelated paths. 
self.assertRaises(ValueError, p.relative_to, P('/c')) self.assertRaises(ValueError, p.relative_to, P('/a/b/c')) @@ -662,6 +691,8 @@ def test_relative_to_common(self): self.assertRaises(ValueError, p.relative_to, P()) self.assertRaises(ValueError, p.relative_to, '') self.assertRaises(ValueError, p.relative_to, P('a')) + self.assertRaises(ValueError, p.relative_to, P(''), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('a'), walk_up=True) def test_is_relative_to_common(self): P = self.cls @@ -1124,6 +1155,16 @@ def test_relative_to(self): self.assertEqual(p.relative_to('c:foO/'), P('Bar')) self.assertEqual(p.relative_to(P('c:foO/baR')), P()) self.assertEqual(p.relative_to('c:foO/baR'), P()) + self.assertEqual(p.relative_to(P('c:'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:foO'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:foO', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:foO/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('c:foO/baR'), walk_up=True), P()) + self.assertEqual(p.relative_to('c:foO/baR', walk_up=True), P()) + self.assertEqual(p.relative_to(P('C:Foo/Bar/Baz'), walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('C:Foo/Baz'), walk_up=True), P('../Bar')) + self.assertEqual(p.relative_to(P('C:Baz/Bar'), walk_up=True), P('../../Foo/Bar')) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P()) self.assertRaises(ValueError, p.relative_to, '') @@ -1134,11 +1175,14 @@ def test_relative_to(self): self.assertRaises(ValueError, p.relative_to, P('C:/Foo')) self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz')) + self.assertRaises(ValueError, p.relative_to, P(), walk_up=True) + self.assertRaises(ValueError, p.relative_to, '', walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('C:/Foo'), walk_up=True) p = P('C:/Foo/Bar') - self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar')) - self.assertEqual(p.relative_to('c:'), P('/Foo/Bar')) - self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar') - self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar') self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar')) self.assertEqual(p.relative_to('c:/'), P('Foo/Bar')) self.assertEqual(p.relative_to(P('c:/foO')), P('Bar')) @@ -1146,7 +1190,19 @@ def test_relative_to(self): self.assertEqual(p.relative_to('c:/foO/'), P('Bar')) self.assertEqual(p.relative_to(P('c:/foO/baR')), P()) self.assertEqual(p.relative_to('c:/foO/baR'), P()) + self.assertEqual(p.relative_to(P('c:/'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:/', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:/foO'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:/foO', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:/foO/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('c:/foO/baR'), walk_up=True), P()) + self.assertEqual(p.relative_to('c:/foO/baR', walk_up=True), P()) + self.assertEqual(p.relative_to('C:/Baz', walk_up=True), P('../Foo/Bar')) + self.assertEqual(p.relative_to('C:/Foo/Bar/Baz', walk_up=True), P('..')) + 
self.assertEqual(p.relative_to('C:/Foo/Baz', walk_up=True), P('../Bar')) # Unrelated paths. + self.assertRaises(ValueError, p.relative_to, 'c:') + self.assertRaises(ValueError, p.relative_to, P('c:')) self.assertRaises(ValueError, p.relative_to, P('C:/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz')) @@ -1156,6 +1212,14 @@ def test_relative_to(self): self.assertRaises(ValueError, p.relative_to, P('/')) self.assertRaises(ValueError, p.relative_to, P('/Foo')) self.assertRaises(ValueError, p.relative_to, P('//C/Foo')) + self.assertRaises(ValueError, p.relative_to, 'c:', walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('c:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('C:Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//C/Foo'), walk_up=True) # UNC paths. p = P('//Server/Share/Foo/Bar') self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar')) @@ -1166,11 +1230,25 @@ def test_relative_to(self): self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar')) self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P()) self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P()) + self.assertEqual(p.relative_to(P('//sErver/sHare'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar'), walk_up=True), P()) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar', walk_up=True), P()) + self.assertEqual(p.relative_to(P('//sErver/sHare/bar'), walk_up=True), P('../Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/bar', walk_up=True), P('../Foo/Bar')) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo')) + self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'), walk_up=True) def test_is_relative_to(self): P = self.cls @@ -1193,13 +1271,13 @@ def test_is_relative_to(self): self.assertFalse(p.is_relative_to(P('C:Foo/Bar/Baz'))) self.assertFalse(p.is_relative_to(P('C:Foo/Baz'))) p = P('C:/Foo/Bar') - self.assertTrue(p.is_relative_to('c:')) self.assertTrue(p.is_relative_to(P('c:/'))) self.assertTrue(p.is_relative_to(P('c:/foO'))) self.assertTrue(p.is_relative_to('c:/foO/')) self.assertTrue(p.is_relative_to(P('c:/foO/baR'))) self.assertTrue(p.is_relative_to('c:/foO/baR')) # Unrelated paths. 
+ self.assertFalse(p.is_relative_to('c:')) self.assertFalse(p.is_relative_to(P('C:/Baz'))) self.assertFalse(p.is_relative_to(P('C:/Foo/Bar/Baz'))) self.assertFalse(p.is_relative_to(P('C:/Foo/Baz'))) @@ -2329,6 +2407,13 @@ def test_is_symlink(self): self.assertIs((P / 'linkA\udfff').is_file(), False) self.assertIs((P / 'linkA\x00').is_file(), False) + def test_is_junction(self): + P = self.cls(BASE) + + with mock.patch.object(P._flavour, 'pathmod'): + self.assertEqual(P.is_junction(), P._flavour.pathmod.isjunction.return_value) + P._flavour.pathmod.isjunction.assert_called_once_with(P) + def test_is_fifo_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_fifo()) diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 55c3283e26b1e9..48f419e62fbbed 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -2104,6 +2104,52 @@ def inner(v): pass stdout, stderr = self.run_pdb_script(script, commands) self.assertFalse(stderr) + def test_gh_93696_frozen_list(self): + frozen_src = """ + def func(): + x = "Sentinel string for gh-93696" + print(x) + """ + host_program = """ + import os + import sys + + def _create_fake_frozen_module(): + with open('gh93696.py') as f: + src = f.read() + + # this function has a co_filename as if it were in a frozen module + dummy_mod = compile(src, "", "exec") + func_code = dummy_mod.co_consts[0] + + mod = type(sys)("gh93696") + mod.func = type(lambda: None)(func_code, mod.__dict__) + mod.__file__ = 'gh93696.py' + + return mod + + mod = _create_fake_frozen_module() + mod.func() + """ + commands = """ + break 20 + continue + step + list + quit + """ + with open('gh93696.py', 'w') as f: + f.write(textwrap.dedent(frozen_src)) + + with open('gh93696_host.py', 'w') as f: + f.write(textwrap.dedent(host_program)) + + self.addCleanup(os_helper.unlink, 'gh93696.py') + self.addCleanup(os_helper.unlink, 'gh93696_host.py') + stdout, stderr = self._run_pdb(["gh93696_host.py"], commands) + # verify that pdb found the source of the "frozen" function + self.assertIn('x = "Sentinel string for gh-93696"', stdout, "Sentinel statement not found") + class ChecklineTests(unittest.TestCase): def setUp(self): linecache.clearcache() # Pdb.checkline() uses linecache.getline() diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index ab45e3c52a039b..239c9d03fd9d1f 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -776,15 +776,54 @@ def f(): self.assertInBytecode(f, 'LOAD_FAST_CHECK') self.assertNotInBytecode(f, 'LOAD_FAST') - def test_setting_lineno_adds_check(self): - code = textwrap.dedent("""\ + def test_load_fast_too_many_locals(self): + # When there get to be too many locals to analyze completely, + # later locals are all converted to LOAD_FAST_CHECK, except + # when a store or prior load occurred in the same basicblock. 
+ def f(): + a00 = a01 = a02 = a03 = a04 = a05 = a06 = a07 = a08 = a09 = 1 + a10 = a11 = a12 = a13 = a14 = a15 = a16 = a17 = a18 = a19 = 1 + a20 = a21 = a22 = a23 = a24 = a25 = a26 = a27 = a28 = a29 = 1 + a30 = a31 = a32 = a33 = a34 = a35 = a36 = a37 = a38 = a39 = 1 + a40 = a41 = a42 = a43 = a44 = a45 = a46 = a47 = a48 = a49 = 1 + a50 = a51 = a52 = a53 = a54 = a55 = a56 = a57 = a58 = a59 = 1 + a60 = a61 = a62 = a63 = a64 = a65 = a66 = a67 = a68 = a69 = 1 + a70 = a71 = a72 = a73 = a74 = a75 = a76 = a77 = a78 = a79 = 1 + del a72, a73 + print(a73) + print(a70, a71, a72, a73) + while True: + print(a00, a01, a62, a63) + print(a64, a65, a78, a79) + + for i in 0, 1, 62, 63: + # First 64 locals: analyze completely + self.assertInBytecode(f, 'LOAD_FAST', f"a{i:02}") + self.assertNotInBytecode(f, 'LOAD_FAST_CHECK', f"a{i:02}") + for i in 64, 65, 78, 79: + # Locals >=64 not in the same basicblock + self.assertInBytecode(f, 'LOAD_FAST_CHECK', f"a{i:02}") + self.assertNotInBytecode(f, 'LOAD_FAST', f"a{i:02}") + for i in 70, 71: + # Locals >=64 in the same basicblock + self.assertInBytecode(f, 'LOAD_FAST', f"a{i:02}") + self.assertNotInBytecode(f, 'LOAD_FAST_CHECK', f"a{i:02}") + # del statements should invalidate within basicblocks. + self.assertInBytecode(f, 'LOAD_FAST_CHECK', "a72") + self.assertNotInBytecode(f, 'LOAD_FAST', "a72") + # previous checked loads within a basicblock enable unchecked loads + self.assertInBytecode(f, 'LOAD_FAST_CHECK', "a73") + self.assertInBytecode(f, 'LOAD_FAST', "a73") + + def test_setting_lineno_no_undefined(self): + code = textwrap.dedent(f"""\ def f(): - x = 2 - L = 3 - L = 4 + x = y = 2 + if not x: + return 4 for i in range(55): x + 6 - del x + L = 7 L = 8 L = 9 L = 10 @@ -793,15 +832,88 @@ def f(): exec(code, ns) f = ns['f'] self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code def trace(frame, event, arg): if event == 'line' and frame.f_lineno == 9: - frame.f_lineno = 2 + frame.f_lineno = 3 sys.settrace(None) return None return trace sys.settrace(trace) - f() - self.assertNotInBytecode(f, "LOAD_FAST") + result = f() + self.assertIsNone(result) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) + + def test_setting_lineno_one_undefined(self): + code = textwrap.dedent(f"""\ + def f(): + x = y = 2 + if not x: + return 4 + for i in range(55): + x + 6 + del x + L = 8 + L = 9 + L = 10 + """) + ns = {} + exec(code, ns) + f = ns['f'] + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code + def trace(frame, event, arg): + if event == 'line' and frame.f_lineno == 9: + frame.f_lineno = 3 + sys.settrace(None) + return None + return trace + e = r"assigning None to 1 unbound local" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + result = f() + self.assertEqual(result, 4) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) + + def test_setting_lineno_two_undefined(self): + code = textwrap.dedent(f"""\ + def f(): + x = y = 2 + if not x: + return 4 + for i in range(55): + x + 6 + del x, y + L = 8 + L = 9 + L = 10 + """) + ns = {} + exec(code, ns) + f = ns['f'] + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + co_code = f.__code__.co_code + def trace(frame, event, arg): + if event == 'line' and frame.f_lineno == 9: + 
frame.f_lineno = 3 + sys.settrace(None) + return None + return trace + e = r"assigning None to 2 unbound locals" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + result = f() + self.assertEqual(result, 4) + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) def make_function_with_no_checks(self): code = textwrap.dedent("""\ @@ -821,18 +933,22 @@ def f(): self.assertNotInBytecode(f, "LOAD_FAST_CHECK") return f - def test_deleting_local_adds_check(self): + def test_deleting_local_warns_and_assigns_none(self): f = self.make_function_with_no_checks() + co_code = f.__code__.co_code def trace(frame, event, arg): if event == 'line' and frame.f_lineno == 4: del frame.f_locals["x"] sys.settrace(None) return None return trace - sys.settrace(trace) - f() - self.assertNotInBytecode(f, "LOAD_FAST") - self.assertInBytecode(f, "LOAD_FAST_CHECK") + e = r"assigning None to unbound local 'x'" + with self.assertWarnsRegex(RuntimeWarning, e): + sys.settrace(trace) + f() + self.assertInBytecode(f, "LOAD_FAST") + self.assertNotInBytecode(f, "LOAD_FAST_CHECK") + self.assertEqual(f.__code__.co_code, co_code) def test_modifying_local_does_not_add_check(self): f = self.make_function_with_no_checks() @@ -868,7 +984,7 @@ def cfg_optimization_test(self, insts, expected_insts, if expected_consts is None: expected_consts = consts opt_insts, opt_consts = self.get_optimized(insts, consts) - self.compareInstructions(opt_insts, expected_insts) + self.assertInstructionsMatch(opt_insts, expected_insts) self.assertEqual(opt_consts, expected_consts) def test_conditional_jump_forward_non_const_condition(self): diff --git a/Lib/test/test_peg_generator/__init__.py b/Lib/test/test_peg_generator/__init__.py index 77f72fcc7c6e3b..7c402c3d7c5acf 100644 --- a/Lib/test/test_peg_generator/__init__.py +++ b/Lib/test/test_peg_generator/__init__.py @@ -3,6 +3,9 @@ from test import support from test.support import load_package_tests +# TODO: gh-92584: peg_generator uses distutils which was removed in Python 3.12 +raise unittest.SkipTest("distutils has been removed in Python 3.12") + if support.check_sanitizer(address=True, memory=True): # bpo-46633: Skip the test because it is too slow when Python is built diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 9c03a89fd57d07..3992faf8e5cd5b 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -277,6 +277,14 @@ def test_uname_slices(self): self.assertEqual(res[:], expected) self.assertEqual(res[:5], expected[:5]) + def test_uname_fields(self): + self.assertIn('processor', platform.uname()._fields) + + def test_uname_asdict(self): + res = platform.uname()._asdict() + self.assertEqual(len(res), 6) + self.assertIn('processor', res) + @unittest.skipIf(sys.platform in ['win32', 'OpenVMS'], "uname -p not used") @support.requires_subprocess() def test_uname_processor(self): diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py index 5ad9202433dcfb..fa41ba0b6e4637 100644 --- a/Lib/test/test_poplib.py +++ b/Lib/test/test_poplib.py @@ -15,11 +15,8 @@ from test.support import hashlib_helper from test.support import socket_helper from test.support import threading_helper -from test.support import warnings_helper - - -asynchat = warnings_helper.import_deprecated('asynchat') -asyncore = warnings_helper.import_deprecated('asyncore') +from test.support import asynchat +from test.support import asyncore test_support.requires_working_socket(module=True) @@ 
-425,13 +422,6 @@ def test_context(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, keyfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, certfile=CERTFILE, context=ctx) - self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host, - self.server.port, keyfile=CERTFILE, - certfile=CERTFILE, context=ctx) self.client.quit() self.client = poplib.POP3_SSL(self.server.host, self.server.port, diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index ae25ef55885dd6..442dec8b28065b 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -634,7 +634,7 @@ def test_stat(self): self.assertTrue(posix.stat(os_helper.TESTFN)) self.assertTrue(posix.stat(os.fsencode(os_helper.TESTFN))) - self.assertWarnsRegex(DeprecationWarning, + self.assertRaisesRegex(TypeError, 'should be string, bytes, os.PathLike or integer, not', posix.stat, bytearray(os.fsencode(os_helper.TESTFN))) self.assertRaisesRegex(TypeError, @@ -841,11 +841,8 @@ def test_listdir_bytes(self): def test_listdir_bytes_like(self): for cls in bytearray, memoryview: - with self.assertWarns(DeprecationWarning): - names = posix.listdir(cls(b'.')) - self.assertIn(os.fsencode(os_helper.TESTFN), names) - for name in names: - self.assertIs(type(name), bytes) + with self.assertRaises(TypeError): + posix.listdir(cls(b'.')) @unittest.skipUnless(posix.listdir in os.supports_fd, "test needs fd support for posix.listdir()") @@ -2090,6 +2087,28 @@ def test_mkdir(self): with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"): os.mkdir("dir", dir_fd=0) + def test_mkfifo(self): + self._verify_available("HAVE_MKFIFOAT") + if self.mac_ver >= (13, 0): + self.assertIn("HAVE_MKFIFOAT", posix._have_functions) + + else: + self.assertNotIn("HAVE_MKFIFOAT", posix._have_functions) + + with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"): + os.mkfifo("path", dir_fd=0) + + def test_mknod(self): + self._verify_available("HAVE_MKNODAT") + if self.mac_ver >= (13, 0): + self.assertIn("HAVE_MKNODAT", posix._have_functions) + + else: + self.assertNotIn("HAVE_MKNODAT", posix._have_functions) + + with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"): + os.mknod("path", dir_fd=0) + def test_rename_replace(self): self._verify_available("HAVE_RENAMEAT") if self.mac_ver >= (10, 10): @@ -2172,6 +2191,53 @@ def test_utime(self): os.utime("path", dir_fd=0) +class NamespacesTests(unittest.TestCase): + """Tests for os.unshare() and os.setns().""" + + @unittest.skipUnless(hasattr(os, 'unshare'), 'needs os.unshare()') + @unittest.skipUnless(hasattr(os, 'setns'), 'needs os.setns()') + @unittest.skipUnless(os.path.exists('/proc/self/ns/uts'), 'need /proc/self/ns/uts') + @support.requires_linux_version(3, 0, 0) + def test_unshare_setns(self): + code = """if 1: + import errno + import os + import sys + fd = os.open('/proc/self/ns/uts', os.O_RDONLY) + try: + original = os.readlink('/proc/self/ns/uts') + try: + os.unshare(os.CLONE_NEWUTS) + except OSError as e: + if e.errno == errno.ENOSPC: + # skip test if limit is exceeded + sys.exit() + raise + new = os.readlink('/proc/self/ns/uts') + if original == new: + raise Exception('os.unshare failed') + os.setns(fd, os.CLONE_NEWUTS) + restored = os.readlink('/proc/self/ns/uts') + if original != restored: + raise Exception('os.setns failed') + except PermissionError: + # The 
calling process did not have the required privileges + # for this operation + pass + except OSError as e: + # Skip the test on these errors: + # - ENOSYS: syscall not available + # - EINVAL: kernel was not configured with the CONFIG_UTS_NS option + # - ENOMEM: not enough memory + if e.errno not in (errno.ENOSYS, errno.EINVAL, errno.ENOMEM): + raise + finally: + os.close(fd) + """ + + assert_python_ok("-c", code) + + def tearDownModule(): support.reap_children() diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index c644f881e460fe..6c1c0f5577b7ec 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -178,6 +178,8 @@ def test_islink(self): def test_ismount(self): self.assertIs(posixpath.ismount("/"), True) self.assertIs(posixpath.ismount(b"/"), True) + self.assertIs(posixpath.ismount(FakePath("/")), True) + self.assertIs(posixpath.ismount(FakePath(b"/")), True) def test_ismount_non_existent(self): # Non-existent mountpoint. @@ -242,6 +244,9 @@ def fake_lstat(path): finally: os.lstat = save_lstat + def test_isjunction(self): + self.assertFalse(posixpath.isjunction(ABSTFN)) + def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") diff --git a/Lib/test/test_py_compile.py b/Lib/test/test_py_compile.py index a4a52b180dbb55..5e0a44ad9691ec 100644 --- a/Lib/test/test_py_compile.py +++ b/Lib/test/test_py_compile.py @@ -235,11 +235,12 @@ def pycompilecmd(self, *args, **kwargs): # assert_python_* helpers don't return proc object. We'll just use # subprocess.run() instead of spawn_python() and its friends to test # stdin support of the CLI. + opts = '-m' if __debug__ else '-Om' if args and args[0] == '-' and 'input' in kwargs: - return subprocess.run([sys.executable, '-m', 'py_compile', '-'], + return subprocess.run([sys.executable, opts, 'py_compile', '-'], input=kwargs['input'].encode(), capture_output=True) - return script_helper.assert_python_ok('-m', 'py_compile', *args, **kwargs) + return script_helper.assert_python_ok(opts, 'py_compile', *args, **kwargs) def pycompilecmd_failure(self, *args): return script_helper.assert_python_failure('-m', 'py_compile', *args) diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index 8ab3289dd74006..cefc71cb5a7f54 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -702,7 +702,7 @@ def test_synopsis(self): def test_synopsis_sourceless(self): os = import_helper.import_fresh_module('os') expected = os.__doc__.splitlines()[0] - filename = os.__cached__ + filename = os.__spec__.cached synopsis = pydoc.synopsis(filename) self.assertEqual(synopsis, expected) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index 3f0f84ea8cee6f..11628a236ade9a 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -630,6 +630,11 @@ def test_re_groupref_exists_errors(self): self.checkPatternError(r'()(?(2)a)', "invalid group reference 2", 5) + def test_re_groupref_exists_validation_bug(self): + for i in range(256): + with self.subTest(code=i): + re.compile(r'()(?(1)\x%02x?)' % i) + def test_re_groupref_overflow(self): from re._constants import MAXGROUPS self.checkTemplateError('()', r'\g<%s>' % MAXGROUPS, 'xx', diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index a36d18488a5ef7..baae4efc2ad789 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -28,6 +28,11 @@ ROOT_DIR = os.path.abspath(os.path.normpath(ROOT_DIR)) LOG_PREFIX = r'[0-9]+:[0-9]+:[0-9]+ (?:load avg: [0-9]+\.[0-9]{2} )?' 
+EXITCODE_BAD_TEST = 2 +EXITCODE_ENV_CHANGED = 3 +EXITCODE_NO_TESTS_RAN = 4 +EXITCODE_INTERRUPTED = 130 + TEST_INTERRUPTED = textwrap.dedent(""" from signal import SIGINT, raise_signal try: @@ -497,7 +502,7 @@ def list_regex(line_format, tests): result.append('INTERRUPTED') if not any((good, result, failed, interrupted, skipped, env_changed, fail_env_changed)): - result.append("NO TEST RUN") + result.append("NO TESTS RAN") elif not result: result.append('SUCCESS') result = ', '.join(result) @@ -707,7 +712,7 @@ def test_failing(self): test_failing = self.create_test('failing', code=code) tests = [test_ok, test_failing] - output = self.run_tests(*tests, exitcode=2) + output = self.run_tests(*tests, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, tests, failed=test_failing) def test_resources(self): @@ -748,13 +753,14 @@ def test_random(self): test = self.create_test('random', code) # first run to get the output with the random seed - output = self.run_tests('-r', test) + output = self.run_tests('-r', test, exitcode=EXITCODE_NO_TESTS_RAN) randseed = self.parse_random_seed(output) match = self.regex_search(r'TESTRANDOM: ([0-9]+)', output) test_random = int(match.group(1)) # try to reproduce with the random seed - output = self.run_tests('-r', '--randseed=%s' % randseed, test) + output = self.run_tests('-r', '--randseed=%s' % randseed, test, + exitcode=EXITCODE_NO_TESTS_RAN) randseed2 = self.parse_random_seed(output) self.assertEqual(randseed2, randseed) @@ -813,7 +819,7 @@ def test_fromfile(self): def test_interrupted(self): code = TEST_INTERRUPTED test = self.create_test('sigint', code=code) - output = self.run_tests(test, exitcode=130) + output = self.run_tests(test, exitcode=EXITCODE_INTERRUPTED) self.check_executed_tests(output, test, omitted=test, interrupted=True) @@ -838,7 +844,7 @@ def test_slowest_interrupted(self): args = ("--slowest", "-j2", test) else: args = ("--slowest", test) - output = self.run_tests(*args, exitcode=130) + output = self.run_tests(*args, exitcode=EXITCODE_INTERRUPTED) self.check_executed_tests(output, test, omitted=test, interrupted=True) @@ -878,7 +884,7 @@ def test_run(self): builtins.__dict__['RUN'] = 1 """) test = self.create_test('forever', code=code) - output = self.run_tests('--forever', test, exitcode=2) + output = self.run_tests('--forever', test, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [test]*3, failed=test) def check_leak(self, code, what): @@ -887,7 +893,7 @@ def check_leak(self, code, what): filename = 'reflog.txt' self.addCleanup(os_helper.unlink, filename) output = self.run_tests('--huntrleaks', '3:3:', test, - exitcode=2, + exitcode=EXITCODE_BAD_TEST, stderr=subprocess.STDOUT) self.check_executed_tests(output, [test], failed=test) @@ -969,7 +975,7 @@ def test_crashed(self): crash_test = self.create_test(name="crash", code=code) tests = [crash_test] - output = self.run_tests("-j2", *tests, exitcode=2) + output = self.run_tests("-j2", *tests, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, tests, failed=crash_test, randomize=True) @@ -1069,7 +1075,8 @@ def test_env_changed(self): self.check_executed_tests(output, [testname], env_changed=testname) # fail with --fail-env-changed - output = self.run_tests("--fail-env-changed", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=testname, fail_env_changed=True) @@ -1088,7 +1095,7 @@ def test_fail_always(self): """) testname = 
self.create_test(code=code) - output = self.run_tests("-w", testname, exitcode=2) + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [testname], failed=testname, rerun={testname: "test_fail_always"}) @@ -1123,7 +1130,8 @@ def test_bug(self): """) testname = self.create_test(code=code) - output = self.run_tests(testname, "-m", "nosuchtest", exitcode=0) + output = self.run_tests(testname, "-m", "nosuchtest", + exitcode=EXITCODE_NO_TESTS_RAN) self.check_executed_tests(output, [testname], no_test_ran=testname) def test_no_tests_ran_skip(self): @@ -1136,7 +1144,7 @@ def test_skipped(self): """) testname = self.create_test(code=code) - output = self.run_tests(testname, exitcode=0) + output = self.run_tests(testname) self.check_executed_tests(output, [testname]) def test_no_tests_ran_multiple_tests_nonexistent(self): @@ -1150,7 +1158,8 @@ def test_bug(self): testname = self.create_test(code=code) testname2 = self.create_test(code=code) - output = self.run_tests(testname, testname2, "-m", "nosuchtest", exitcode=0) + output = self.run_tests(testname, testname2, "-m", "nosuchtest", + exitcode=EXITCODE_NO_TESTS_RAN) self.check_executed_tests(output, [testname, testname2], no_test_ran=[testname, testname2]) @@ -1198,7 +1207,8 @@ def test_garbage(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1224,7 +1234,8 @@ def test_sleep(self): """) testname = self.create_test(code=code) - output = self.run_tests("-j2", "--timeout=1.0", testname, exitcode=2) + output = self.run_tests("-j2", "--timeout=1.0", testname, + exitcode=EXITCODE_BAD_TEST) self.check_executed_tests(output, [testname], failed=testname) self.assertRegex(output, @@ -1256,7 +1267,8 @@ def test_unraisable_exc(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", "-v", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1287,7 +1299,8 @@ def test_threading_excepthook(self): """) testname = self.create_test(code=code) - output = self.run_tests("--fail-env-changed", "-v", testname, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", testname, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1328,7 +1341,7 @@ def test_print_warning(self): for option in ("-v", "-W"): with self.subTest(option=option): cmd = ["--fail-env-changed", option, testname] - output = self.run_tests(*cmd, exitcode=3) + output = self.run_tests(*cmd, exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, [testname], env_changed=[testname], fail_env_changed=True) @@ -1373,7 +1386,8 @@ def test_leak_tmp_file(self): """) testnames = [self.create_test(code=code) for _ in range(3)] - output = self.run_tests("--fail-env-changed", "-v", "-j2", *testnames, exitcode=3) + output = self.run_tests("--fail-env-changed", "-v", "-j2", *testnames, + exitcode=EXITCODE_ENV_CHANGED) self.check_executed_tests(output, testnames, env_changed=testnames, fail_env_changed=True, diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 6789fe4cc72e3a..8fe62216ecdca0 100644 --- 
a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -752,18 +752,25 @@ def _copy(src, dst): @os_helper.skip_unless_symlink def test_copytree_dangling_symlinks(self): - # a dangling symlink raises an error at the end src_dir = self.mkdtemp() + valid_file = os.path.join(src_dir, 'test.txt') + write_file(valid_file, 'abc') + dir_a = os.path.join(src_dir, 'dir_a') + os.mkdir(dir_a) + for d in src_dir, dir_a: + os.symlink('IDONTEXIST', os.path.join(d, 'broken')) + os.symlink(valid_file, os.path.join(d, 'valid')) + + # A dangling symlink should raise an error. dst_dir = os.path.join(self.mkdtemp(), 'destination') - os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) - os.mkdir(os.path.join(src_dir, 'test_dir')) - write_file((src_dir, 'test_dir', 'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) - # a dangling symlink is ignored with the proper flag + # Dangling symlinks should be ignored with the proper flag. dst_dir = os.path.join(self.mkdtemp(), 'destination2') shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True) - self.assertNotIn('test.txt', os.listdir(dst_dir)) + for root, dirs, files in os.walk(dst_dir): + self.assertNotIn('broken', files) + self.assertIn('valid', files) # a dangling symlink is copied if symlinks=True dst_dir = os.path.join(self.mkdtemp(), 'destination3') diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index 3210ef217703e5..b6d5b8c3d82580 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -21,13 +21,11 @@ from test.support import hashlib_helper from test.support import socket_helper from test.support import threading_helper -from test.support import warnings_helper +from test.support import asyncore from unittest.mock import Mock from . import smtpd -asyncore = warnings_helper.import_deprecated('asyncore') - support.requires_working_socket(module=True) diff --git a/Lib/test/test_sqlite3/test_transactions.py b/Lib/test/test_sqlite3/test_transactions.py index 9c3d19e79bd989..5d211dd47b0b6b 100644 --- a/Lib/test/test_sqlite3/test_transactions.py +++ b/Lib/test/test_sqlite3/test_transactions.py @@ -22,9 +22,11 @@ import unittest import sqlite3 as sqlite +from contextlib import contextmanager from test.support import LOOPBACK_TIMEOUT from test.support.os_helper import TESTFN, unlink +from test.support.script_helper import assert_python_ok from test.test_sqlite3.test_dbapi import memory_database @@ -366,5 +368,176 @@ def test_isolation_level_none(self): self.assertEqual(self.traced, [self.QUERY]) +class AutocommitAttribute(unittest.TestCase): + """Test PEP 249-compliant autocommit behaviour.""" + legacy = sqlite.LEGACY_TRANSACTION_CONTROL + + @contextmanager + def check_stmt_trace(self, cx, expected, reset=True): + try: + traced = [] + cx.set_trace_callback(lambda stmt: traced.append(stmt)) + yield + finally: + self.assertEqual(traced, expected) + if reset: + cx.set_trace_callback(None) + + def test_autocommit_default(self): + with memory_database() as cx: + self.assertEqual(cx.autocommit, + sqlite.LEGACY_TRANSACTION_CONTROL) + + def test_autocommit_setget(self): + dataset = ( + True, + False, + sqlite.LEGACY_TRANSACTION_CONTROL, + ) + for mode in dataset: + with self.subTest(mode=mode): + with memory_database(autocommit=mode) as cx: + self.assertEqual(cx.autocommit, mode) + with memory_database() as cx: + cx.autocommit = mode + self.assertEqual(cx.autocommit, mode) + + def test_autocommit_setget_invalid(self): + msg = "autocommit must be True, False, or.*LEGACY" + for mode in "a", 12, 
(), None: + with self.subTest(mode=mode): + with self.assertRaisesRegex(ValueError, msg): + sqlite.connect(":memory:", autocommit=mode) + + def test_autocommit_disabled(self): + expected = [ + "SELECT 1", + "COMMIT", + "BEGIN", + "ROLLBACK", + "BEGIN", + ] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("SELECT 1") + cx.commit() + cx.rollback() + + def test_autocommit_disabled_implicit_rollback(self): + expected = ["ROLLBACK"] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected, reset=False): + cx.close() + + def test_autocommit_enabled(self): + expected = ["CREATE TABLE t(t)", "INSERT INTO t VALUES(1)"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("CREATE TABLE t(t)") + cx.execute("INSERT INTO t VALUES(1)") + self.assertFalse(cx.in_transaction) + + def test_autocommit_enabled_txn_ctl(self): + for op in "commit", "rollback": + with self.subTest(op=op): + with memory_database(autocommit=True) as cx: + meth = getattr(cx, op) + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, []): + meth() # expect this to pass silently + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_then_enabled(self): + expected = ["COMMIT"] + with memory_database(autocommit=False) as cx: + self.assertTrue(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.autocommit = True # should commit + self.assertFalse(cx.in_transaction) + + def test_autocommit_enabled_then_disabled(self): + expected = ["BEGIN"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.autocommit = False # should begin + self.assertTrue(cx.in_transaction) + + def test_autocommit_explicit_then_disabled(self): + expected = ["BEGIN DEFERRED"] + with memory_database(autocommit=True) as cx: + self.assertFalse(cx.in_transaction) + with self.check_stmt_trace(cx, expected): + cx.execute("BEGIN DEFERRED") + cx.autocommit = False # should now be a no-op + self.assertTrue(cx.in_transaction) + + def test_autocommit_enabled_ctx_mgr(self): + with memory_database(autocommit=True) as cx: + # The context manager is a no-op if autocommit=True + with self.check_stmt_trace(cx, []): + with cx: + self.assertFalse(cx.in_transaction) + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_ctx_mgr(self): + expected = ["COMMIT", "BEGIN"] + with memory_database(autocommit=False) as cx: + with self.check_stmt_trace(cx, expected): + with cx: + self.assertTrue(cx.in_transaction) + self.assertTrue(cx.in_transaction) + + def test_autocommit_compat_ctx_mgr(self): + expected = ["BEGIN ", "INSERT INTO T VALUES(1)", "COMMIT"] + with memory_database(autocommit=self.legacy) as cx: + cx.execute("create table t(t)") + with self.check_stmt_trace(cx, expected): + with cx: + self.assertFalse(cx.in_transaction) + cx.execute("INSERT INTO T VALUES(1)") + self.assertTrue(cx.in_transaction) + self.assertFalse(cx.in_transaction) + + def test_autocommit_enabled_executescript(self): + expected = ["BEGIN", "SELECT 1"] + with memory_database(autocommit=True) as cx: + with self.check_stmt_trace(cx, expected): + self.assertFalse(cx.in_transaction) + cx.execute("BEGIN") + cx.executescript("SELECT 1") + self.assertTrue(cx.in_transaction) + + def test_autocommit_disabled_executescript(self): + 
expected = ["SELECT 1"] + with memory_database(autocommit=False) as cx: + with self.check_stmt_trace(cx, expected): + self.assertTrue(cx.in_transaction) + cx.executescript("SELECT 1") + self.assertTrue(cx.in_transaction) + + def test_autocommit_compat_executescript(self): + expected = ["BEGIN", "COMMIT", "SELECT 1"] + with memory_database(autocommit=self.legacy) as cx: + with self.check_stmt_trace(cx, expected): + self.assertFalse(cx.in_transaction) + cx.execute("BEGIN") + cx.executescript("SELECT 1") + self.assertFalse(cx.in_transaction) + + def test_autocommit_disabled_implicit_shutdown(self): + # The implicit ROLLBACK should not call back into Python during + # interpreter tear-down. + code = """if 1: + import sqlite3 + cx = sqlite3.connect(":memory:", autocommit=False) + cx.set_trace_callback(print) + """ + assert_python_ok("-c", code, PYTHONIOENCODING="utf-8") + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sqlite3/test_types.py b/Lib/test/test_sqlite3/test_types.py index 62318823510d40..5e0ff353cbbd6b 100644 --- a/Lib/test/test_sqlite3/test_types.py +++ b/Lib/test/test_sqlite3/test_types.py @@ -106,9 +106,9 @@ def test_string_with_surrogates(self): @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @support.bigmemtest(size=2**31, memuse=4, dry_run=False) def test_too_large_string(self, maxsize): - with self.assertRaises(sqlite.InterfaceError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", ('x'*(2**31-1),)) - with self.assertRaises(OverflowError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", ('x'*(2**31),)) self.cur.execute("select 1 from test") row = self.cur.fetchone() @@ -117,9 +117,9 @@ def test_too_large_string(self, maxsize): @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @support.bigmemtest(size=2**31, memuse=3, dry_run=False) def test_too_large_blob(self, maxsize): - with self.assertRaises(sqlite.InterfaceError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", (b'x'*(2**31-1),)) - with self.assertRaises(OverflowError): + with self.assertRaises(sqlite.DataError): self.cur.execute("insert into test(s) values (?)", (b'x'*(2**31),)) self.cur.execute("select 1 from test") row = self.cur.fetchone() diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 5007e08f321b5a..e926fc5e88e584 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -9,6 +9,7 @@ from test.support import socket_helper from test.support import threading_helper from test.support import warnings_helper +from test.support import asyncore import socket import select import time @@ -30,9 +31,6 @@ ctypes = None -asyncore = warnings_helper.import_deprecated('asyncore') - - ssl = import_helper.import_module("ssl") import _ssl diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py index a803e3a5025985..67c653428a6dee 100644 --- a/Lib/test/test_stable_abi_ctypes.py +++ b/Lib/test/test_stable_abi_ctypes.py @@ -547,6 +547,8 @@ def test_windows_feature_macros(self): "PyObject_Size", "PyObject_Str", "PyObject_Type", + "PyObject_Vectorcall", + "PyObject_VectorcallMethod", "PyProperty_Type", "PyRangeIter_Type", "PyRange_Type", diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 05ce79f126590a..31a3cb6b53a6f2 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -3003,14 +3003,19 @@ def __init__(self, mu, sigma): nd = 
NormalDist(100, 15) self.assertNotEqual(nd, lnd) - def test_pickle_and_copy(self): + def test_copy(self): nd = self.module.NormalDist(37.5, 5.625) nd1 = copy.copy(nd) self.assertEqual(nd, nd1) nd2 = copy.deepcopy(nd) self.assertEqual(nd, nd2) - nd3 = pickle.loads(pickle.dumps(nd)) - self.assertEqual(nd, nd3) + + def test_pickle(self): + nd = self.module.NormalDist(37.5, 5.625) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + pickled = pickle.loads(pickle.dumps(nd, protocol=proto)) + self.assertEqual(nd, pickled) def test_hashability(self): ND = self.module.NormalDist diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 7247b7e48bc2b6..9b663c00223d1b 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -109,11 +109,11 @@ def test_eval_str_invalid_escape(self): for b in range(1, 128): if b in b"""\n\r"'01234567NU\\abfnrtuvx""": continue - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"'\%c'" % b), '\\' + chr(b)) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") @@ -121,7 +121,7 @@ def test_eval_str_invalid_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("'''\n\\z'''") exc = cm.exception @@ -133,11 +133,11 @@ def test_eval_str_invalid_escape(self): def test_eval_str_invalid_octal_escape(self): for i in range(0o400, 0o1000): - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"'\%o'" % i), chr(i)) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), @@ -146,7 +146,7 @@ def test_eval_str_invalid_octal_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("'''\n\\407'''") exc = cm.exception @@ -186,11 +186,11 @@ def test_eval_bytes_invalid_escape(self): for b in range(1, 128): if b in b"""\n\r"'01234567\\abfnrtvx""": continue - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"b'\%c'" % b), b'\\' + bytes([b])) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") @@ -198,7 +198,7 @@ def test_eval_bytes_invalid_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("b'''\n\\z'''") exc = cm.exception @@ -209,11 +209,11 @@ def 
test_eval_bytes_invalid_escape(self): def test_eval_bytes_invalid_octal_escape(self): for i in range(0o400, 0o1000): - with self.assertWarns(DeprecationWarning): + with self.assertWarns(SyntaxWarning): self.assertEqual(eval(r"b'\%o'" % i), bytes([i & 0o377])) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always', category=DeprecationWarning) + warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), @@ -222,7 +222,7 @@ def test_eval_bytes_invalid_octal_escape(self): self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('error', category=DeprecationWarning) + warnings.simplefilter('error', category=SyntaxWarning) with self.assertRaises(SyntaxError) as cm: eval("b'''\n\\407'''") exc = cm.exception diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py index de2e7305ccce24..f4a8d434ed1b8c 100644 --- a/Lib/test/test_sundry.py +++ b/Lib/test/test_sundry.py @@ -18,29 +18,6 @@ def test_untested_modules_can_be_imported(self): self.fail('{} has tests even though test_sundry claims ' 'otherwise'.format(name)) - import distutils.bcppcompiler - import distutils.ccompiler - import distutils.cygwinccompiler - import distutils.filelist - import distutils.text_file - import distutils.unixccompiler - - import distutils.command.bdist_dumb - import distutils.command.bdist - import distutils.command.bdist_rpm - import distutils.command.build_clib - import distutils.command.build_ext - import distutils.command.build - import distutils.command.clean - import distutils.command.config - import distutils.command.install_data - import distutils.command.install_egg_info - import distutils.command.install_headers - import distutils.command.install_lib - import distutils.command.register - import distutils.command.sdist - import distutils.command.upload - import html.entities try: diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py index 819354e4eee9b5..25714aecda3a15 100644 --- a/Lib/test/test_symtable.py +++ b/Lib/test/test_symtable.py @@ -222,10 +222,9 @@ def checkfilename(brokencode, offset): checkfilename("def f(x): foo)(", 14) # parse-time checkfilename("def f(x): global x", 11) # symtable-build-time symtable.symtable("pass", b"spam", "exec") - with self.assertWarns(DeprecationWarning), \ - self.assertRaises(TypeError): + with self.assertRaises(TypeError): symtable.symtable("pass", bytearray(b"spam"), "exec") - with self.assertWarns(DeprecationWarning): + with self.assertRaises(TypeError): symtable.symtable("pass", memoryview(b"spam"), "exec") with self.assertRaises(TypeError): symtable.symtable("pass", list(b"spam"), "exec") diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 69a86231e2f276..78cac231929a61 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -756,6 +756,27 @@ >>> __debug__: int Traceback (most recent call last): SyntaxError: cannot assign to __debug__ +>>> f(a=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(a, b, c=, d) +Traceback (most recent call last): +SyntaxError: expected argument value expression +>>> f(*args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking +>>> f(a, b, *args=[0]) +Traceback (most recent call last): +SyntaxError: cannot assign to iterable argument unpacking 
+>>> f(**kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking +>>> f(a, b, *args, **kwargs={'a': 1}) +Traceback (most recent call last): +SyntaxError: cannot assign to keyword argument unpacking More set_context(): @@ -1584,6 +1605,22 @@ Traceback (most recent call last): SyntaxError: trailing comma not allowed without surrounding parentheses +>>> import a from b +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a from b as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + +>>> import a.y.z from b.y.z as bar +Traceback (most recent call last): +SyntaxError: Did you mean to use 'from ... import ...' instead? + # Check that we dont raise the "trailing comma" error if there is more # input to the left of the valid part that we parsed. @@ -1998,6 +2035,16 @@ def test_generator_in_function_call(self): "Generator expression must be parenthesized", lineno=1, end_lineno=1, offset=11, end_offset=53) + def test_except_then_except_star(self): + self._check_error("try: pass\nexcept ValueError: pass\nexcept* TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=8) + + def test_except_star_then_except(self): + self._check_error("try: pass\nexcept* ValueError: pass\nexcept TypeError: pass", + r"cannot have both 'except' and 'except\*' on the same 'try'", + lineno=3, end_lineno=3, offset=1, end_offset=7) + def test_empty_line_after_linecont(self): # See issue-40847 s = r"""\ diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 41482734872e06..2403c7c815f2c0 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1439,7 +1439,7 @@ def bar(cls): check(bar, size('PP')) # generator def get_gen(): yield 1 - check(get_gen(), size('P2P4P4c7P2ic??P')) + check(get_gen(), size('P2P4P4c7P2ic??2P')) # iterator check(iter('abc'), size('lP')) # callable-iterator @@ -1521,7 +1521,7 @@ def delx(self): del self.__x check((1,2,3), vsize('') + 3*self.P) # type # static type: PyTypeObject - fmt = 'P2nPI13Pl4Pn9Pn12PIP' + fmt = 'P2nPI13Pl4Pn9Pn12PIPc' s = vsize('2P' + fmt) check(int, s) # class diff --git a/Lib/test/test_sys_setprofile.py b/Lib/test/test_sys_setprofile.py index 4c3053a1e3e9e6..acae433cd0a549 100644 --- a/Lib/test/test_sys_setprofile.py +++ b/Lib/test/test_sys_setprofile.py @@ -437,12 +437,8 @@ def __del__(self): sys.setprofile(bar) sys.setprofile(A()) - with support.catch_unraisable_exception() as cm: - sys.setprofile(foo) - self.assertEqual(cm.unraisable.object, A.__del__) - self.assertIsInstance(cm.unraisable.exc_value, RuntimeError) - - self.assertEqual(sys.getprofile(), foo) + sys.setprofile(foo) + self.assertEqual(sys.getprofile(), bar) def test_same_object(self): diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py index aa61f8b185883b..a251b2272e95eb 100644 --- a/Lib/test/test_sys_settrace.py +++ b/Lib/test/test_sys_settrace.py @@ -346,7 +346,7 @@ def make_tracer(): return Tracer() def compare_events(self, line_offset, events, expected_events): - events = [(l - line_offset, e) for (l, e) in events] + events = [(l - line_offset if l is not None else None, e) for (l, e) in events] if events != expected_events: self.fail( "events did not match expectation:\n" + @@ -834,9 +834,8 @@ def 
func(): (5, 'line'), (6, 'line'), (7, 'line'), - (10, 'line'), - (13, 'line'), - (13, 'return')]) + (10, 'line')] + + ([(13, 'line'), (13, 'return')] if __debug__ else [(10, 'return')])) def test_continue_through_finally(self): @@ -871,9 +870,8 @@ def func(): (6, 'line'), (7, 'line'), (10, 'line'), - (3, 'line'), - (13, 'line'), - (13, 'return')]) + (3, 'line')] + + ([(13, 'line'), (13, 'return')] if __debug__ else [(3, 'return')])) def test_return_through_finally(self): @@ -2798,12 +2796,8 @@ def __del__(self): sys.settrace(bar) sys.settrace(A()) - with support.catch_unraisable_exception() as cm: - sys.settrace(foo) - self.assertEqual(cm.unraisable.object, A.__del__) - self.assertIsInstance(cm.unraisable.exc_value, RuntimeError) - - self.assertEqual(sys.gettrace(), foo) + sys.settrace(foo) + self.assertEqual(sys.gettrace(), bar) def test_same_object(self): diff --git a/Lib/test/test_syslog.py b/Lib/test/test_syslog.py index fe09bd39f8b7fd..2125ec58d87e03 100644 --- a/Lib/test/test_syslog.py +++ b/Lib/test/test_syslog.py @@ -1,5 +1,9 @@ -from test.support import import_helper +from test.support import import_helper, threading_helper syslog = import_helper.import_module("syslog") #skip if not supported +from test import support +import sys +import threading +import time import unittest # XXX(nnorwitz): This test sucks. I don't know of a platform independent way @@ -8,6 +12,9 @@ class Test(unittest.TestCase): + def tearDown(self): + syslog.closelog() + def test_openlog(self): syslog.openlog('python') # Issue #6697. @@ -18,22 +25,59 @@ def test_syslog(self): syslog.syslog('test message from python test_syslog') syslog.syslog(syslog.LOG_ERR, 'test error from python test_syslog') + def test_syslog_implicit_open(self): + syslog.closelog() # Make sure log is closed + syslog.syslog('test message from python test_syslog') + syslog.syslog(syslog.LOG_ERR, 'test error from python test_syslog') + def test_closelog(self): syslog.openlog('python') syslog.closelog() + syslog.closelog() # idempotent operation def test_setlogmask(self): - syslog.setlogmask(syslog.LOG_DEBUG) + mask = syslog.LOG_UPTO(syslog.LOG_WARNING) + oldmask = syslog.setlogmask(mask) + self.assertEqual(syslog.setlogmask(0), mask) + self.assertEqual(syslog.setlogmask(oldmask), mask) def test_log_mask(self): - syslog.LOG_MASK(syslog.LOG_INFO) - - def test_log_upto(self): - syslog.LOG_UPTO(syslog.LOG_INFO) + mask = syslog.LOG_UPTO(syslog.LOG_WARNING) + self.assertTrue(mask & syslog.LOG_MASK(syslog.LOG_WARNING)) + self.assertTrue(mask & syslog.LOG_MASK(syslog.LOG_ERR)) + self.assertFalse(mask & syslog.LOG_MASK(syslog.LOG_INFO)) def test_openlog_noargs(self): syslog.openlog() syslog.syslog('test message from python test_syslog') + @threading_helper.requires_working_threading() + def test_syslog_threaded(self): + start = threading.Event() + stop = False + def opener(): + start.wait(10) + i = 1 + while not stop: + syslog.openlog(f'python-test-{i}') # new string object + i += 1 + def logger(): + start.wait(10) + while not stop: + syslog.syslog('test message from python test_syslog') + + orig_si = sys.getswitchinterval() + support.setswitchinterval(1e-9) + try: + threads = [threading.Thread(target=opener)] + threads += [threading.Thread(target=logger) for k in range(10)] + with threading_helper.start_threads(threads): + start.set() + time.sleep(0.1) + stop = True + finally: + sys.setswitchinterval(orig_si) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 
c6649962331464..13ba5068ae20d7 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1305,6 +1305,62 @@ def f(): self.assertIn("Fatal Python error: Py_EndInterpreter: " "not the last thread", err.decode()) + def _check_allowed(self, before_start='', *, + allowed=True, + daemon_allowed=True, + daemon=False, + ): + subinterp_code = textwrap.dedent(f""" + import test.support + import threading + def func(): + print('this should not have run!') + t = threading.Thread(target=func, daemon={daemon}) + {before_start} + t.start() + """) + script = textwrap.dedent(f""" + import test.support + test.support.run_in_subinterp_with_config( + {subinterp_code!r}, + allow_fork=True, + allow_exec=True, + allow_threads={allowed}, + allow_daemon_threads={daemon_allowed}, + ) + """) + with test.support.SuppressCrashReport(): + _, _, err = assert_python_ok("-c", script) + return err.decode() + + @cpython_only + def test_threads_not_allowed(self): + err = self._check_allowed( + allowed=False, + daemon_allowed=False, + daemon=False, + ) + self.assertIn('RuntimeError', err) + + @cpython_only + def test_daemon_threads_not_allowed(self): + with self.subTest('via Thread()'): + err = self._check_allowed( + allowed=True, + daemon_allowed=False, + daemon=True, + ) + self.assertIn('RuntimeError', err) + + with self.subTest('via Thread.daemon setter'): + err = self._check_allowed( + 't.daemon = True', + allowed=True, + daemon_allowed=False, + daemon=False, + ) + self.assertIn('RuntimeError', err) + class ThreadingExceptionTests(BaseTestCase): # A RuntimeError should be raised if Thread.start() is called diff --git a/Lib/test/test_tkinter/test_text.py b/Lib/test/test_tkinter/test_text.py index d1583f0b20aabe..328e4256ce0711 100644 --- a/Lib/test/test_tkinter/test_text.py +++ b/Lib/test/test_tkinter/test_text.py @@ -40,6 +40,58 @@ def test_search(self): self.assertEqual(text.search('-test', '1.0', 'end'), '1.2') self.assertEqual(text.search('test', '1.0', 'end'), '1.3') + def test_count(self): + # XXX Some assertions do not check against the intended result, + # but instead check the current result to prevent regression. 
+ text = self.text + text.insert('1.0', + 'Lorem ipsum dolor sit amet,\n' + 'consectetur adipiscing elit,\n' + 'sed do eiusmod tempor incididunt\n' + 'ut labore et dolore magna aliqua.') + + options = ('chars', 'indices', 'lines', + 'displaychars', 'displayindices', 'displaylines', + 'xpixels', 'ypixels') + if self.wantobjects: + self.assertEqual(len(text.count('1.0', 'end', *options)), 8) + else: + text.count('1.0', 'end', *options) + self.assertEqual(text.count('1.0', 'end', 'chars', 'lines'), (124, 4) + if self.wantobjects else '124 4') + self.assertEqual(text.count('1.3', '4.5', 'chars', 'lines'), (92, 3) + if self.wantobjects else '92 3') + self.assertEqual(text.count('4.5', '1.3', 'chars', 'lines'), (-92, -3) + if self.wantobjects else '-92 -3') + self.assertEqual(text.count('1.3', '1.3', 'chars', 'lines'), (0, 0) + if self.wantobjects else '0 0') + self.assertEqual(text.count('1.0', 'end', 'lines'), (4,) + if self.wantobjects else ('4',)) + self.assertEqual(text.count('end', '1.0', 'lines'), (-4,) + if self.wantobjects else ('-4',)) + self.assertEqual(text.count('1.3', '1.5', 'lines'), None + if self.wantobjects else ('0',)) + self.assertEqual(text.count('1.3', '1.3', 'lines'), None + if self.wantobjects else ('0',)) + self.assertEqual(text.count('1.0', 'end'), (124,) # 'indices' by default + if self.wantobjects else ('124',)) + self.assertRaises(tkinter.TclError, text.count, '1.0', 'end', 'spam') + self.assertRaises(tkinter.TclError, text.count, '1.0', 'end', '-lines') + + self.assertIsInstance(text.count('1.3', '1.5', 'ypixels'), tuple) + self.assertIsInstance(text.count('1.3', '1.5', 'update', 'ypixels'), int + if self.wantobjects else str) + self.assertEqual(text.count('1.3', '1.3', 'update', 'ypixels'), None + if self.wantobjects else '0') + self.assertEqual(text.count('1.3', '1.5', 'update', 'indices'), 2 + if self.wantobjects else '2') + self.assertEqual(text.count('1.3', '1.3', 'update', 'indices'), None + if self.wantobjects else '0') + self.assertEqual(text.count('1.3', '1.5', 'update'), (2,) + if self.wantobjects else ('2',)) + self.assertEqual(text.count('1.3', '1.3', 'update'), None + if self.wantobjects else ('0',)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py index 6cde1ccdbfcda8..6fde93cbecc73f 100644 --- a/Lib/test/test_tkinter/test_widgets.py +++ b/Lib/test/test_tkinter/test_widgets.py @@ -759,6 +759,164 @@ def test_configure_yscrollincrement(self): self.checkPixelsParam(widget, 'yscrollincrement', 10, 0, 11.2, 13.6, -10, '0.1i') + def _test_option_joinstyle(self, c, factory): + for joinstyle in 'bevel', 'miter', 'round': + i = factory(joinstyle=joinstyle) + self.assertEqual(c.itemcget(i, 'joinstyle'), joinstyle) + self.assertRaises(TclError, factory, joinstyle='spam') + + def _test_option_smooth(self, c, factory): + for smooth in 1, True, '1', 'true', 'yes', 'on': + i = factory(smooth=smooth) + self.assertEqual(c.itemcget(i, 'smooth'), 'true') + for smooth in 0, False, '0', 'false', 'no', 'off': + i = factory(smooth=smooth) + self.assertEqual(c.itemcget(i, 'smooth'), '0') + i = factory(smooth=True, splinestep=30) + self.assertEqual(c.itemcget(i, 'smooth'), 'true') + self.assertEqual(c.itemcget(i, 'splinestep'), '30') + i = factory(smooth='raw', splinestep=30) + self.assertEqual(c.itemcget(i, 'smooth'), 'raw') + self.assertEqual(c.itemcget(i, 'splinestep'), '30') + self.assertRaises(TclError, factory, smooth='spam') + + def test_create_rectangle(self): + c = self.create() + 
i1 = c.create_rectangle(20, 30, 60, 10) + self.assertEqual(c.coords(i1), [20.0, 10.0, 60.0, 30.0]) + self.assertEqual(c.bbox(i1), (19, 9, 61, 31)) + + i2 = c.create_rectangle([21, 31, 61, 11]) + self.assertEqual(c.coords(i2), [21.0, 11.0, 61.0, 31.0]) + self.assertEqual(c.bbox(i2), (20, 10, 62, 32)) + + i3 = c.create_rectangle((22, 32), (62, 12)) + self.assertEqual(c.coords(i3), [22.0, 12.0, 62.0, 32.0]) + self.assertEqual(c.bbox(i3), (21, 11, 63, 33)) + + i4 = c.create_rectangle([(23, 33), (63, 13)]) + self.assertEqual(c.coords(i4), [23.0, 13.0, 63.0, 33.0]) + self.assertEqual(c.bbox(i4), (22, 12, 64, 34)) + + self.assertRaises(TclError, c.create_rectangle, 20, 30, 60) + self.assertRaises(TclError, c.create_rectangle, [20, 30, 60]) + self.assertRaises(TclError, c.create_rectangle, 20, 30, 40, 50, 60, 10) + self.assertRaises(TclError, c.create_rectangle, [20, 30, 40, 50, 60, 10]) + self.assertRaises(TclError, c.create_rectangle, 20, 30) + self.assertRaises(TclError, c.create_rectangle, [20, 30]) + self.assertRaises(IndexError, c.create_rectangle) + self.assertRaises(IndexError, c.create_rectangle, []) + + def test_create_line(self): + c = self.create() + i1 = c.create_line(20, 30, 40, 50, 60, 10) + self.assertEqual(c.coords(i1), [20.0, 30.0, 40.0, 50.0, 60.0, 10.0]) + self.assertEqual(c.bbox(i1), (18, 8, 62, 52)) + self.assertEqual(c.itemcget(i1, 'arrow'), 'none') + self.assertEqual(c.itemcget(i1, 'arrowshape'), '8 10 3') + self.assertEqual(c.itemcget(i1, 'capstyle'), 'butt') + self.assertEqual(c.itemcget(i1, 'joinstyle'), 'round') + self.assertEqual(c.itemcget(i1, 'smooth'), '0') + self.assertEqual(c.itemcget(i1, 'splinestep'), '12') + + i2 = c.create_line([21, 31, 41, 51, 61, 11]) + self.assertEqual(c.coords(i2), [21.0, 31.0, 41.0, 51.0, 61.0, 11.0]) + self.assertEqual(c.bbox(i2), (19, 9, 63, 53)) + + i3 = c.create_line((22, 32), (42, 52), (62, 12)) + self.assertEqual(c.coords(i3), [22.0, 32.0, 42.0, 52.0, 62.0, 12.0]) + self.assertEqual(c.bbox(i3), (20, 10, 64, 54)) + + i4 = c.create_line([(23, 33), (43, 53), (63, 13)]) + self.assertEqual(c.coords(i4), [23.0, 33.0, 43.0, 53.0, 63.0, 13.0]) + self.assertEqual(c.bbox(i4), (21, 11, 65, 55)) + + self.assertRaises(TclError, c.create_line, 20, 30, 60) + self.assertRaises(TclError, c.create_line, [20, 30, 60]) + self.assertRaises(TclError, c.create_line, 20, 30) + self.assertRaises(TclError, c.create_line, [20, 30]) + self.assertRaises(IndexError, c.create_line) + self.assertRaises(IndexError, c.create_line, []) + + for arrow in 'none', 'first', 'last', 'both': + i = c.create_line(20, 30, 60, 10, arrow=arrow) + self.assertEqual(c.itemcget(i, 'arrow'), arrow) + i = c.create_line(20, 30, 60, 10, arrow='first', arrowshape=[10, 15, 5]) + self.assertEqual(c.itemcget(i, 'arrowshape'), '10 15 5') + self.assertRaises(TclError, c.create_line, 20, 30, 60, 10, arrow='spam') + + for capstyle in 'butt', 'projecting', 'round': + i = c.create_line(20, 30, 60, 10, capstyle=capstyle) + self.assertEqual(c.itemcget(i, 'capstyle'), capstyle) + self.assertRaises(TclError, c.create_line, 20, 30, 60, 10, capstyle='spam') + + self._test_option_joinstyle(c, + lambda **kwargs: c.create_line(20, 30, 40, 50, 60, 10, **kwargs)) + self._test_option_smooth(c, + lambda **kwargs: c.create_line(20, 30, 60, 10, **kwargs)) + + def test_create_polygon(self): + c = self.create() + i1 = c.create_polygon(20, 30, 40, 50, 60, 10) + self.assertEqual(c.coords(i1), [20.0, 30.0, 40.0, 50.0, 60.0, 10.0]) + self.assertEqual(c.bbox(i1), (19, 9, 61, 51)) + self.assertEqual(c.itemcget(i1, 
'joinstyle'), 'round') + self.assertEqual(c.itemcget(i1, 'smooth'), '0') + self.assertEqual(c.itemcget(i1, 'splinestep'), '12') + + i2 = c.create_polygon([21, 31, 41, 51, 61, 11]) + self.assertEqual(c.coords(i2), [21.0, 31.0, 41.0, 51.0, 61.0, 11.0]) + self.assertEqual(c.bbox(i2), (20, 10, 62, 52)) + + i3 = c.create_polygon((22, 32), (42, 52), (62, 12)) + self.assertEqual(c.coords(i3), [22.0, 32.0, 42.0, 52.0, 62.0, 12.0]) + self.assertEqual(c.bbox(i3), (21, 11, 63, 53)) + + i4 = c.create_polygon([(23, 33), (43, 53), (63, 13)]) + self.assertEqual(c.coords(i4), [23.0, 33.0, 43.0, 53.0, 63.0, 13.0]) + self.assertEqual(c.bbox(i4), (22, 12, 64, 54)) + + self.assertRaises(TclError, c.create_polygon, 20, 30, 60) + self.assertRaises(TclError, c.create_polygon, [20, 30, 60]) + self.assertRaises(IndexError, c.create_polygon) + self.assertRaises(IndexError, c.create_polygon, []) + + self._test_option_joinstyle(c, + lambda **kwargs: c.create_polygon(20, 30, 40, 50, 60, 10, **kwargs)) + self._test_option_smooth(c, + lambda **kwargs: c.create_polygon(20, 30, 40, 50, 60, 10, **kwargs)) + + def test_coords(self): + c = self.create() + i = c.create_line(20, 30, 40, 50, 60, 10, tags='x') + self.assertEqual(c.coords(i), [20.0, 30.0, 40.0, 50.0, 60.0, 10.0]) + self.assertEqual(c.coords('x'), [20.0, 30.0, 40.0, 50.0, 60.0, 10.0]) + self.assertEqual(c.bbox(i), (18, 8, 62, 52)) + + c.coords(i, 50, 60, 70, 80, 90, 40) + self.assertEqual(c.coords(i), [50.0, 60.0, 70.0, 80.0, 90.0, 40.0]) + self.assertEqual(c.bbox(i), (48, 38, 92, 82)) + + c.coords(i, [21, 31, 41, 51, 61, 11]) + self.assertEqual(c.coords(i), [21.0, 31.0, 41.0, 51.0, 61.0, 11.0]) + + c.coords(i, 20, 30, 60, 10) + self.assertEqual(c.coords(i), [20.0, 30.0, 60.0, 10.0]) + self.assertEqual(c.bbox(i), (18, 8, 62, 32)) + + self.assertRaises(TclError, c.coords, i, 20, 30, 60) + self.assertRaises(TclError, c.coords, i, [20, 30, 60]) + self.assertRaises(TclError, c.coords, i, 20, 30) + self.assertRaises(TclError, c.coords, i, [20, 30]) + + c.coords(i, '20', '30c', '60i', '10p') + coords = c.coords(i) + self.assertIsInstance(coords, list) + self.assertEqual(len(coords), 4) + self.assertEqual(coords[0], 20) + for i in range(4): + self.assertIsInstance(coords[i], float) + @requires_tcl(8, 6) def test_moveto(self): widget = self.create() diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 1272e1e9be002e..63c2501cfe2338 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -3,13 +3,15 @@ from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP, STRING, ENDMARKER, ENCODING, tok_name, detect_encoding, open as tokenize_open, Untokenizer, generate_tokens, - NEWLINE, _generate_tokens_from_c_tokenizer) + NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT) from io import BytesIO, StringIO import unittest from textwrap import dedent from unittest import TestCase, mock from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) +from test.support import os_helper +from test.support.script_helper import run_test_script, make_script import os import token @@ -2512,6 +2514,26 @@ def get_tokens(string): self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000) self.assertRaises(SyntaxError, get_tokens, "]") + def test_max_indent(self): + MAXINDENT = 100 + + def generate_source(indents): + source = ''.join((' ' * x) + 'if True:\n' for x in range(indents)) + source += ' ' * indents + 'pass\n' + return source + + valid = generate_source(MAXINDENT - 1) + tokens = 
list(_generate_tokens_from_c_tokenizer(valid)) + self.assertEqual(tokens[-1].type, DEDENT) + compile(valid, "", "exec") + + invalid = generate_source(MAXINDENT) + tokens = list(_generate_tokens_from_c_tokenizer(invalid)) + self.assertEqual(tokens[-1].type, NEWLINE) + self.assertRaises( + IndentationError, compile, invalid, "", "exec" + ) + def test_continuation_lines_indentation(self): def get_tokens(string): return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)] @@ -2611,5 +2633,19 @@ def fib(n): self.assertEqual(get_tokens(code), get_tokens(code_no_cont)) +class CTokenizerBufferTests(unittest.TestCase): + def test_newline_at_the_end_of_buffer(self): + # See issue 99581: Make sure that if we need to add a new line at the + # end of the buffer, we have enough space in the buffer, specially when + # the current line is as long as the buffer space available. + test_script = f"""\ + #coding: latin-1 + #{"a"*10000} + #{"a"*10002}""" + with os_helper.temp_dir() as temp_dir: + file_name = make_script(temp_dir, 'foo', test_script) + run_test_script(file_name) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_tools/test_gprof2html.py b/Lib/test/test_tools/test_gprof2html.py deleted file mode 100644 index 7cceb8faf8e5f4..00000000000000 --- a/Lib/test/test_tools/test_gprof2html.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Tests for the gprof2html script in the Tools directory.""" - -import os -import sys -import unittest -from unittest import mock -import tempfile - -from test.test_tools import skip_if_missing, import_tool - -skip_if_missing() - -class Gprof2htmlTests(unittest.TestCase): - - def setUp(self): - self.gprof = import_tool('gprof2html') - oldargv = sys.argv - def fixup(): - sys.argv = oldargv - self.addCleanup(fixup) - sys.argv = [] - - def test_gprof(self): - # Issue #14508: this used to fail with a NameError. 
- with mock.patch.object(self.gprof, 'webbrowser') as wmock, \ - tempfile.TemporaryDirectory() as tmpdir: - fn = os.path.join(tmpdir, 'abc') - open(fn, 'wb').close() - sys.argv = ['gprof2html', fn] - self.gprof.main() - self.assertTrue(wmock.open.called) - - -if __name__ == '__main__': - unittest.main() diff --git a/Lib/test/test_tools/test_i18n.py b/Lib/test/test_tools/test_i18n.py index 7f18edaaa8ca60..c083a04475e726 100644 --- a/Lib/test/test_tools/test_i18n.py +++ b/Lib/test/test_tools/test_i18n.py @@ -155,6 +155,26 @@ class C: ''')) self.assertFalse([msgid for msgid in msgids if 'doc' in msgid]) + def test_moduledocstring(self): + for doc in ('"""doc"""', "r'''doc'''", "R'doc'", 'u"doc"'): + with self.subTest(doc): + msgids = self.extract_docstrings_from_str(dedent('''\ + %s + ''' % doc)) + self.assertIn('doc', msgids) + + def test_moduledocstring_bytes(self): + msgids = self.extract_docstrings_from_str(dedent('''\ + b"""doc""" + ''')) + self.assertFalse([msgid for msgid in msgids if 'doc' in msgid]) + + def test_moduledocstring_fstring(self): + msgids = self.extract_docstrings_from_str(dedent('''\ + f"""doc""" + ''')) + self.assertFalse([msgid for msgid in msgids if 'doc' in msgid]) + def test_msgid(self): msgids = self.extract_docstrings_from_str( '''_("""doc""" r'str' u"ing")''') diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py deleted file mode 100644 index 710461f8819754..00000000000000 --- a/Lib/test/test_tools/test_md5sum.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Tests for the md5sum script in the Tools directory.""" - -import os -import unittest -from test.support import os_helper -from test.support import hashlib_helper -from test.support.script_helper import assert_python_ok, assert_python_failure - -from test.test_tools import scriptsdir, skip_if_missing - -skip_if_missing() - -@hashlib_helper.requires_hashdigest('md5', openssl=True) -class MD5SumTests(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.script = os.path.join(scriptsdir, 'md5sum.py') - os.mkdir(os_helper.TESTFN_ASCII) - cls.fodder = os.path.join(os_helper.TESTFN_ASCII, 'md5sum.fodder') - with open(cls.fodder, 'wb') as f: - f.write(b'md5sum\r\ntest file\r\n') - cls.fodder_md5 = b'd38dae2eb1ab346a292ef6850f9e1a0d' - cls.fodder_textmode_md5 = b'a8b07894e2ca3f2a4c3094065fa6e0a5' - - @classmethod - def tearDownClass(cls): - os_helper.rmtree(os_helper.TESTFN_ASCII) - - def test_noargs(self): - rc, out, err = assert_python_ok(self.script) - self.assertEqual(rc, 0) - self.assertTrue( - out.startswith(b'd41d8cd98f00b204e9800998ecf8427e ')) - self.assertFalse(err) - - def test_checksum_fodder(self): - rc, out, err = assert_python_ok(self.script, self.fodder) - self.assertEqual(rc, 0) - self.assertTrue(out.startswith(self.fodder_md5)) - for part in self.fodder.split(os.path.sep): - self.assertIn(part.encode(), out) - self.assertFalse(err) - - def test_dash_l(self): - rc, out, err = assert_python_ok(self.script, '-l', self.fodder) - self.assertEqual(rc, 0) - self.assertIn(self.fodder_md5, out) - parts = self.fodder.split(os.path.sep) - self.assertIn(parts[-1].encode(), out) - self.assertNotIn(parts[-2].encode(), out) - - def test_dash_t(self): - rc, out, err = assert_python_ok(self.script, '-t', self.fodder) - self.assertEqual(rc, 0) - self.assertTrue(out.startswith(self.fodder_textmode_md5)) - self.assertNotIn(self.fodder_md5, out) - - def test_dash_s(self): - rc, out, err = assert_python_ok(self.script, '-s', '512', self.fodder) - self.assertEqual(rc, 0) - 
self.assertIn(self.fodder_md5, out) - - def test_multiple_files(self): - rc, out, err = assert_python_ok(self.script, self.fodder, self.fodder) - self.assertEqual(rc, 0) - lines = out.splitlines() - self.assertEqual(len(lines), 2) - self.assertEqual(*lines) - - def test_usage(self): - rc, out, err = assert_python_failure(self.script, '-h') - self.assertEqual(rc, 2) - self.assertEqual(out, b'') - self.assertGreater(err, b'') - - -if __name__ == '__main__': - unittest.main() diff --git a/Lib/test/test_tools/test_pathfix.py b/Lib/test/test_tools/test_pathfix.py deleted file mode 100644 index aa754bc479b468..00000000000000 --- a/Lib/test/test_tools/test_pathfix.py +++ /dev/null @@ -1,131 +0,0 @@ -import os -import subprocess -import sys -import unittest -from test.support import os_helper -from test.test_tools import scriptsdir, skip_if_missing - - -# need Tools/script/ directory: skip if run on Python installed on the system -skip_if_missing() - - -class TestPathfixFunctional(unittest.TestCase): - script = os.path.join(scriptsdir, 'pathfix.py') - - def setUp(self): - self.addCleanup(os_helper.unlink, os_helper.TESTFN) - - def pathfix(self, shebang, pathfix_flags, exitcode=0, stdout='', stderr='', - directory=''): - if directory: - # bpo-38347: Test filename should contain lowercase, uppercase, - # "-", "_" and digits. - filename = os.path.join(directory, 'script-A_1.py') - pathfix_arg = directory - else: - filename = os_helper.TESTFN - pathfix_arg = filename - - with open(filename, 'w', encoding='utf8') as f: - f.write(f'{shebang}\n' + 'print("Hello world")\n') - - encoding = sys.getfilesystemencoding() - proc = subprocess.run( - [sys.executable, self.script, - *pathfix_flags, '-n', pathfix_arg], - env={**os.environ, 'PYTHONIOENCODING': encoding}, - capture_output=True) - - if stdout == '' and proc.returncode == 0: - stdout = f'{filename}: updating\n' - self.assertEqual(proc.returncode, exitcode, proc) - self.assertEqual(proc.stdout.decode(encoding), stdout.replace('\n', os.linesep), proc) - self.assertEqual(proc.stderr.decode(encoding), stderr.replace('\n', os.linesep), proc) - - with open(filename, 'r', encoding='utf8') as f: - output = f.read() - - lines = output.split('\n') - self.assertEqual(lines[1:], ['print("Hello world")', '']) - new_shebang = lines[0] - - if proc.returncode != 0: - self.assertEqual(shebang, new_shebang) - - return new_shebang - - def test_recursive(self): - tmpdir = os_helper.TESTFN + '.d' - self.addCleanup(os_helper.rmtree, tmpdir) - os.mkdir(tmpdir) - expected_stderr = f"recursedown('{os.path.basename(tmpdir)}')\n" - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python', - ['-i', '/usr/bin/python3'], - directory=tmpdir, - stderr=expected_stderr), - '#! /usr/bin/python3') - - def test_pathfix(self): - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python', - ['-i', '/usr/bin/python3']), - '#! /usr/bin/python3') - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python -R', - ['-i', '/usr/bin/python3']), - '#! /usr/bin/python3') - - def test_pathfix_keeping_flags(self): - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python -R', - ['-i', '/usr/bin/python3', '-k']), - '#! /usr/bin/python3 -R') - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python', - ['-i', '/usr/bin/python3', '-k']), - '#! /usr/bin/python3') - - def test_pathfix_adding_flag(self): - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python', - ['-i', '/usr/bin/python3', '-a', 's']), - '#! /usr/bin/python3 -s') - self.assertEqual( - self.pathfix( - '#! 
/usr/bin/env python -S', - ['-i', '/usr/bin/python3', '-a', 's']), - '#! /usr/bin/python3 -s') - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python -V', - ['-i', '/usr/bin/python3', '-a', 'v', '-k']), - '#! /usr/bin/python3 -vV') - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python', - ['-i', '/usr/bin/python3', '-a', 'Rs']), - '#! /usr/bin/python3 -Rs') - self.assertEqual( - self.pathfix( - '#! /usr/bin/env python -W default', - ['-i', '/usr/bin/python3', '-a', 's', '-k']), - '#! /usr/bin/python3 -sW default') - - def test_pathfix_adding_errors(self): - self.pathfix( - '#! /usr/bin/env python -E', - ['-i', '/usr/bin/python3', '-a', 'W default', '-k'], - exitcode=2, - stderr="-a option doesn't support whitespaces") - - -if __name__ == '__main__': - unittest.main() diff --git a/Lib/test/test_tools/test_reindent.py b/Lib/test/test_tools/test_reindent.py index 34df0c5d511904..3b0c793a38e4da 100644 --- a/Lib/test/test_tools/test_reindent.py +++ b/Lib/test/test_tools/test_reindent.py @@ -9,12 +9,12 @@ from test.support.script_helper import assert_python_ok from test.support import findfile -from test.test_tools import scriptsdir, skip_if_missing +from test.test_tools import toolsdir, skip_if_missing skip_if_missing() class ReindentTests(unittest.TestCase): - script = os.path.join(scriptsdir, 'reindent.py') + script = os.path.join(toolsdir, 'patchcheck', 'reindent.py') def test_noargs(self): assert_python_ok(self.script) diff --git a/Lib/test/test_tools/test_sundry.py b/Lib/test/test_tools/test_sundry.py index ed03c2f75a3aa5..81f06763980a32 100644 --- a/Lib/test/test_tools/test_sundry.py +++ b/Lib/test/test_tools/test_sundry.py @@ -19,17 +19,19 @@ class TestSundryScripts(unittest.TestCase): # added for a script it should be added to the allowlist below. # scripts that have independent tests. - allowlist = ['reindent', 'pdeps', 'gprof2html', 'md5sum'] + allowlist = ['reindent'] # scripts that can't be imported without running denylist = ['make_ctype'] - # scripts that use windows-only modules - windows_only = ['win_add2path'] # denylisted for other reasons other = ['2to3'] - skiplist = denylist + allowlist + windows_only + other + skiplist = denylist + allowlist + other - def test_sundry(self): + # import logging registers "atfork" functions which keep indirectly the + # logging module dictionary alive. Mock the function to be able to unload + # cleanly the logging module. 
+ @import_helper.mock_register_at_fork + def test_sundry(self, mock_os): old_modules = import_helper.modules_setup() try: for fn in os.listdir(scriptsdir): @@ -45,11 +47,6 @@ def test_sundry(self): # Unload all modules loaded in this test import_helper.modules_cleanup(*old_modules) - @unittest.skipIf(sys.platform != "win32", "Windows-only test") - def test_sundry_windows(self): - for name in self.windows_only: - import_tool(name) - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 4864b5c10b01bf..c17bbb48b65b2d 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -6,14 +6,21 @@ import sys import types import inspect +import importlib +import builtins import unittest import re +import tempfile +import random +import string from test import support +import shutil from test.support import (Error, captured_output, cpython_only, ALWAYS_EQ, requires_debug_ranges, has_no_debug_ranges, requires_subprocess) from test.support.os_helper import TESTFN, unlink from test.support.script_helper import assert_python_ok, assert_python_failure +from test.support.import_helper import forget import json import textwrap @@ -552,6 +559,23 @@ def f_with_binary_operator(): result_lines = self.get_exception(f_with_binary_operator) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_binary_operators_with_unicode(self): + def f_with_binary_operator(): + áóí = 20 + return 10 + áóí / 0 + 30 + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' + ' return 10 + áóí / 0 + 30\n' + ' ~~~~^~~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_binary_operators_two_char(self): def f_with_binary_operator(): divisor = 20 @@ -586,6 +610,23 @@ def f_with_subscript(): result_lines = self.get_exception(f_with_subscript) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_subscript_unicode(self): + def f_with_subscript(): + some_dict = {'ó': {'á': {'í': {'theta': 1}}}} + return some_dict['ó']['á']['í']['beta'] + + lineno_f = f_with_subscript.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_subscript\n' + " return some_dict['ó']['á']['í']['beta']\n" + ' ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^\n' + ) + result_lines = self.get_exception(f_with_subscript) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_traceback_specialization_with_syntax_error(self): bytecode = compile("1 / 0 / 1 / 2\n", TESTFN, "exec") @@ -798,6 +839,56 @@ def f(): ] self.assertEqual(actual, expected) + def test_wide_characters_unicode_with_problematic_byte_offset(self): + def f(): + width + + actual = self.get_exception(f) + expected = [ + f"Traceback (most recent call last):", + f" File \"{__file__}\", line {self.callable_line}, in get_exception", + f" callable()", + f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f", + f" width", + ] + self.assertEqual(actual, expected) + + + def test_byte_offset_with_wide_characters_middle(self): + def f(): + width = 1 + raise ValueError(width) + + 
actual = self.get_exception(f) + expected = [ + f"Traceback (most recent call last):", + f" File \"{__file__}\", line {self.callable_line}, in get_exception", + f" callable()", + f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", + f" raise ValueError(width)", + ] + self.assertEqual(actual, expected) + + def test_byte_offset_multiline(self): + def f(): + www = 1 + th = 0 + + print(1, www( + th)) + + actual = self.get_exception(f) + expected = [ + f"Traceback (most recent call last):", + f" File \"{__file__}\", line {self.callable_line}, in get_exception", + f" callable()", + f" File \"{__file__}\", line {f.__code__.co_firstlineno + 4}, in f", + f" print(1, www(", + f" ^^^^", + ] + self.assertEqual(actual, expected) + + @requires_debug_ranges() class PurePythonTracebackErrorCaretTests( @@ -2985,6 +3076,122 @@ def __getattribute__(self, attr): self.assertIn("Did you mean", actual) self.assertIn("bluch", actual) + def make_module(self, code): + tmpdir = Path(tempfile.mkdtemp()) + self.addCleanup(shutil.rmtree, tmpdir) + + sys.path.append(str(tmpdir)) + self.addCleanup(sys.path.pop) + + mod_name = ''.join(random.choices(string.ascii_letters, k=16)) + module = tmpdir / (mod_name + ".py") + module.write_text(code) + + return mod_name + + def get_import_from_suggestion(self, mod_dict, name): + modname = self.make_module(mod_dict) + + def callable(): + try: + exec(f"from {modname} import {name}") + except ImportError as e: + raise e from None + except Exception as e: + self.fail(f"Expected ImportError but got {type(e)}") + self.addCleanup(forget, modname) + + result_lines = self.get_exception( + callable, slice_start=-1, slice_end=None + ) + return result_lines[0] + + def test_import_from_suggestions(self): + substitution = textwrap.dedent("""\ + noise = more_noise = a = bc = None + blech = None + """) + + elimination = textwrap.dedent(""" + noise = more_noise = a = bc = None + blch = None + """) + + addition = textwrap.dedent(""" + noise = more_noise = a = bc = None + bluchin = None + """) + + substitutionOverElimination = textwrap.dedent(""" + blach = None + bluc = None + """) + + substitutionOverAddition = textwrap.dedent(""" + blach = None + bluchi = None + """) + + eliminationOverAddition = textwrap.dedent(""" + blucha = None + bluc = None + """) + + caseChangeOverSubstitution = textwrap.dedent(""" + Luch = None + fluch = None + BLuch = None + """) + + for code, suggestion in [ + (addition, "'bluchin'?"), + (substitution, "'blech'?"), + (elimination, "'blch'?"), + (addition, "'bluchin'?"), + (substitutionOverElimination, "'blach'?"), + (substitutionOverAddition, "'blach'?"), + (eliminationOverAddition, "'bluc'?"), + (caseChangeOverSubstitution, "'BLuch'?"), + ]: + actual = self.get_import_from_suggestion(code, 'bluch') + self.assertIn(suggestion, actual) + + def test_import_from_suggestions_do_not_trigger_for_long_attributes(self): + code = "blech = None" + + actual = self.get_suggestion(code, 'somethingverywrong') + self.assertNotIn("blech", actual) + + def test_import_from_error_bad_suggestions_do_not_trigger_for_small_names(self): + code = "vvv = mom = w = id = pytho = None" + + for name in ("b", "v", "m", "py"): + with self.subTest(name=name): + actual = self.get_import_from_suggestion(code, name) + self.assertNotIn("you mean", actual) + self.assertNotIn("vvv", actual) + self.assertNotIn("mom", actual) + self.assertNotIn("'id'", actual) + self.assertNotIn("'w'", actual) + self.assertNotIn("'pytho'", actual) + + def 
test_import_from_suggestions_do_not_trigger_for_big_namespaces(self): + # A module with lots of names will not be considered for suggestions. + chunks = [f"index_{index} = " for index in range(200)] + chunks.append(" None") + code = " ".join(chunks) + actual = self.get_import_from_suggestion(code, 'bluch') + self.assertNotIn("blech", actual) + + def test_import_from_error_with_bad_name(self): + def raise_attribute_error_with_bad_name(): + raise ImportError(name=12, obj=23, name_from=11) + + result_lines = self.get_exception( + raise_attribute_error_with_bad_name, slice_start=-1, slice_end=None + ) + self.assertNotIn("?", result_lines[-1]) + def test_name_error_suggestions(self): def Substitution(): noise = more_noise = a = bc = None @@ -3037,6 +3244,14 @@ def func(): actual = self.get_suggestion(func) self.assertIn("'ZeroDivisionError'?", actual) + def test_name_error_suggestions_from_builtins_when_builtins_is_module(self): + def func(): + custom_globals = globals().copy() + custom_globals["__builtins__"] = builtins + print(eval("ZeroDivisionErrrrr", custom_globals)) + actual = self.get_suggestion(func) + self.assertIn("'ZeroDivisionError'?", actual) + def test_name_error_suggestions_do_not_trigger_for_long_names(self): def func(): somethingverywronghehehehehehe = None @@ -3176,6 +3391,31 @@ def func(): actual = self.get_suggestion(func) self.assertNotIn("blech", actual) + def test_name_error_with_instance(self): + class A: + def __init__(self): + self.blech = None + def foo(self): + blich = 1 + x = blech + + instance = A() + actual = self.get_suggestion(instance.foo) + self.assertIn("self.blech", actual) + + def test_unbound_local_error_with_instance(self): + class A: + def __init__(self): + self.blech = None + def foo(self): + blich = 1 + x = blech + blech = 1 + + instance = A() + actual = self.get_suggestion(instance.foo) + self.assertNotIn("self.blech", actual) + def test_unbound_local_error_does_not_match(self): def func(): something = 3 @@ -3185,6 +3425,21 @@ def func(): actual = self.get_suggestion(func) self.assertNotIn("something", actual) + def test_name_error_for_stdlib_modules(self): + def func(): + stream = io.StringIO() + + actual = self.get_suggestion(func) + self.assertIn("forget to import 'io'", actual) + + def test_name_error_for_private_stdlib_modules(self): + def func(): + stream = _io.StringIO() + + actual = self.get_suggestion(func) + self.assertIn("forget to import '_io'", actual) + + class PurePythonSuggestionFormattingTests( PurePythonExceptionFormattingMixin, diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index e4cf3c7d53b657..6ff7f612867e62 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -42,7 +42,7 @@ import weakref import types -from test.support import import_helper, captured_stderr +from test.support import import_helper, captured_stderr, cpython_only from test import mod_generics_cache from test import _typed_dict_helper @@ -592,10 +592,9 @@ def test_no_duplicates_if_replacement_not_in_templates(self): class GenericAliasSubstitutionTests(BaseTestCase): """Tests for type variable substitution in generic aliases. - Note that the expected results here are tentative, based on a - still-being-worked-out spec for what we allow at runtime (given that - implementation of *full* substitution logic at runtime would add too much - complexity to typing.py). 
This spec is currently being discussed at + For variadic cases, these tests should be regarded as the source of truth, + since we hadn't realised the full complexity of variadic substitution + at the time of finalizing PEP 646. For full discussion, see https://github.com/python/cpython/issues/91162. """ @@ -682,9 +681,6 @@ class C(Generic[T1, T2]): pass ('generic[T1, T2]', '[tuple_type[int, ...]]', 'TypeError'), ('generic[T1, T2]', '[tuple_type[int, ...], tuple_type[str, ...]]', 'generic[tuple_type[int, ...], tuple_type[str, ...]]'), - # Should raise TypeError according to the tentative spec: unpacked - # types cannot be used as arguments to aliases that expect a fixed - # number of arguments. ('generic[T1, T2]', '[*tuple_type[int, ...]]', 'TypeError'), ('generic[T1, T2]', '[int, *tuple_type[str, ...]]', 'TypeError'), ('generic[T1, T2]', '[*tuple_type[int, ...], str]', 'TypeError'), @@ -692,10 +688,12 @@ class C(Generic[T1, T2]): pass ('generic[T1, T2]', '[*Ts]', 'TypeError'), ('generic[T1, T2]', '[T, *Ts]', 'TypeError'), ('generic[T1, T2]', '[*Ts, T]', 'TypeError'), - # Should raise TypeError according to the tentative spec: unpacked - # types cannot be used as arguments to generics that expect a fixed - # number of arguments. - # (None of the things in `generics` were defined using *Ts.) + # This one isn't technically valid - none of the things that + # `generic` can be (defined in `generics` above) are variadic, so we + # shouldn't really be able to do `generic[T1, *tuple_type[int, ...]]`. + # So even if type checkers shouldn't allow it, we allow it at + # runtime, in accordance with a general philosophy of "Keep the + # runtime lenient so people can experiment with typing constructs". ('generic[T1, *tuple_type[int, ...]]', '[str]', 'generic[str, *tuple_type[int, ...]]'), ] @@ -757,8 +755,6 @@ class C(Generic[*Ts]): pass generics = ['C', 'tuple', 'Tuple'] tuple_types = ['tuple', 'Tuple'] - # The majority of these have three separate cases for C, tuple and - # Tuple because tuple currently behaves differently. tests = [ # Alias # Args # Expected result ('generic[*Ts]', '[()]', 'generic[()]'), @@ -791,7 +787,11 @@ class C(Generic[*Ts]): pass ('generic[*Ts, list[T]]', '[int, str, bool]', 'generic[int, str, list[bool]]'), ('generic[T, *Ts]', '[*tuple_type[int, ...]]', 'generic[int, *tuple_type[int, ...]]'), + ('generic[T, *Ts]', '[str, *tuple_type[int, ...]]', 'generic[str, *tuple_type[int, ...]]'), + ('generic[T, *Ts]', '[*tuple_type[int, ...], str]', 'generic[int, *tuple_type[int, ...], str]'), ('generic[*Ts, T]', '[*tuple_type[int, ...]]', 'generic[*tuple_type[int, ...], int]'), + ('generic[*Ts, T]', '[str, *tuple_type[int, ...]]', 'generic[str, *tuple_type[int, ...], int]'), + ('generic[*Ts, T]', '[*tuple_type[int, ...], str]', 'generic[*tuple_type[int, ...], str]'), ('generic[T1, *Ts, T2]', '[*tuple_type[int, ...]]', 'generic[int, *tuple_type[int, ...], int]'), ('generic[T, str, *Ts]', '[*tuple_type[int, ...]]', 'generic[int, str, *tuple_type[int, ...]]'), ('generic[*Ts, str, T]', '[*tuple_type[int, ...]]', 'generic[*tuple_type[int, ...], str, int]'), @@ -830,13 +830,19 @@ class C(Generic[*Ts]): pass class UnpackTests(BaseTestCase): def test_accepts_single_type(self): + (*tuple[int],) Unpack[Tuple[int]] def test_rejects_multiple_types(self): with self.assertRaises(TypeError): Unpack[Tuple[int], Tuple[str]] + # We can't do the equivalent for `*` here - + # *(Tuple[int], Tuple[str]) is just plain tuple unpacking, + # which is valid. 
def test_rejects_multiple_parameterization(self): + with self.assertRaises(TypeError): + (*tuple[int],)[0][tuple[int]] with self.assertRaises(TypeError): Unpack[Tuple[int]][Tuple[int]] @@ -875,19 +881,20 @@ def test_cannot_call_instance(self): def test_unpacked_typevartuple_is_equal_to_itself(self): Ts = TypeVarTuple('Ts') + self.assertEqual((*Ts,)[0], (*Ts,)[0]) self.assertEqual(Unpack[Ts], Unpack[Ts]) def test_parameterised_tuple_is_equal_to_itself(self): Ts = TypeVarTuple('Ts') - self.assertEqual(tuple[Unpack[Ts]], tuple[Unpack[Ts]]) + self.assertEqual(tuple[*Ts], tuple[*Ts]) self.assertEqual(Tuple[Unpack[Ts]], Tuple[Unpack[Ts]]) def tests_tuple_arg_ordering_matters(self): Ts1 = TypeVarTuple('Ts1') Ts2 = TypeVarTuple('Ts2') self.assertNotEqual( - tuple[Unpack[Ts1], Unpack[Ts2]], - tuple[Unpack[Ts2], Unpack[Ts1]], + tuple[*Ts1, *Ts2], + tuple[*Ts2, *Ts1], ) self.assertNotEqual( Tuple[Unpack[Ts1], Unpack[Ts2]], @@ -896,8 +903,8 @@ def tests_tuple_arg_ordering_matters(self): def test_tuple_args_and_parameters_are_correct(self): Ts = TypeVarTuple('Ts') - t1 = tuple[Unpack[Ts]] - self.assertEqual(t1.__args__, (Unpack[Ts],)) + t1 = tuple[*Ts] + self.assertEqual(t1.__args__, (*Ts,)) self.assertEqual(t1.__parameters__, (Ts,)) t2 = Tuple[Unpack[Ts]] self.assertEqual(t2.__args__, (Unpack[Ts],)) @@ -907,128 +914,218 @@ def test_var_substitution(self): Ts = TypeVarTuple('Ts') T = TypeVar('T') T2 = TypeVar('T2') - class G(Generic[Unpack[Ts]]): pass + class G1(Generic[*Ts]): pass + class G2(Generic[Unpack[Ts]]): pass - for A in G, Tuple, tuple: - B = A[Unpack[Ts]] + for A in G1, G2, Tuple, tuple: + B = A[*Ts] self.assertEqual(B[()], A[()]) self.assertEqual(B[float], A[float]) self.assertEqual(B[float, str], A[float, str]) - C = List[A[Unpack[Ts]]] - self.assertEqual(C[()], List[A[()]]) - self.assertEqual(C[float], List[A[float]]) - self.assertEqual(C[float, str], List[A[float, str]]) + C = A[Unpack[Ts]] + self.assertEqual(C[()], A[()]) + self.assertEqual(C[float], A[float]) + self.assertEqual(C[float, str], A[float, str]) + + D = list[A[*Ts]] + self.assertEqual(D[()], list[A[()]]) + self.assertEqual(D[float], list[A[float]]) + self.assertEqual(D[float, str], list[A[float, str]]) + + E = List[A[Unpack[Ts]]] + self.assertEqual(E[()], List[A[()]]) + self.assertEqual(E[float], List[A[float]]) + self.assertEqual(E[float, str], List[A[float, str]]) - D = A[T, Unpack[Ts], T2] + F = A[T, *Ts, T2] with self.assertRaises(TypeError): - D[()] + F[()] with self.assertRaises(TypeError): - D[float] - self.assertEqual(D[float, str], A[float, str]) - self.assertEqual(D[float, str, int], A[float, str, int]) - self.assertEqual(D[float, str, int, bytes], A[float, str, int, bytes]) + F[float] + self.assertEqual(F[float, str], A[float, str]) + self.assertEqual(F[float, str, int], A[float, str, int]) + self.assertEqual(F[float, str, int, bytes], A[float, str, int, bytes]) - E = Tuple[List[T], A[Unpack[Ts]], List[T2]] + G = A[T, Unpack[Ts], T2] + with self.assertRaises(TypeError): + G[()] + with self.assertRaises(TypeError): + G[float] + self.assertEqual(G[float, str], A[float, str]) + self.assertEqual(G[float, str, int], A[float, str, int]) + self.assertEqual(G[float, str, int, bytes], A[float, str, int, bytes]) + + H = tuple[list[T], A[*Ts], list[T2]] + with self.assertRaises(TypeError): + H[()] + with self.assertRaises(TypeError): + H[float] + if A != Tuple: + self.assertEqual(H[float, str], + tuple[list[float], A[()], list[str]]) + self.assertEqual(H[float, str, int], + tuple[list[float], A[str], list[int]]) + 
self.assertEqual(H[float, str, int, bytes], + tuple[list[float], A[str, int], list[bytes]]) + + I = Tuple[List[T], A[Unpack[Ts]], List[T2]] with self.assertRaises(TypeError): - E[()] + I[()] with self.assertRaises(TypeError): - E[float] + I[float] if A != Tuple: - self.assertEqual(E[float, str], + self.assertEqual(I[float, str], Tuple[List[float], A[()], List[str]]) - self.assertEqual(E[float, str, int], + self.assertEqual(I[float, str, int], Tuple[List[float], A[str], List[int]]) - self.assertEqual(E[float, str, int, bytes], + self.assertEqual(I[float, str, int, bytes], Tuple[List[float], A[str, int], List[bytes]]) def test_bad_var_substitution(self): Ts = TypeVarTuple('Ts') T = TypeVar('T') T2 = TypeVar('T2') - class G(Generic[Unpack[Ts]]): pass + class G1(Generic[*Ts]): pass + class G2(Generic[Unpack[Ts]]): pass - for A in G, Tuple, tuple: + for A in G1, G2, Tuple, tuple: B = A[Ts] with self.assertRaises(TypeError): B[int, str] C = A[T, T2] + with self.assertRaises(TypeError): + C[*Ts] with self.assertRaises(TypeError): C[Unpack[Ts]] - def test_repr_is_correct(self): - Ts = TypeVarTuple('Ts') - T = TypeVar('T') - T2 = TypeVar('T2') - class G(Generic[Unpack[Ts]]): pass - - for A in G, Tuple: - B = A[T, Unpack[Ts], str, T2] + B = A[T, *Ts, str, T2] with self.assertRaises(TypeError): - B[int, Unpack[Ts]] + B[int, *Ts] + with self.assertRaises(TypeError): + B[int, *Ts, *Ts] + C = A[T, Unpack[Ts], str, T2] + with self.assertRaises(TypeError): + C[int, Unpack[Ts]] with self.assertRaises(TypeError): C[int, Unpack[Ts], Unpack[Ts]] def test_repr_is_correct(self): Ts = TypeVarTuple('Ts') + T = TypeVar('T') + T2 = TypeVar('T2') + + class G1(Generic[*Ts]): pass + class G2(Generic[Unpack[Ts]]): pass + self.assertEqual(repr(Ts), 'Ts') + + self.assertEqual(repr((*Ts,)[0]), '*Ts') self.assertEqual(repr(Unpack[Ts]), '*Ts') - self.assertEqual(repr(tuple[Unpack[Ts]]), 'tuple[*Ts]') + + self.assertEqual(repr(tuple[*Ts]), 'tuple[*Ts]') self.assertEqual(repr(Tuple[Unpack[Ts]]), 'typing.Tuple[*Ts]') - self.assertEqual(repr(Unpack[tuple[Unpack[Ts]]]), '*tuple[*Ts]') + + self.assertEqual(repr(*tuple[*Ts]), '*tuple[*Ts]') self.assertEqual(repr(Unpack[Tuple[Unpack[Ts]]]), '*typing.Tuple[*Ts]') def test_variadic_class_repr_is_correct(self): Ts = TypeVarTuple('Ts') - class A(Generic[Unpack[Ts]]): pass + class A(Generic[*Ts]): pass + class B(Generic[Unpack[Ts]]): pass self.assertEndsWith(repr(A[()]), 'A[()]') + self.assertEndsWith(repr(B[()]), 'B[()]') self.assertEndsWith(repr(A[float]), 'A[float]') + self.assertEndsWith(repr(B[float]), 'B[float]') self.assertEndsWith(repr(A[float, str]), 'A[float, str]') - self.assertEndsWith(repr(A[Unpack[tuple[int, ...]]]), + self.assertEndsWith(repr(B[float, str]), 'B[float, str]') + + self.assertEndsWith(repr(A[*tuple[int, ...]]), 'A[*tuple[int, ...]]') - self.assertEndsWith(repr(A[float, Unpack[tuple[int, ...]]]), + self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]]]), + 'B[*typing.Tuple[int, ...]]') + + self.assertEndsWith(repr(A[float, *tuple[int, ...]]), 'A[float, *tuple[int, ...]]') - self.assertEndsWith(repr(A[Unpack[tuple[int, ...]], str]), + self.assertEndsWith(repr(A[float, Unpack[Tuple[int, ...]]]), + 'A[float, *typing.Tuple[int, ...]]') + + self.assertEndsWith(repr(A[*tuple[int, ...], str]), 'A[*tuple[int, ...], str]') - self.assertEndsWith(repr(A[float, Unpack[tuple[int, ...]], str]), + self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]], str]), + 'B[*typing.Tuple[int, ...], str]') + + self.assertEndsWith(repr(A[float, *tuple[int, ...], str]), 'A[float, *tuple[int, 
...], str]') + self.assertEndsWith(repr(B[float, Unpack[Tuple[int, ...]], str]), + 'B[float, *typing.Tuple[int, ...], str]') def test_variadic_class_alias_repr_is_correct(self): Ts = TypeVarTuple('Ts') class A(Generic[Unpack[Ts]]): pass - B = A[Unpack[Ts]] + B = A[*Ts] self.assertEndsWith(repr(B), 'A[*Ts]') self.assertEndsWith(repr(B[()]), 'A[()]') self.assertEndsWith(repr(B[float]), 'A[float]') self.assertEndsWith(repr(B[float, str]), 'A[float, str]') - C = A[Unpack[Ts], int] - self.assertEndsWith(repr(C), 'A[*Ts, int]') - self.assertEndsWith(repr(C[()]), 'A[int]') - self.assertEndsWith(repr(C[float]), 'A[float, int]') - self.assertEndsWith(repr(C[float, str]), 'A[float, str, int]') + C = A[Unpack[Ts]] + self.assertEndsWith(repr(C), 'A[*Ts]') + self.assertEndsWith(repr(C[()]), 'A[()]') + self.assertEndsWith(repr(C[float]), 'A[float]') + self.assertEndsWith(repr(C[float, str]), 'A[float, str]') - D = A[int, Unpack[Ts]] - self.assertEndsWith(repr(D), 'A[int, *Ts]') + D = A[*Ts, int] + self.assertEndsWith(repr(D), 'A[*Ts, int]') self.assertEndsWith(repr(D[()]), 'A[int]') - self.assertEndsWith(repr(D[float]), 'A[int, float]') - self.assertEndsWith(repr(D[float, str]), 'A[int, float, str]') - - E = A[int, Unpack[Ts], str] - self.assertEndsWith(repr(E), 'A[int, *Ts, str]') - self.assertEndsWith(repr(E[()]), 'A[int, str]') - self.assertEndsWith(repr(E[float]), 'A[int, float, str]') - self.assertEndsWith(repr(E[float, str]), 'A[int, float, str, str]') - - F = A[Unpack[Ts], Unpack[tuple[str, ...]]] - self.assertEndsWith(repr(F), 'A[*Ts, *tuple[str, ...]]') - self.assertEndsWith(repr(F[()]), 'A[*tuple[str, ...]]') - self.assertEndsWith(repr(F[float]), 'A[float, *tuple[str, ...]]') - self.assertEndsWith(repr(F[float, str]), 'A[float, str, *tuple[str, ...]]') + self.assertEndsWith(repr(D[float]), 'A[float, int]') + self.assertEndsWith(repr(D[float, str]), 'A[float, str, int]') + + E = A[Unpack[Ts], int] + self.assertEndsWith(repr(E), 'A[*Ts, int]') + self.assertEndsWith(repr(E[()]), 'A[int]') + self.assertEndsWith(repr(E[float]), 'A[float, int]') + self.assertEndsWith(repr(E[float, str]), 'A[float, str, int]') + + F = A[int, *Ts] + self.assertEndsWith(repr(F), 'A[int, *Ts]') + self.assertEndsWith(repr(F[()]), 'A[int]') + self.assertEndsWith(repr(F[float]), 'A[int, float]') + self.assertEndsWith(repr(F[float, str]), 'A[int, float, str]') + + G = A[int, Unpack[Ts]] + self.assertEndsWith(repr(G), 'A[int, *Ts]') + self.assertEndsWith(repr(G[()]), 'A[int]') + self.assertEndsWith(repr(G[float]), 'A[int, float]') + self.assertEndsWith(repr(G[float, str]), 'A[int, float, str]') + + H = A[int, *Ts, str] + self.assertEndsWith(repr(H), 'A[int, *Ts, str]') + self.assertEndsWith(repr(H[()]), 'A[int, str]') + self.assertEndsWith(repr(H[float]), 'A[int, float, str]') + self.assertEndsWith(repr(H[float, str]), 'A[int, float, str, str]') + + I = A[int, Unpack[Ts], str] + self.assertEndsWith(repr(I), 'A[int, *Ts, str]') + self.assertEndsWith(repr(I[()]), 'A[int, str]') + self.assertEndsWith(repr(I[float]), 'A[int, float, str]') + self.assertEndsWith(repr(I[float, str]), 'A[int, float, str, str]') + + J = A[*Ts, *tuple[str, ...]] + self.assertEndsWith(repr(J), 'A[*Ts, *tuple[str, ...]]') + self.assertEndsWith(repr(J[()]), 'A[*tuple[str, ...]]') + self.assertEndsWith(repr(J[float]), 'A[float, *tuple[str, ...]]') + self.assertEndsWith(repr(J[float, str]), 'A[float, str, *tuple[str, ...]]') + + K = A[Unpack[Ts], Unpack[Tuple[str, ...]]] + self.assertEndsWith(repr(K), 'A[*Ts, *typing.Tuple[str, ...]]') + 
self.assertEndsWith(repr(K[()]), 'A[*typing.Tuple[str, ...]]') + self.assertEndsWith(repr(K[float]), 'A[float, *typing.Tuple[str, ...]]') + self.assertEndsWith(repr(K[float, str]), 'A[float, str, *typing.Tuple[str, ...]]') def test_cannot_subclass(self): with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): @@ -1037,39 +1134,69 @@ class C(TypeVarTuple): pass with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_INSTANCE % 'TypeVarTuple'): class C(Ts): pass - with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): - class C(*Ts): pass with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): class C(type(Unpack)): pass + with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): + class C(type(*Ts)): pass with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): class C(type(Unpack[Ts])): pass with self.assertRaisesRegex(TypeError, r'Cannot subclass typing\.Unpack'): class C(Unpack): pass + with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): + class C(*Ts): pass with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): class C(Unpack[Ts]): pass def test_variadic_class_args_are_correct(self): T = TypeVar('T') Ts = TypeVarTuple('Ts') - class A(Generic[Unpack[Ts]]): pass - B = A[()] - self.assertEqual(B.__args__, ()) - C = A[int] - self.assertEqual(C.__args__, (int,)) - D = A[int, str] - self.assertEqual(D.__args__, (int, str)) - E = A[T] - self.assertEqual(E.__args__, (T,)) - F = A[Unpack[Ts]] - self.assertEqual(F.__args__, (Unpack[Ts],)) - G = A[T, Unpack[Ts]] - self.assertEqual(G.__args__, (T, Unpack[Ts])) - H = A[Unpack[Ts], T] - self.assertEqual(H.__args__, (Unpack[Ts], T)) + class A(Generic[*Ts]): pass + class B(Generic[Unpack[Ts]]): pass + + C = A[()] + D = B[()] + self.assertEqual(C.__args__, ()) + self.assertEqual(D.__args__, ()) + + E = A[int] + F = B[int] + self.assertEqual(E.__args__, (int,)) + self.assertEqual(F.__args__, (int,)) + + G = A[int, str] + H = B[int, str] + self.assertEqual(G.__args__, (int, str)) + self.assertEqual(H.__args__, (int, str)) + + I = A[T] + J = B[T] + self.assertEqual(I.__args__, (T,)) + self.assertEqual(J.__args__, (T,)) + + K = A[*Ts] + L = B[Unpack[Ts]] + self.assertEqual(K.__args__, (*Ts,)) + self.assertEqual(L.__args__, (Unpack[Ts],)) + + M = A[T, *Ts] + N = B[T, Unpack[Ts]] + self.assertEqual(M.__args__, (T, *Ts)) + self.assertEqual(N.__args__, (T, Unpack[Ts])) + + O = A[*Ts, T] + P = B[Unpack[Ts], T] + self.assertEqual(O.__args__, (*Ts, T)) + self.assertEqual(P.__args__, (Unpack[Ts], T)) def test_variadic_class_origin_is_correct(self): Ts = TypeVarTuple('Ts') + + class C(Generic[*Ts]): pass + self.assertIs(C[int].__origin__, C) + self.assertIs(C[T].__origin__, C) + self.assertIs(C[Unpack[Ts]].__origin__, C) + class D(Generic[Unpack[Ts]]): pass self.assertIs(D[int].__origin__, D) self.assertIs(D[T].__origin__, D) @@ -1078,21 +1205,21 @@ class D(Generic[Unpack[Ts]]): pass def test_tuple_args_are_correct(self): Ts = TypeVarTuple('Ts') - self.assertEqual(tuple[Unpack[Ts]].__args__, (Unpack[Ts],)) + self.assertEqual(tuple[*Ts].__args__, (*Ts,)) self.assertEqual(Tuple[Unpack[Ts]].__args__, (Unpack[Ts],)) - self.assertEqual(tuple[Unpack[Ts], int].__args__, (Unpack[Ts], int)) + self.assertEqual(tuple[*Ts, int].__args__, (*Ts, int)) self.assertEqual(Tuple[Unpack[Ts], int].__args__, (Unpack[Ts], int)) - self.assertEqual(tuple[int, Unpack[Ts]].__args__, (int, Unpack[Ts])) + self.assertEqual(tuple[int, *Ts].__args__, (int, *Ts)) self.assertEqual(Tuple[int, Unpack[Ts]].__args__, (int, Unpack[Ts])) - 
self.assertEqual(tuple[int, Unpack[Ts], str].__args__, - (int, Unpack[Ts], str)) + self.assertEqual(tuple[int, *Ts, str].__args__, + (int, *Ts, str)) self.assertEqual(Tuple[int, Unpack[Ts], str].__args__, (int, Unpack[Ts], str)) - self.assertEqual(tuple[Unpack[Ts], int].__args__, (Unpack[Ts], int)) + self.assertEqual(tuple[*Ts, int].__args__, (*Ts, int)) self.assertEqual(Tuple[Unpack[Ts]].__args__, (Unpack[Ts],)) def test_callable_args_are_correct(self): @@ -1102,63 +1229,97 @@ def test_callable_args_are_correct(self): # TypeVarTuple in the arguments - a = Callable[[Unpack[Ts]], None] - self.assertEqual(a.__args__, (Unpack[Ts], type(None))) + a = Callable[[*Ts], None] + b = Callable[[Unpack[Ts]], None] + self.assertEqual(a.__args__, (*Ts, type(None))) + self.assertEqual(b.__args__, (Unpack[Ts], type(None))) - b = Callable[[int, Unpack[Ts]], None] - self.assertEqual(b.__args__, (int, Unpack[Ts], type(None))) + c = Callable[[int, *Ts], None] + d = Callable[[int, Unpack[Ts]], None] + self.assertEqual(c.__args__, (int, *Ts, type(None))) + self.assertEqual(d.__args__, (int, Unpack[Ts], type(None))) - c = Callable[[Unpack[Ts], int], None] - self.assertEqual(c.__args__, (Unpack[Ts], int, type(None))) + e = Callable[[*Ts, int], None] + f = Callable[[Unpack[Ts], int], None] + self.assertEqual(e.__args__, (*Ts, int, type(None))) + self.assertEqual(f.__args__, (Unpack[Ts], int, type(None))) - d = Callable[[str, Unpack[Ts], int], None] - self.assertEqual(d.__args__, (str, Unpack[Ts], int, type(None))) + g = Callable[[str, *Ts, int], None] + h = Callable[[str, Unpack[Ts], int], None] + self.assertEqual(g.__args__, (str, *Ts, int, type(None))) + self.assertEqual(h.__args__, (str, Unpack[Ts], int, type(None))) # TypeVarTuple as the return - e = Callable[[None], Unpack[Ts]] - self.assertEqual(e.__args__, (type(None), Unpack[Ts])) + i = Callable[[None], *Ts] + j = Callable[[None], Unpack[Ts]] + self.assertEqual(i.__args__, (type(None), *Ts)) + self.assertEqual(i.__args__, (type(None), Unpack[Ts])) - f = Callable[[None], tuple[int, Unpack[Ts]]] - self.assertEqual(f.__args__, (type(None), tuple[int, Unpack[Ts]])) + k = Callable[[None], tuple[int, *Ts]] + l = Callable[[None], Tuple[int, Unpack[Ts]]] + self.assertEqual(k.__args__, (type(None), tuple[int, *Ts])) + self.assertEqual(l.__args__, (type(None), Tuple[int, Unpack[Ts]])) - g = Callable[[None], tuple[Unpack[Ts], int]] - self.assertEqual(g.__args__, (type(None), tuple[Unpack[Ts], int])) + m = Callable[[None], tuple[*Ts, int]] + n = Callable[[None], Tuple[Unpack[Ts], int]] + self.assertEqual(m.__args__, (type(None), tuple[*Ts, int])) + self.assertEqual(n.__args__, (type(None), Tuple[Unpack[Ts], int])) - h = Callable[[None], tuple[str, Unpack[Ts], int]] - self.assertEqual(h.__args__, (type(None), tuple[str, Unpack[Ts], int])) + o = Callable[[None], tuple[str, *Ts, int]] + p = Callable[[None], Tuple[str, Unpack[Ts], int]] + self.assertEqual(o.__args__, (type(None), tuple[str, *Ts, int])) + self.assertEqual(p.__args__, (type(None), Tuple[str, Unpack[Ts], int])) # TypeVarTuple in both - i = Callable[[Unpack[Ts]], Unpack[Ts]] - self.assertEqual(i.__args__, (Unpack[Ts], Unpack[Ts])) + q = Callable[[*Ts], *Ts] + r = Callable[[Unpack[Ts]], Unpack[Ts]] + self.assertEqual(q.__args__, (*Ts, *Ts)) + self.assertEqual(r.__args__, (Unpack[Ts], Unpack[Ts])) - j = Callable[[Unpack[Ts1]], Unpack[Ts2]] - self.assertEqual(j.__args__, (Unpack[Ts1], Unpack[Ts2])) + s = Callable[[*Ts1], *Ts2] + u = Callable[[Unpack[Ts1]], Unpack[Ts2]] + self.assertEqual(s.__args__, (*Ts1, 
*Ts2)) + self.assertEqual(u.__args__, (Unpack[Ts1], Unpack[Ts2])) def test_variadic_class_with_duplicate_typevartuples_fails(self): Ts1 = TypeVarTuple('Ts1') Ts2 = TypeVarTuple('Ts2') + + with self.assertRaises(TypeError): + class C(Generic[*Ts1, *Ts1]): pass with self.assertRaises(TypeError): class C(Generic[Unpack[Ts1], Unpack[Ts1]]): pass + + with self.assertRaises(TypeError): + class C(Generic[*Ts1, *Ts2, *Ts1]): pass with self.assertRaises(TypeError): class C(Generic[Unpack[Ts1], Unpack[Ts2], Unpack[Ts1]]): pass def test_type_concatenation_in_variadic_class_argument_list_succeeds(self): Ts = TypeVarTuple('Ts') class C(Generic[Unpack[Ts]]): pass + + C[int, *Ts] C[int, Unpack[Ts]] + + C[*Ts, int] C[Unpack[Ts], int] + + C[int, *Ts, str] C[int, Unpack[Ts], str] + + C[int, bool, *Ts, float, str] C[int, bool, Unpack[Ts], float, str] def test_type_concatenation_in_tuple_argument_list_succeeds(self): Ts = TypeVarTuple('Ts') - tuple[int, Unpack[Ts]] - tuple[Unpack[Ts], int] - tuple[int, Unpack[Ts], str] - tuple[int, bool, Unpack[Ts], float, str] + tuple[int, *Ts] + tuple[*Ts, int] + tuple[int, *Ts, str] + tuple[int, bool, *Ts, float, str] Tuple[int, Unpack[Ts]] Tuple[Unpack[Ts], int] @@ -1172,6 +1333,8 @@ class C(Generic[Ts]): pass def test_variadic_class_definition_using_concrete_types_fails(self): Ts = TypeVarTuple('Ts') + with self.assertRaises(TypeError): + class F(Generic[*Ts, int]): pass with self.assertRaises(TypeError): class E(Generic[Unpack[Ts], int]): pass @@ -1180,32 +1343,50 @@ def test_variadic_class_with_2_typevars_accepts_2_or_more_args(self): T1 = TypeVar('T1') T2 = TypeVar('T2') - class A(Generic[T1, T2, Unpack[Ts]]): pass + class A(Generic[T1, T2, *Ts]): pass A[int, str] A[int, str, float] A[int, str, float, bool] - class B(Generic[T1, Unpack[Ts], T2]): pass + class B(Generic[T1, T2, Unpack[Ts]]): pass B[int, str] B[int, str, float] B[int, str, float, bool] - class C(Generic[Unpack[Ts], T1, T2]): pass + class C(Generic[T1, *Ts, T2]): pass C[int, str] C[int, str, float] C[int, str, float, bool] + class D(Generic[T1, Unpack[Ts], T2]): pass + D[int, str] + D[int, str, float] + D[int, str, float, bool] + + class E(Generic[*Ts, T1, T2]): pass + E[int, str] + E[int, str, float] + E[int, str, float, bool] + + class F(Generic[Unpack[Ts], T1, T2]): pass + F[int, str] + F[int, str, float] + F[int, str, float, bool] + def test_variadic_args_annotations_are_correct(self): Ts = TypeVarTuple('Ts') + def f(*args: Unpack[Ts]): pass + def g(*args: *Ts): pass self.assertEqual(f.__annotations__, {'args': Unpack[Ts]}) + self.assertEqual(g.__annotations__, {'args': (*Ts,)[0]}) def test_variadic_args_with_ellipsis_annotations_are_correct(self): Ts = TypeVarTuple('Ts') - def a(*args: Unpack[tuple[int, ...]]): pass + def a(*args: *tuple[int, ...]): pass self.assertEqual(a.__annotations__, - {'args': Unpack[tuple[int, ...]]}) + {'args': (*tuple[int, ...],)[0]}) def b(*args: Unpack[Tuple[int, ...]]): pass self.assertEqual(b.__annotations__, @@ -1214,30 +1395,30 @@ def b(*args: Unpack[Tuple[int, ...]]): pass def test_concatenation_in_variadic_args_annotations_are_correct(self): Ts = TypeVarTuple('Ts') - # Unpacking using `Unpack`, native `tuple` type + # Unpacking using `*`, native `tuple` type - def a(*args: Unpack[tuple[int, Unpack[Ts]]]): pass + def a(*args: *tuple[int, *Ts]): pass self.assertEqual( a.__annotations__, - {'args': Unpack[tuple[int, Unpack[Ts]]]}, + {'args': (*tuple[int, *Ts],)[0]}, ) - def b(*args: Unpack[tuple[Unpack[Ts], int]]): pass + def b(*args: *tuple[*Ts, int]): pass 
self.assertEqual( b.__annotations__, - {'args': Unpack[tuple[Unpack[Ts], int]]}, + {'args': (*tuple[*Ts, int],)[0]}, ) - def c(*args: Unpack[tuple[str, Unpack[Ts], int]]): pass + def c(*args: *tuple[str, *Ts, int]): pass self.assertEqual( c.__annotations__, - {'args': Unpack[tuple[str, Unpack[Ts], int]]}, + {'args': (*tuple[str, *Ts, int],)[0]}, ) - def d(*args: Unpack[tuple[int, bool, Unpack[Ts], float, str]]): pass + def d(*args: *tuple[int, bool, *Ts, float, str]): pass self.assertEqual( d.__annotations__, - {'args': Unpack[tuple[int, bool, Unpack[Ts], float, str]]}, + {'args': (*tuple[int, bool, *Ts, float, str],)[0]}, ) # Unpacking using `Unpack`, `Tuple` type from typing.py @@ -1268,47 +1449,78 @@ def h(*args: Unpack[Tuple[int, bool, Unpack[Ts], float, str]]): pass def test_variadic_class_same_args_results_in_equalty(self): Ts = TypeVarTuple('Ts') - class C(Generic[Unpack[Ts]]): pass + class C(Generic[*Ts]): pass + class D(Generic[Unpack[Ts]]): pass self.assertEqual(C[int], C[int]) + self.assertEqual(D[int], D[int]) Ts1 = TypeVarTuple('Ts1') Ts2 = TypeVarTuple('Ts2') + + self.assertEqual( + C[*Ts1], + C[*Ts1], + ) + self.assertEqual( + D[Unpack[Ts1]], + D[Unpack[Ts1]], + ) + + self.assertEqual( + C[*Ts1, *Ts2], + C[*Ts1, *Ts2], + ) self.assertEqual( - C[Unpack[Ts1]], - C[Unpack[Ts1]], + D[Unpack[Ts1], Unpack[Ts2]], + D[Unpack[Ts1], Unpack[Ts2]], ) + self.assertEqual( - C[Unpack[Ts1], Unpack[Ts2]], - C[Unpack[Ts1], Unpack[Ts2]], + C[int, *Ts1, *Ts2], + C[int, *Ts1, *Ts2], ) self.assertEqual( - C[int, Unpack[Ts1], Unpack[Ts2]], - C[int, Unpack[Ts1], Unpack[Ts2]], + D[int, Unpack[Ts1], Unpack[Ts2]], + D[int, Unpack[Ts1], Unpack[Ts2]], ) def test_variadic_class_arg_ordering_matters(self): Ts = TypeVarTuple('Ts') - class C(Generic[Unpack[Ts]]): pass + class C(Generic[*Ts]): pass + class D(Generic[Unpack[Ts]]): pass self.assertNotEqual( C[int, str], C[str, int], ) + self.assertNotEqual( + D[int, str], + D[str, int], + ) Ts1 = TypeVarTuple('Ts1') Ts2 = TypeVarTuple('Ts2') + + self.assertNotEqual( + C[*Ts1, *Ts2], + C[*Ts2, *Ts1], + ) self.assertNotEqual( - C[Unpack[Ts1], Unpack[Ts2]], - C[Unpack[Ts2], Unpack[Ts1]], + D[Unpack[Ts1], Unpack[Ts2]], + D[Unpack[Ts2], Unpack[Ts1]], ) def test_variadic_class_arg_typevartuple_identity_matters(self): Ts = TypeVarTuple('Ts') - class C(Generic[Unpack[Ts]]): pass Ts1 = TypeVarTuple('Ts1') Ts2 = TypeVarTuple('Ts2') - self.assertNotEqual(C[Unpack[Ts1]], C[Unpack[Ts2]]) + + class C(Generic[*Ts]): pass + class D(Generic[Unpack[Ts]]): pass + + self.assertNotEqual(C[*Ts1], C[*Ts2]) + self.assertNotEqual(D[Unpack[Ts1]], D[Unpack[Ts2]]) class TypeVarTuplePicklingTests(BaseTestCase): @@ -1328,10 +1540,15 @@ def test_pickling_then_unpickling_results_in_same_identity(self, proto): def test_pickling_then_unpickling_unpacked_results_in_same_identity(self, proto): global global_Ts # See explanation at start of class. 
global_Ts = TypeVarTuple('global_Ts') - unpacked1 = Unpack[global_Ts] + + unpacked1 = (*global_Ts,)[0] unpacked2 = pickle.loads(pickle.dumps(unpacked1, proto)) self.assertIs(unpacked1, unpacked2) + unpacked3 = Unpack[global_Ts] + unpacked4 = pickle.loads(pickle.dumps(unpacked3, proto)) + self.assertIs(unpacked3, unpacked4) + @all_pickle_protocols def test_pickling_then_unpickling_tuple_with_typevartuple_equality( self, proto @@ -1340,17 +1557,19 @@ def test_pickling_then_unpickling_tuple_with_typevartuple_equality( global_T = TypeVar('global_T') global_Ts = TypeVarTuple('global_Ts') - a1 = Tuple[Unpack[global_Ts]] - a2 = pickle.loads(pickle.dumps(a1, proto)) - self.assertEqual(a1, a2) + tuples = [ + tuple[*global_Ts], + Tuple[Unpack[global_Ts]], - a1 = Tuple[T, Unpack[global_Ts]] - a2 = pickle.loads(pickle.dumps(a1, proto)) - self.assertEqual(a1, a2) + tuple[T, *global_Ts], + Tuple[T, Unpack[global_Ts]], - a1 = Tuple[int, Unpack[global_Ts]] - a2 = pickle.loads(pickle.dumps(a1, proto)) - self.assertEqual(a1, a2) + tuple[int, *global_Ts], + Tuple[int, Unpack[global_Ts]], + ] + for t in tuples: + t2 = pickle.loads(pickle.dumps(t, proto)) + self.assertEqual(t, t2) class UnionTests(BaseTestCase): @@ -2681,7 +2900,7 @@ class CP(P[int]): self.assertEqual(x.bar, 'abc') self.assertEqual(x.x, 1) self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) - s = pickle.dumps(P) + s = pickle.dumps(P, proto) D = pickle.loads(s) class E: @@ -3575,11 +3794,11 @@ class D(C): self.assertEqual(D.__parameters__, ()) - with self.assertRaises(Exception): + with self.assertRaises(TypeError): D[int] - with self.assertRaises(Exception): + with self.assertRaises(TypeError): D[Any] - with self.assertRaises(Exception): + with self.assertRaises(TypeError): D[T] def test_new_with_args(self): @@ -3697,6 +3916,34 @@ class A: # C version of GenericAlias self.assertEqual(list[A()].__parameters__, (T,)) + def test_non_generic_subscript(self): + T = TypeVar('T') + class G(Generic[T]): + pass + class A: + __parameters__ = (T,) + + for s in (int, G, A, List, list, + TypeVar, TypeVarTuple, ParamSpec, + types.GenericAlias, types.UnionType): + + for t in Tuple, tuple: + with self.subTest(tuple=t, sub=s): + self.assertEqual(t[s, T][int], t[s, int]) + self.assertEqual(t[T, s][int], t[int, s]) + a = t[s] + with self.assertRaises(TypeError): + a[int] + + for c in Callable, collections.abc.Callable: + with self.subTest(callable=c, sub=s): + self.assertEqual(c[[s], T][int], c[[s], int]) + self.assertEqual(c[[T], s][int], c[[int], s]) + a = c[[s], s] + with self.assertRaises(TypeError): + a[int] + + class ClassVarTests(BaseTestCase): def test_basics(self): @@ -4398,7 +4645,7 @@ def method(self): ... class OverloadTests(BaseTestCase): def test_overload_fails(self): - with self.assertRaises(RuntimeError): + with self.assertRaises(NotImplementedError): @overload def blah(): @@ -4416,6 +4663,7 @@ def blah(): blah() + @cpython_only # gh-98713 def test_overload_on_compiled_functions(self): with patch("typing._overload_registry", defaultdict(lambda: defaultdict(dict))): @@ -4936,6 +5184,7 @@ def h(x: collections.abc.Callable[P, int]): ... 
class GetUtilitiesTestCase(TestCase): def test_get_origin(self): T = TypeVar('T') + Ts = TypeVarTuple('Ts') P = ParamSpec('P') class C(Generic[T]): pass self.assertIs(get_origin(C[int]), C) @@ -4959,6 +5208,10 @@ class C(Generic[T]): pass self.assertIs(get_origin(P.kwargs), P) self.assertIs(get_origin(Required[int]), Required) self.assertIs(get_origin(NotRequired[int]), NotRequired) + self.assertIs(get_origin((*Ts,)[0]), Unpack) + self.assertIs(get_origin(Unpack[Ts]), Unpack) + self.assertIs(get_origin((*tuple[*Ts],)[0]), tuple) + self.assertIs(get_origin(Unpack[Tuple[Unpack[Ts]]]), Unpack) def test_get_args(self): T = TypeVar('T') @@ -5020,8 +5273,12 @@ class C(Generic[T]): pass self.assertEqual(get_args(TypeGuard[int]), (int,)) Ts = TypeVarTuple('Ts') self.assertEqual(get_args(Ts), ()) + self.assertEqual(get_args((*Ts,)[0]), (Ts,)) self.assertEqual(get_args(Unpack[Ts]), (Ts,)) + self.assertEqual(get_args(tuple[*Ts]), (*Ts,)) self.assertEqual(get_args(tuple[Unpack[Ts]]), (Unpack[Ts],)) + self.assertEqual(get_args((*tuple[*Ts],)[0]), (*Ts,)) + self.assertEqual(get_args(Unpack[tuple[Unpack[Ts]]]), (tuple[Unpack[Ts]],)) class CollectionsAbcTests(BaseTestCase): @@ -6699,69 +6956,112 @@ def test_typevar_subst(self): T1 = TypeVar('T1') T2 = TypeVar('T2') - A = Annotated[Tuple[Unpack[Ts]], dec] - self.assertEqual(A[int], Annotated[Tuple[int], dec]) - self.assertEqual(A[str, int], Annotated[Tuple[str, int], dec]) + A = Annotated[tuple[*Ts], dec] + self.assertEqual(A[int], Annotated[tuple[int], dec]) + self.assertEqual(A[str, int], Annotated[tuple[str, int], dec]) with self.assertRaises(TypeError): - Annotated[Unpack[Ts], dec] + Annotated[*Ts, dec] - B = Annotated[Tuple[T, Unpack[Ts]], dec] + B = Annotated[Tuple[Unpack[Ts]], dec] self.assertEqual(B[int], Annotated[Tuple[int], dec]) - self.assertEqual(B[int, str], Annotated[Tuple[int, str], dec]) - self.assertEqual( - B[int, str, float], - Annotated[Tuple[int, str, float], dec] - ) + self.assertEqual(B[str, int], Annotated[Tuple[str, int], dec]) with self.assertRaises(TypeError): - B[()] + Annotated[Unpack[Ts], dec] - C = Annotated[Tuple[Unpack[Ts], T], dec] - self.assertEqual(C[int], Annotated[Tuple[int], dec]) - self.assertEqual(C[int, str], Annotated[Tuple[int, str], dec]) + C = Annotated[tuple[T, *Ts], dec] + self.assertEqual(C[int], Annotated[tuple[int], dec]) + self.assertEqual(C[int, str], Annotated[tuple[int, str], dec]) self.assertEqual( C[int, str, float], - Annotated[Tuple[int, str, float], dec] + Annotated[tuple[int, str, float], dec] ) with self.assertRaises(TypeError): C[()] - D = Annotated[Tuple[T1, Unpack[Ts], T2], dec] + D = Annotated[Tuple[T, Unpack[Ts]], dec] + self.assertEqual(D[int], Annotated[Tuple[int], dec]) self.assertEqual(D[int, str], Annotated[Tuple[int, str], dec]) self.assertEqual( D[int, str, float], Annotated[Tuple[int, str, float], dec] ) + with self.assertRaises(TypeError): + D[()] + + E = Annotated[tuple[*Ts, T], dec] + self.assertEqual(E[int], Annotated[tuple[int], dec]) + self.assertEqual(E[int, str], Annotated[tuple[int, str], dec]) self.assertEqual( - D[int, str, bool, float], - Annotated[Tuple[int, str, bool, float], dec] + E[int, str, float], + Annotated[tuple[int, str, float], dec] ) with self.assertRaises(TypeError): - D[int] - - # Now let's try creating an alias from an alias. 
+ E[()] - Ts2 = TypeVarTuple('Ts2') - T3 = TypeVar('T3') - T4 = TypeVar('T4') + F = Annotated[Tuple[Unpack[Ts], T], dec] + self.assertEqual(F[int], Annotated[Tuple[int], dec]) + self.assertEqual(F[int, str], Annotated[Tuple[int, str], dec]) + self.assertEqual( + F[int, str, float], + Annotated[Tuple[int, str, float], dec] + ) + with self.assertRaises(TypeError): + F[()] - E = D[T3, Unpack[Ts2], T4] + G = Annotated[tuple[T1, *Ts, T2], dec] + self.assertEqual(G[int, str], Annotated[tuple[int, str], dec]) self.assertEqual( - E, - Annotated[Tuple[T3, Unpack[Ts2], T4], dec] + G[int, str, float], + Annotated[tuple[int, str, float], dec] ) self.assertEqual( - E[int, str], Annotated[Tuple[int, str], dec] + G[int, str, bool, float], + Annotated[tuple[int, str, bool, float], dec] ) + with self.assertRaises(TypeError): + G[int] + + H = Annotated[Tuple[T1, Unpack[Ts], T2], dec] + self.assertEqual(H[int, str], Annotated[Tuple[int, str], dec]) self.assertEqual( - E[int, str, float], + H[int, str, float], Annotated[Tuple[int, str, float], dec] ) self.assertEqual( - E[int, str, bool, float], + H[int, str, bool, float], Annotated[Tuple[int, str, bool, float], dec] ) with self.assertRaises(TypeError): - E[int] + H[int] + + # Now let's try creating an alias from an alias. + + Ts2 = TypeVarTuple('Ts2') + T3 = TypeVar('T3') + T4 = TypeVar('T4') + + # G is Annotated[tuple[T1, *Ts, T2], dec]. + I = G[T3, *Ts2, T4] + J = G[T3, Unpack[Ts2], T4] + + for x, y in [ + (I, Annotated[tuple[T3, *Ts2, T4], dec]), + (J, Annotated[tuple[T3, Unpack[Ts2], T4], dec]), + (I[int, str], Annotated[tuple[int, str], dec]), + (J[int, str], Annotated[tuple[int, str], dec]), + (I[int, str, float], Annotated[tuple[int, str, float], dec]), + (J[int, str, float], Annotated[tuple[int, str, float], dec]), + (I[int, str, bool, float], + Annotated[tuple[int, str, bool, float], dec]), + (J[int, str, bool, float], + Annotated[tuple[int, str, bool, float], dec]), + ]: + self.assertEqual(x, y) + + with self.assertRaises(TypeError): + I[int] + with self.assertRaises(TypeError): + J[int] def test_annotated_in_other_types(self): X = List[Annotated[T, 5]] diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 30faaaf83bec96..5465d35534f111 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -16,7 +16,6 @@ import unicodedata import unittest import warnings -from test.support import import_helper from test.support import warnings_helper from test import support, string_tests from test.support.script_helper import assert_python_failure @@ -241,6 +240,10 @@ def test_count(self): self.checkequal(0, 'a' * 10, 'count', 'a\u0102') self.checkequal(0, 'a' * 10, 'count', 'a\U00100304') self.checkequal(0, '\u0102' * 10, 'count', '\u0102\U00100304') + # test subclass + class MyStr(str): + pass + self.checkequal(3, MyStr('aaa'), 'count', 'a') def test_find(self): string_tests.CommonTest.test_find(self) @@ -257,6 +260,20 @@ def test_find(self): self.checkequalnofix(9, 'abcdefghiabc', 'find', 'abc', 1) self.checkequalnofix(-1, 'abcdefghiabc', 'find', 'def', 4) + # test utf-8 non-ascii char + self.checkequal(0, 'тест', 'find', 'т') + self.checkequal(3, 'тест', 'find', 'т', 1) + self.checkequal(-1, 'тест', 'find', 'т', 1, 3) + self.checkequal(-1, 'тест', 'find', 'e') # english `e` + # test utf-8 non-ascii slice + self.checkequal(1, 'тест тест', 'find', 'ес') + self.checkequal(1, 'тест тест', 'find', 'ес', 1) + self.checkequal(1, 'тест тест', 'find', 'ес', 1, 3) + self.checkequal(6, 'тест тест', 'find', 'ес', 2) + self.checkequal(-1, 
'тест тест', 'find', 'ес', 6, 7) + self.checkequal(-1, 'тест тест', 'find', 'ес', 7) + self.checkequal(-1, 'тест тест', 'find', 'ec') # english `ec` + self.assertRaises(TypeError, 'hello'.find) self.assertRaises(TypeError, 'hello'.find, 42) # test mixed kinds @@ -287,6 +304,19 @@ def test_rfind(self): self.checkequalnofix(9, 'abcdefghiabc', 'rfind', 'abc') self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '') self.checkequalnofix(12, 'abcdefghiabc', 'rfind', '') + # test utf-8 non-ascii char + self.checkequal(1, 'тест', 'rfind', 'е') + self.checkequal(1, 'тест', 'rfind', 'е', 1) + self.checkequal(-1, 'тест', 'rfind', 'е', 2) + self.checkequal(-1, 'тест', 'rfind', 'e') # english `e` + # test utf-8 non-ascii slice + self.checkequal(6, 'тест тест', 'rfind', 'ес') + self.checkequal(6, 'тест тест', 'rfind', 'ес', 1) + self.checkequal(1, 'тест тест', 'rfind', 'ес', 1, 3) + self.checkequal(6, 'тест тест', 'rfind', 'ес', 2) + self.checkequal(-1, 'тест тест', 'rfind', 'ес', 6, 7) + self.checkequal(-1, 'тест тест', 'rfind', 'ес', 7) + self.checkequal(-1, 'тест тест', 'rfind', 'ec') # english `ec` # test mixed kinds self.checkequal(0, 'a' + '\u0102' * 100, 'rfind', 'a') self.checkequal(0, 'a' + '\U00100304' * 100, 'rfind', 'a') @@ -441,10 +471,10 @@ def test_split(self): def test_rsplit(self): string_tests.CommonTest.test_rsplit(self) # test mixed kinds - for left, right in ('ba', '\u0101\u0100', '\U00010301\U00010300'): + for left, right in ('ba', 'юё', '\u0101\u0100', '\U00010301\U00010300'): left *= 9 right *= 9 - for delim in ('c', '\u0102', '\U00010302'): + for delim in ('c', 'ы', '\u0102', '\U00010302'): self.checkequal([left + right], left + right, 'rsplit', delim) self.checkequal([left, right], @@ -454,6 +484,10 @@ def test_rsplit(self): self.checkequal([left, right], left + delim * 2 + right, 'rsplit', delim *2) + # Check `None` as well: + self.checkequal([left + right], + left + right, 'rsplit', None) + def test_partition(self): string_tests.MixinStrUnicodeUserStringTest.test_partition(self) # test mixed kinds @@ -2345,12 +2379,7 @@ class s1: def __repr__(self): return '\\n' - class s2: - def __repr__(self): - return '\\n' - self.assertEqual(repr(s1()), '\\n') - self.assertEqual(repr(s2()), '\\n') def test_printable_repr(self): self.assertEqual(repr('\U00010000'), "'%c'" % (0x10000,)) # printable @@ -2593,482 +2622,6 @@ def test_check_encoding_errors(self): self.assertEqual(proc.rc, 10, proc) -class CAPITest(unittest.TestCase): - - # Test PyUnicode_FromFormat() - def test_from_format(self): - import_helper.import_module('ctypes') - from ctypes import ( - c_char_p, - pythonapi, py_object, sizeof, - c_int, c_long, c_longlong, c_ssize_t, - c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) - name = "PyUnicode_FromFormat" - _PyUnicode_FromFormat = getattr(pythonapi, name) - _PyUnicode_FromFormat.argtypes = (c_char_p,) - _PyUnicode_FromFormat.restype = py_object - - def PyUnicode_FromFormat(format, *args): - cargs = tuple( - py_object(arg) if isinstance(arg, str) else arg - for arg in args) - return _PyUnicode_FromFormat(format, *cargs) - - def check_format(expected, format, *args): - text = PyUnicode_FromFormat(format, *args) - self.assertEqual(expected, text) - - # ascii format, non-ascii argument - check_format('ascii\x7f=unicode\xe9', - b'ascii\x7f=%U', 'unicode\xe9') - - # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() - # raises an error - self.assertRaisesRegex(ValueError, - r'^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format ' - 'string, got a non-ASCII 
byte: 0xe9$', - PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii') - - # test "%c" - check_format('\uabcd', - b'%c', c_int(0xabcd)) - check_format('\U0010ffff', - b'%c', c_int(0x10ffff)) - with self.assertRaises(OverflowError): - PyUnicode_FromFormat(b'%c', c_int(0x110000)) - # Issue #18183 - check_format('\U00010000\U00100000', - b'%c%c', c_int(0x10000), c_int(0x100000)) - - # test "%" - check_format('%', - b'%%') - check_format('%s', - b'%%s') - check_format('[%]', - b'[%%]') - check_format('%abc', - b'%%%s', b'abc') - - # truncated string - check_format('abc', - b'%.3s', b'abcdef') - check_format('abc[\ufffd', - b'%.5s', 'abc[\u20ac]'.encode('utf8')) - check_format("'\\u20acABC'", - b'%A', '\u20acABC') - check_format("'\\u20", - b'%.5A', '\u20acABCDEF') - check_format("'\u20acABC'", - b'%R', '\u20acABC') - check_format("'\u20acA", - b'%.3R', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3S', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3U', '\u20acABCDEF') - check_format('\u20acAB', - b'%.3V', '\u20acABCDEF', None) - check_format('abc[\ufffd', - b'%.5V', None, 'abc[\u20ac]'.encode('utf8')) - - # following tests comes from #7330 - # test width modifier and precision modifier with %S - check_format("repr= abc", - b'repr=%5S', 'abc') - check_format("repr=ab", - b'repr=%.2S', 'abc') - check_format("repr= ab", - b'repr=%5.2S', 'abc') - - # test width modifier and precision modifier with %R - check_format("repr= 'abc'", - b'repr=%8R', 'abc') - check_format("repr='ab", - b'repr=%.3R', 'abc') - check_format("repr= 'ab", - b'repr=%5.3R', 'abc') - - # test width modifier and precision modifier with %A - check_format("repr= 'abc'", - b'repr=%8A', 'abc') - check_format("repr='ab", - b'repr=%.3A', 'abc') - check_format("repr= 'ab", - b'repr=%5.3A', 'abc') - - # test width modifier and precision modifier with %s - check_format("repr= abc", - b'repr=%5s', b'abc') - check_format("repr=ab", - b'repr=%.2s', b'abc') - check_format("repr= ab", - b'repr=%5.2s', b'abc') - - # test width modifier and precision modifier with %U - check_format("repr= abc", - b'repr=%5U', 'abc') - check_format("repr=ab", - b'repr=%.2U', 'abc') - check_format("repr= ab", - b'repr=%5.2U', 'abc') - - # test width modifier and precision modifier with %V - check_format("repr= abc", - b'repr=%5V', 'abc', b'123') - check_format("repr=ab", - b'repr=%.2V', 'abc', b'123') - check_format("repr= ab", - b'repr=%5.2V', 'abc', b'123') - check_format("repr= 123", - b'repr=%5V', None, b'123') - check_format("repr=12", - b'repr=%.2V', None, b'123') - check_format("repr= 12", - b'repr=%5.2V', None, b'123') - - # test integer formats (%i, %d, %u) - check_format('010', - b'%03i', c_int(10)) - check_format('0010', - b'%0.4i', c_int(10)) - check_format('-123', - b'%i', c_int(-123)) - check_format('-123', - b'%li', c_long(-123)) - check_format('-123', - b'%lli', c_longlong(-123)) - check_format('-123', - b'%zi', c_ssize_t(-123)) - - check_format('-123', - b'%d', c_int(-123)) - check_format('-123', - b'%ld', c_long(-123)) - check_format('-123', - b'%lld', c_longlong(-123)) - check_format('-123', - b'%zd', c_ssize_t(-123)) - - check_format('123', - b'%u', c_uint(123)) - check_format('123', - b'%lu', c_ulong(123)) - check_format('123', - b'%llu', c_ulonglong(123)) - check_format('123', - b'%zu', c_size_t(123)) - - # test long output - min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1)) - max_longlong = -min_longlong - 1 - check_format(str(min_longlong), - b'%lld', c_longlong(min_longlong)) - check_format(str(max_longlong), - b'%lld', 
c_longlong(max_longlong)) - max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1 - check_format(str(max_ulonglong), - b'%llu', c_ulonglong(max_ulonglong)) - PyUnicode_FromFormat(b'%p', c_void_p(-1)) - - # test padding (width and/or precision) - check_format('123'.rjust(10, '0'), - b'%010i', c_int(123)) - check_format('123'.rjust(100), - b'%100i', c_int(123)) - check_format('123'.rjust(100, '0'), - b'%.100i', c_int(123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80i', c_int(123)) - - check_format('123'.rjust(10, '0'), - b'%010u', c_uint(123)) - check_format('123'.rjust(100), - b'%100u', c_uint(123)) - check_format('123'.rjust(100, '0'), - b'%.100u', c_uint(123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80u', c_uint(123)) - - check_format('123'.rjust(10, '0'), - b'%010x', c_int(0x123)) - check_format('123'.rjust(100), - b'%100x', c_int(0x123)) - check_format('123'.rjust(100, '0'), - b'%.100x', c_int(0x123)) - check_format('123'.rjust(80, '0').rjust(100), - b'%100.80x', c_int(0x123)) - - # test %A - check_format(r"%A:'abc\xe9\uabcd\U0010ffff'", - b'%%A:%A', 'abc\xe9\uabcd\U0010ffff') - - # test %V - check_format('repr=abc', - b'repr=%V', 'abc', b'xyz') - - # Test string decode from parameter of %s using utf-8. - # b'\xe4\xba\xba\xe6\xb0\x91' is utf-8 encoded byte sequence of - # '\u4eba\u6c11' - check_format('repr=\u4eba\u6c11', - b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') - - #Test replace error handler. - check_format('repr=abc\ufffd', - b'repr=%V', None, b'abc\xff') - - # Issue #33817: empty strings - check_format('', - b'') - check_format('', - b'%s', b'') - - # check for crashes - for fmt in (b'%', b'%0', b'%01', b'%.', b'%.1', - b'%0%s', b'%1%s', b'%.%s', b'%.1%s', b'%1abc', - b'%l', b'%ll', b'%z', b'%ls', b'%lls', b'%zs'): - with self.subTest(fmt=fmt): - self.assertRaisesRegex(SystemError, 'invalid format string', - PyUnicode_FromFormat, fmt, b'abc') - self.assertRaisesRegex(SystemError, 'invalid format string', - PyUnicode_FromFormat, b'%+i', c_int(10)) - - # Test PyUnicode_AsWideChar() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_aswidechar(self): - from _testcapi import unicode_aswidechar - import_helper.import_module('ctypes') - from ctypes import c_wchar, sizeof - - wchar, size = unicode_aswidechar('abcdef', 2) - self.assertEqual(size, 2) - self.assertEqual(wchar, 'ab') - - wchar, size = unicode_aswidechar('abc', 3) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc') - - wchar, size = unicode_aswidechar('abc', 4) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = unicode_aswidechar('abc', 10) - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = unicode_aswidechar('abc\0def', 20) - self.assertEqual(size, 7) - self.assertEqual(wchar, 'abc\0def\0') - - nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: - buflen = 3 - nchar = 2 - else: # sizeof(c_wchar) == 4 - buflen = 2 - nchar = 1 - wchar, size = unicode_aswidechar(nonbmp, buflen) - self.assertEqual(size, nchar) - self.assertEqual(wchar, nonbmp + '\0') - - # Test PyUnicode_AsWideCharString() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_aswidecharstring(self): - from _testcapi import unicode_aswidecharstring - import_helper.import_module('ctypes') - from ctypes import c_wchar, sizeof - - wchar, size = unicode_aswidecharstring('abc') - self.assertEqual(size, 3) - self.assertEqual(wchar, 'abc\0') - - wchar, size = 
unicode_aswidecharstring('abc\0def') - self.assertEqual(size, 7) - self.assertEqual(wchar, 'abc\0def\0') - - nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: - nchar = 2 - else: # sizeof(c_wchar) == 4 - nchar = 1 - wchar, size = unicode_aswidecharstring(nonbmp) - self.assertEqual(size, nchar) - self.assertEqual(wchar, nonbmp + '\0') - - # Test PyUnicode_AsUCS4() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asucs4(self): - from _testcapi import unicode_asucs4 - for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', - 'a\ud800b\udfffc', '\ud834\udd1e']: - l = len(s) - self.assertEqual(unicode_asucs4(s, l, True), s+'\0') - self.assertEqual(unicode_asucs4(s, l, False), s+'\uffff') - self.assertEqual(unicode_asucs4(s, l+1, True), s+'\0\uffff') - self.assertEqual(unicode_asucs4(s, l+1, False), s+'\0\uffff') - self.assertRaises(SystemError, unicode_asucs4, s, l-1, True) - self.assertRaises(SystemError, unicode_asucs4, s, l-2, False) - s = '\0'.join([s, s]) - self.assertEqual(unicode_asucs4(s, len(s), True), s+'\0') - self.assertEqual(unicode_asucs4(s, len(s), False), s+'\uffff') - - # Test PyUnicode_AsUTF8() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asutf8(self): - from _testcapi import unicode_asutf8 - - bmp = '\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) - - self.assertEqual(unicode_asutf8(bmp), b'\xc4\x80') - self.assertEqual(unicode_asutf8(bmp2), b'\xef\xbf\xbf') - self.assertEqual(unicode_asutf8(nonbmp), b'\xf4\x8f\xbf\xbf') - self.assertRaises(UnicodeEncodeError, unicode_asutf8, 'a\ud800b\udfffc') - - # Test PyUnicode_AsUTF8AndSize() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_asutf8andsize(self): - from _testcapi import unicode_asutf8andsize - - bmp = '\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) - - self.assertEqual(unicode_asutf8andsize(bmp), (b'\xc4\x80', 2)) - self.assertEqual(unicode_asutf8andsize(bmp2), (b'\xef\xbf\xbf', 3)) - self.assertEqual(unicode_asutf8andsize(nonbmp), (b'\xf4\x8f\xbf\xbf', 4)) - self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, 'a\ud800b\udfffc') - - # Test PyUnicode_Count() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_count(self): - from _testcapi import unicode_count - - st = 'abcabd' - self.assertEqual(unicode_count(st, 'a', 0, len(st)), 2) - self.assertEqual(unicode_count(st, 'ab', 0, len(st)), 2) - self.assertEqual(unicode_count(st, 'abc', 0, len(st)), 1) - self.assertEqual(unicode_count(st, 'а', 0, len(st)), 0) # cyrillic "a" - # start < end - self.assertEqual(unicode_count(st, 'a', 3, len(st)), 1) - self.assertEqual(unicode_count(st, 'a', 4, len(st)), 0) - self.assertEqual(unicode_count(st, 'a', 0, sys.maxsize), 2) - # start >= end - self.assertEqual(unicode_count(st, 'abc', 0, 0), 0) - self.assertEqual(unicode_count(st, 'a', 3, 2), 0) - self.assertEqual(unicode_count(st, 'a', sys.maxsize, 5), 0) - # negative - self.assertEqual(unicode_count(st, 'ab', -len(st), -1), 2) - self.assertEqual(unicode_count(st, 'a', -len(st), -3), 1) - # wrong args - self.assertRaises(TypeError, unicode_count, 'a', 'a') - self.assertRaises(TypeError, unicode_count, 'a', 'a', 1) - self.assertRaises(TypeError, unicode_count, 1, 'a', 0, 1) - self.assertRaises(TypeError, unicode_count, 'a', 1, 0, 1) - # empty string - self.assertEqual(unicode_count('abc', '', 0, 3), 4) - self.assertEqual(unicode_count('abc', '', 1, 3), 3) - 
self.assertEqual(unicode_count('', '', 0, 1), 1) - self.assertEqual(unicode_count('', 'a', 0, 1), 0) - # different unicode kinds - for uni in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": - for ch in uni: - self.assertEqual(unicode_count(uni, ch, 0, len(uni)), 1) - self.assertEqual(unicode_count(st, ch, 0, len(st)), 0) - - # Test PyUnicode_FindChar() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_findchar(self): - from _testcapi import unicode_findchar - - for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": - for i, ch in enumerate(str): - self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), 1), i) - self.assertEqual(unicode_findchar(str, ord(ch), 0, len(str), -1), i) - - str = "!>_<!" - self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), 1), -1) - self.assertEqual(unicode_findchar(str, 0x110000, 0, len(str), -1), -1) - # start < end - self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, 1), 4) - self.assertEqual(unicode_findchar(str, ord('!'), 1, len(str)+1, -1), 4) - # start >= end - self.assertEqual(unicode_findchar(str, ord('!'), 0, 0, 1), -1) - self.assertEqual(unicode_findchar(str, ord('!'), len(str), 0, 1), -1) - # negative - self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, 1), 0) - self.assertEqual(unicode_findchar(str, ord('!'), -len(str), -1, -1), 0) - - # Test PyUnicode_CopyCharacters() - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_copycharacters(self): - from _testcapi import unicode_copycharacters - - strings = [ - 'abcde', '\xa1\xa2\xa3\xa4\xa5', - '\u4f60\u597d\u4e16\u754c\uff01', - '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' - ] - - for idx, from_ in enumerate(strings): - # wide -> narrow: exceed maxchar limitation - for to in strings[:idx]: - self.assertRaises( - SystemError, - unicode_copycharacters, to, 0, from_, 0, 5 - ) - # same kind - for from_start in range(5): - self.assertEqual( - unicode_copycharacters(from_, 0, from_, from_start, 5), - (from_[from_start:from_start+5].ljust(5, '\0'), - 5-from_start) - ) - for to_start in range(5): - self.assertEqual( - unicode_copycharacters(from_, to_start, from_, to_start, 5), - (from_[to_start:to_start+5].rjust(5, '\0'), - 5-to_start) - ) - # narrow -> wide - # Tests omitted since this creates invalid strings. - - s = strings[0] - self.assertRaises(IndexError, unicode_copycharacters, s, 6, s, 0, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, -1, s, 0, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, 6, 5) - self.assertRaises(IndexError, unicode_copycharacters, s, 0, s, -1, 5) - self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5) - self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1) - self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0) - - @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') - def test_pep393_utf8_caching_bug(self): - # Issue #25709: Problem with string concatenation and utf-8 cache - from _testcapi import getargs_s_hash - for k in 0x24, 0xa4, 0x20ac, 0x1f40d: - s = '' - for i in range(5): - # Due to CPython specific optimization the 's' string can be - # resized in-place. - s += chr(k) - # Parsing with the "s#" format code calls indirectly - # PyUnicode_AsUTF8AndSize() which creates the UTF-8 - # encoded string cached in the Unicode object.
- self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) - # Check that the second call returns the same result - self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) - class StringModuleTest(unittest.TestCase): def test_formatter_parser(self): def parse(format): diff --git a/Lib/test/test_unittest/test_async_case.py b/Lib/test/test_unittest/test_async_case.py index fab8270ea33abd..a465103b59b6ec 100644 --- a/Lib/test/test_unittest/test_async_case.py +++ b/Lib/test/test_unittest/test_async_case.py @@ -49,69 +49,87 @@ def setUp(self): self.addCleanup(support.gc_collect) def test_full_cycle(self): + expected = ['setUp', + 'asyncSetUp', + 'test', + 'asyncTearDown', + 'tearDown', + 'cleanup6', + 'cleanup5', + 'cleanup4', + 'cleanup3', + 'cleanup2', + 'cleanup1'] class Test(unittest.IsolatedAsyncioTestCase): def setUp(self): self.assertEqual(events, []) events.append('setUp') VAR.set(VAR.get() + ('setUp',)) + self.addCleanup(self.on_cleanup1) + self.addAsyncCleanup(self.on_cleanup2) async def asyncSetUp(self): - self.assertEqual(events, ['setUp']) + self.assertEqual(events, expected[:1]) events.append('asyncSetUp') VAR.set(VAR.get() + ('asyncSetUp',)) - self.addAsyncCleanup(self.on_cleanup1) + self.addCleanup(self.on_cleanup3) + self.addAsyncCleanup(self.on_cleanup4) async def test_func(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp']) + self.assertEqual(events, expected[:2]) events.append('test') VAR.set(VAR.get() + ('test',)) - self.addAsyncCleanup(self.on_cleanup2) + self.addCleanup(self.on_cleanup5) + self.addAsyncCleanup(self.on_cleanup6) async def asyncTearDown(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test']) + self.assertEqual(events, expected[:3]) VAR.set(VAR.get() + ('asyncTearDown',)) events.append('asyncTearDown') def tearDown(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown']) + self.assertEqual(events, expected[:4]) events.append('tearDown') VAR.set(VAR.get() + ('tearDown',)) - async def on_cleanup1(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown', - 'tearDown', - 'cleanup2']) + def on_cleanup1(self): + self.assertEqual(events, expected[:10]) events.append('cleanup1') VAR.set(VAR.get() + ('cleanup1',)) nonlocal cvar cvar = VAR.get() async def on_cleanup2(self): - self.assertEqual(events, ['setUp', - 'asyncSetUp', - 'test', - 'asyncTearDown', - 'tearDown']) + self.assertEqual(events, expected[:9]) events.append('cleanup2') VAR.set(VAR.get() + ('cleanup2',)) + def on_cleanup3(self): + self.assertEqual(events, expected[:8]) + events.append('cleanup3') + VAR.set(VAR.get() + ('cleanup3',)) + + async def on_cleanup4(self): + self.assertEqual(events, expected[:7]) + events.append('cleanup4') + VAR.set(VAR.get() + ('cleanup4',)) + + def on_cleanup5(self): + self.assertEqual(events, expected[:6]) + events.append('cleanup5') + VAR.set(VAR.get() + ('cleanup5',)) + + async def on_cleanup6(self): + self.assertEqual(events, expected[:5]) + events.append('cleanup6') + VAR.set(VAR.get() + ('cleanup6',)) + events = [] cvar = () test = Test("test_func") result = test.run() self.assertEqual(result.errors, []) self.assertEqual(result.failures, []) - expected = ['setUp', 'asyncSetUp', 'test', - 'asyncTearDown', 'tearDown', 'cleanup2', 'cleanup1'] self.assertEqual(events, expected) self.assertEqual(cvar, tuple(expected)) diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index d396f2bab57871..df584b7620d092 100644 --- 
a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -134,11 +134,13 @@ def testCleanupInRun(self): class TestableTest(unittest.TestCase): def setUp(self): ordering.append('setUp') + test.addCleanup(cleanup2) if blowUp: raise Exception('foo') def testNothing(self): ordering.append('test') + test.addCleanup(cleanup3) def tearDown(self): ordering.append('tearDown') @@ -149,8 +151,9 @@ def cleanup1(): ordering.append('cleanup1') def cleanup2(): ordering.append('cleanup2') + def cleanup3(): + ordering.append('cleanup3') test.addCleanup(cleanup1) - test.addCleanup(cleanup2) def success(some_test): self.assertEqual(some_test, test) @@ -160,7 +163,7 @@ def success(some_test): result.addSuccess = success test.run(result) - self.assertEqual(ordering, ['setUp', 'test', 'tearDown', + self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup3', 'cleanup2', 'cleanup1', 'success']) blowUp = True @@ -168,7 +171,7 @@ def success(some_test): test = TestableTest('testNothing') test.addCleanup(cleanup1) test.run(result) - self.assertEqual(ordering, ['setUp', 'cleanup1']) + self.assertEqual(ordering, ['setUp', 'cleanup2', 'cleanup1']) def testTestCaseDebugExecutesCleanups(self): ordering = [] @@ -180,9 +183,11 @@ def setUp(self): def testNothing(self): ordering.append('test') + self.addCleanup(cleanup3) def tearDown(self): ordering.append('tearDown') + test.addCleanup(cleanup4) test = TestableTest('testNothing') @@ -191,9 +196,14 @@ def cleanup1(): test.addCleanup(cleanup2) def cleanup2(): ordering.append('cleanup2') + def cleanup3(): + ordering.append('cleanup3') + def cleanup4(): + ordering.append('cleanup4') test.debug() - self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup1', 'cleanup2']) + self.assertEqual(ordering, ['setUp', 'test', 'tearDown', 'cleanup4', + 'cleanup3', 'cleanup1', 'cleanup2']) def test_enterContext(self): @@ -352,13 +362,14 @@ def testNothing(self): ordering.append('test') @classmethod def tearDownClass(cls): + ordering.append('tearDownClass') raise Exception('TearDownClassExc') suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest) with self.assertRaises(Exception) as cm: suite.debug() self.assertEqual(str(cm.exception), 'TearDownClassExc') - self.assertEqual(ordering, ['setUpClass', 'test']) + self.assertEqual(ordering, ['setUpClass', 'test', 'tearDownClass']) self.assertTrue(TestableTest._class_cleanups) TestableTest._class_cleanups.clear() @@ -368,7 +379,7 @@ def tearDownClass(cls): with self.assertRaises(Exception) as cm: suite.debug() self.assertEqual(str(cm.exception), 'TearDownClassExc') - self.assertEqual(ordering, ['setUpClass', 'test']) + self.assertEqual(ordering, ['setUpClass', 'test', 'tearDownClass']) self.assertTrue(TestableTest._class_cleanups) TestableTest._class_cleanups.clear() @@ -536,6 +547,33 @@ def testNothing(self): self.assertEqual(TestableTest._class_cleanups, []) + def test_run_nested_test(self): + ordering = [] + + class InnerTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + ordering.append('inner setup') + cls.addClassCleanup(ordering.append, 'inner cleanup') + def test(self): + ordering.append('inner test') + + class OuterTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + ordering.append('outer setup') + cls.addClassCleanup(ordering.append, 'outer cleanup') + def test(self): + ordering.append('start outer test') + runTests(InnerTest) + ordering.append('end outer test') + + runTests(OuterTest) + self.assertEqual(ordering, [ + 'outer setup', 'start outer 
test', + 'inner setup', 'inner test', 'inner cleanup', + 'end outer test', 'outer cleanup']) + class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): @@ -747,6 +785,7 @@ def setUpModule(): unittest.addModuleCleanup(cleanup, ordering) @staticmethod def tearDownModule(): + ordering.append('tearDownModule') raise Exception('CleanUpExc') class TestableTest(unittest.TestCase): @@ -765,7 +804,8 @@ def tearDownClass(cls): self.assertEqual(result.errors[0][1].splitlines()[-1], 'Exception: CleanUpExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass', 'cleanup_good']) + 'tearDownClass', 'tearDownModule', + 'cleanup_good']) self.assertEqual(unittest.case._module_cleanups, []) def test_debug_module_executes_cleanUp(self): @@ -819,6 +859,7 @@ def setUpModule(): unittest.addModuleCleanup(cleanup, ordering, blowUp=blowUp) @staticmethod def tearDownModule(): + ordering.append('tearDownModule') raise Exception('TearDownModuleExc') class TestableTest(unittest.TestCase): @@ -838,7 +879,7 @@ def tearDownClass(cls): suite.debug() self.assertEqual(str(cm.exception), 'TearDownModuleExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass']) + 'tearDownClass', 'tearDownModule']) self.assertTrue(unittest.case._module_cleanups) unittest.case._module_cleanups.clear() @@ -849,7 +890,7 @@ def tearDownClass(cls): suite.debug() self.assertEqual(str(cm.exception), 'TearDownModuleExc') self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test', - 'tearDownClass']) + 'tearDownClass', 'tearDownModule']) self.assertTrue(unittest.case._module_cleanups) unittest.case._module_cleanups.clear() diff --git a/Lib/test/test_unittest/testmock/testasync.py b/Lib/test/test_unittest/testmock/testasync.py index 1bab671acdef18..e05a22861d47bf 100644 --- a/Lib/test/test_unittest/testmock/testasync.py +++ b/Lib/test/test_unittest/testmock/testasync.py @@ -149,6 +149,23 @@ async def test_async(): run(test_async()) + def test_patch_dict_async_def(self): + foo = {'a': 'a'} + @patch.dict(foo, {'a': 'b'}) + async def test_async(): + self.assertEqual(foo['a'], 'b') + + self.assertTrue(iscoroutinefunction(test_async)) + run(test_async()) + + def test_patch_dict_async_def_context(self): + foo = {'a': 'a'} + async def test_async(): + with patch.dict(foo, {'a': 'b'}): + self.assertEqual(foo['a'], 'b') + + run(test_async()) + class AsyncMockTest(unittest.TestCase): def test_iscoroutinefunction_default(self): diff --git a/Lib/test/test_unittest/testmock/testsealable.py b/Lib/test/test_unittest/testmock/testsealable.py index daba2b49b46f63..e0c38293cffd7a 100644 --- a/Lib/test/test_unittest/testmock/testsealable.py +++ b/Lib/test/test_unittest/testmock/testsealable.py @@ -200,6 +200,9 @@ def ban(self): self.assertIsInstance(foo.Baz.baz, mock.NonCallableMagicMock) self.assertIsInstance(foo.Baz.ban, mock.MagicMock) + # see gh-91803 + self.assertIsInstance(foo.bar2(), mock.MagicMock) + self.assertEqual(foo.bar1(), 'a') foo.bar1.return_value = 'new_a' self.assertEqual(foo.bar1(), 'new_a') @@ -212,7 +215,7 @@ def ban(self): with self.assertRaises(AttributeError): foo.bar = 1 with self.assertRaises(AttributeError): - foo.bar2() + foo.bar2().x foo.bar2.return_value = 'bar2' self.assertEqual(foo.bar2(), 'bar2') diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 81d6018bd1a495..80fb9e5cd2a445 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -653,18 +653,39 @@ def test_attributes_bad_port(self): """Check handling of 
invalid ports.""" for bytes in (False, True): for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): - for port in ("foo", "1.5", "-1", "0x10"): + for port in ("foo", "1.5", "-1", "0x10", "-0", "1_1", " 1", "1 ", "६"): with self.subTest(bytes=bytes, parse=parse, port=port): netloc = "www.example.net:" + port url = "http://" + netloc if bytes: - netloc = netloc.encode("ascii") - url = url.encode("ascii") + if netloc.isascii() and port.isascii(): + netloc = netloc.encode("ascii") + url = url.encode("ascii") + else: + continue p = parse(url) self.assertEqual(p.netloc, netloc) with self.assertRaises(ValueError): p.port + def test_attributes_bad_scheme(self): + """Check handling of invalid schemes.""" + for bytes in (False, True): + for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): + for scheme in (".", "+", "-", "0", "http&", "६http"): + with self.subTest(bytes=bytes, parse=parse, scheme=scheme): + url = scheme + "://www.example.net" + if bytes: + if url.isascii(): + url = url.encode("ascii") + else: + continue + p = parse(url) + if bytes: + self.assertEqual(p.scheme, b"") + else: + self.assertEqual(p.scheme, "") + def test_attributes_without_netloc(self): # This example is straight from RFC 3261. It looks like it # should allow the username, hostname, and port to be filled @@ -1199,6 +1220,7 @@ def test_splitnport(self): self.assertEqual(splitnport('127.0.0.1', 55), ('127.0.0.1', 55)) self.assertEqual(splitnport('parrot:cheese'), ('parrot', None)) self.assertEqual(splitnport('parrot:cheese', 55), ('parrot', None)) + self.assertEqual(splitnport('parrot: +1_0 '), ('parrot', None)) def test_splitquery(self): # Normal cases are exercised by other tests; ensure that we also diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 4359a4e3ebe861..4e18dfc23c40c2 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -20,7 +20,7 @@ from test.support import (captured_stdout, captured_stderr, skip_if_broken_multiprocessing_synchronize, verbose, requires_subprocess, is_emscripten, is_wasi, - requires_venv_with_pip) + requires_venv_with_pip, TEST_HOME_DIR) from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree) import unittest import venv @@ -216,7 +216,7 @@ def test_upgrade_dependencies(self): if sys.platform == 'win32': expect_exe = os.path.normcase(os.path.realpath(expect_exe)) - def pip_cmd_checker(cmd): + def pip_cmd_checker(cmd, **kwargs): cmd[0] = os.path.normcase(cmd[0]) self.assertEqual( cmd, @@ -232,7 +232,7 @@ def pip_cmd_checker(cmd): ) fake_context = builder.ensure_directories(fake_env_dir) - with patch('venv.subprocess.check_call', pip_cmd_checker): + with patch('venv.subprocess.check_output', pip_cmd_checker): builder.upgrade_dependencies(fake_context) @requireVenvCreate @@ -482,6 +482,20 @@ def test_multiprocessing(self): 'pool.terminate()']) self.assertEqual(out.strip(), "python".encode()) + @requireVenvCreate + def test_multiprocessing_recursion(self): + """ + Test that the multiprocessing is able to spawn itself + """ + skip_if_broken_multiprocessing_synchronize() + + rmtree(self.env_dir) + self.run_with_capture(venv.create, self.env_dir) + envpy = os.path.join(os.path.realpath(self.env_dir), + self.bindir, self.exe) + script = os.path.join(TEST_HOME_DIR, '_test_venv_multiprocessing.py') + subprocess.check_call([envpy, script]) + @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') def test_deactivate_with_strict_bash_opts(self): bash = shutil.which("bash") @@ -523,6 +537,80 @@ def test_pathsep_error(self): 
self.assertRaises(ValueError, venv.create, bad_itempath) self.assertRaises(ValueError, venv.create, pathlib.Path(bad_itempath)) + @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') + @requireVenvCreate + def test_zippath_from_non_installed_posix(self): + """ + Test that when create venv from non-installed python, the zip path + value is as expected. + """ + rmtree(self.env_dir) + # First try to create a non-installed python. It's not a real full + # functional non-installed python, but enough for this test. + platlibdir = sys.platlibdir + non_installed_dir = os.path.realpath(tempfile.mkdtemp()) + self.addCleanup(rmtree, non_installed_dir) + bindir = os.path.join(non_installed_dir, self.bindir) + os.mkdir(bindir) + shutil.copy2(sys.executable, bindir) + libdir = os.path.join(non_installed_dir, platlibdir, self.lib[1]) + os.makedirs(libdir) + landmark = os.path.join(libdir, "os.py") + stdlib_zip = "python%d%d.zip" % sys.version_info[:2] + zip_landmark = os.path.join(non_installed_dir, + platlibdir, + stdlib_zip) + additional_pythonpath_for_non_installed = [] + # Copy stdlib files to the non-installed python so venv can + # correctly calculate the prefix. + for eachpath in sys.path: + if eachpath.endswith(".zip"): + if os.path.isfile(eachpath): + shutil.copyfile( + eachpath, + os.path.join(non_installed_dir, platlibdir)) + elif os.path.isfile(os.path.join(eachpath, "os.py")): + for name in os.listdir(eachpath): + if name == "site-packages": + continue + fn = os.path.join(eachpath, name) + if os.path.isfile(fn): + shutil.copy(fn, libdir) + elif os.path.isdir(fn): + shutil.copytree(fn, os.path.join(libdir, name)) + else: + additional_pythonpath_for_non_installed.append( + eachpath) + cmd = [os.path.join(non_installed_dir, self.bindir, self.exe), + "-m", + "venv", + "--without-pip", + self.env_dir] + # Our fake non-installed python is not fully functional because + # it cannot find the extensions. Set PYTHONPATH so it can run the + # venv module correctly. + pythonpath = os.pathsep.join( + additional_pythonpath_for_non_installed) + # For python built with shared enabled. We need to set + # LD_LIBRARY_PATH so the non-installed python can find and link + # libpython.so + ld_library_path = sysconfig.get_config_var("LIBDIR") + if not ld_library_path or sysconfig.is_python_build(): + ld_library_path = os.path.abspath(os.path.dirname(sys.executable)) + if sys.platform == 'darwin': + ld_library_path_env = "DYLD_LIBRARY_PATH" + else: + ld_library_path_env = "LD_LIBRARY_PATH" + subprocess.check_call(cmd, + env={"PYTHONPATH": pythonpath, + ld_library_path_env: ld_library_path}) + envpy = os.path.join(self.env_dir, self.bindir, self.exe) + # Now check the venv created from the non-installed python has + # correct zip path in pythonpath. 
+ cmd = [envpy, '-S', '-c', 'import sys; print(sys.path)'] + out, err = check_output(cmd) + self.assertTrue(zip_landmark.encode() in out) + @requireVenvCreate class EnsurePipTest(BaseTest): """Test venv module installation of pip.""" @@ -659,8 +747,8 @@ def nicer_error(self): try: yield except subprocess.CalledProcessError as exc: - out = exc.output.decode(errors="replace") - err = exc.stderr.decode(errors="replace") + out = (exc.output or b'').decode(errors="replace") + err = (exc.stderr or b'').decode(errors="replace") self.fail( f"{exc}\n\n" f"**Subprocess Output**\n{out}\n\n" diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 3a9573d6e70559..7c5920797d2538 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -597,7 +597,7 @@ class C(object): # deallocation of c2. del c2 - def test_callback_in_cycle_1(self): + def test_callback_in_cycle(self): import gc class J(object): @@ -637,40 +637,11 @@ def acallback(self, ignore): del I, J, II gc.collect() - def test_callback_in_cycle_2(self): + def test_callback_reachable_one_way(self): import gc - # This is just like test_callback_in_cycle_1, except that II is an - # old-style class. The symptom is different then: an instance of an - # old-style class looks in its own __dict__ first. 'J' happens to - # get cleared from I.__dict__ before 'wr', and 'J' was never in II's - # __dict__, so the attribute isn't found. The difference is that - # the old-style II doesn't have a NULL __mro__ (it doesn't have any - # __mro__), so no segfault occurs. Instead it got: - # test_callback_in_cycle_2 (__main__.ReferencesTestCase) ... - # Exception exceptions.AttributeError: - # "II instance has no attribute 'J'" in <bound method II.acallback - # of <?.II instance at 0x00B9B4B8>> ignored - - class J(object): - pass - - class II: - def acallback(self, ignore): - self.J - - I = II() - I.J = J - I.wr = weakref.ref(J, I.acallback) - - del I, J, II - gc.collect() - - def test_callback_in_cycle_3(self): - import gc - - # This one broke the first patch that fixed the last two. In this - # case, the objects reachable from the callback aren't also reachable + # This one broke the first patch that fixed the previous test. In this case, + # the objects reachable from the callback aren't also reachable # from the object (c1) *triggering* the callback: you can get to # c1 from c2, but not vice-versa. The result was that c2's __dict__ # got tp_clear'ed by the time the c2.cb callback got invoked. @@ -690,10 +661,10 @@ def cb(self, ignore): del c1, c2 gc.collect() - def test_callback_in_cycle_4(self): + def test_callback_different_classes(self): import gc - # Like test_callback_in_cycle_3, except c2 and c1 have different + # Like test_callback_reachable_one_way, except c2 and c1 have different # classes. c2's class (C) isn't reachable from c1 then, so protecting # objects reachable from the dying object (c1) isn't enough to stop # c2's class (C) from getting tp_clear'ed before c2.cb is invoked.
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py index f20aad051da960..ae54f6c46891c6 100644 --- a/Lib/test/test_zlib.py +++ b/Lib/test/test_zlib.py @@ -944,6 +944,173 @@ def choose_lines(source, number, seed=None, generator=random): """ +class ZlibDecompressorTest(): + # Test adopted from test_bz2.py + TEXT = HAMLET_SCENE + DATA = zlib.compress(HAMLET_SCENE) + BAD_DATA = b"Not a valid deflate block" + def test_Constructor(self): + self.assertRaises(TypeError, zlib._ZlibDecompressor, 42) + + def testDecompress(self): + zlibd = zlib._ZlibDecompressor() + self.assertRaises(TypeError, zlibd.decompress) + text = zlibd.decompress(self.DATA) + self.assertEqual(text, self.TEXT) + + def testDecompressChunks10(self): + zlibd = zlib._ZlibDecompressor() + text = b'' + n = 0 + while True: + str = self.DATA[n*10:(n+1)*10] + if not str: + break + text += zlibd.decompress(str) + n += 1 + self.assertEqual(text, self.TEXT) + + def testDecompressUnusedData(self): + zlibd = zlib._ZlibDecompressor() + unused_data = b"this is unused data" + text = zlibd.decompress(self.DATA+unused_data) + self.assertEqual(text, self.TEXT) + self.assertEqual(zlibd.unused_data, unused_data) + + def testEOFError(self): + zlibd = zlib._ZlibDecompressor() + text = zlibd.decompress(self.DATA) + self.assertRaises(EOFError, zlibd.decompress, b"anything") + self.assertRaises(EOFError, zlibd.decompress, b"") + + @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=3.3) + def testDecompress4G(self, size): + # "Test zlib._ZlibDecompressor.decompress() with >4GiB input" + blocksize = 10 * 1024 * 1024 + block = random.randbytes(blocksize) + try: + data = block * (size // blocksize + 1) + compressed = zlib.compress(data) + zlibd = zlib._ZlibDecompressor() + decompressed = zlibd.decompress(compressed) + self.assertTrue(decompressed == data) + finally: + data = None + compressed = None + decompressed = None + + def testPickle(self): + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.assertRaises(TypeError): + pickle.dumps(zlib._ZlibDecompressor(), proto) + + def testDecompressorChunksMaxsize(self): + zlibd = zlib._ZlibDecompressor() + max_length = 100 + out = [] + + # Feed some input + len_ = len(self.BIG_DATA) - 64 + out.append(zlibd.decompress(self.BIG_DATA[:len_], + max_length=max_length)) + self.assertFalse(zlibd.needs_input) + self.assertEqual(len(out[-1]), max_length) + + # Retrieve more data without providing more input + out.append(zlibd.decompress(b'', max_length=max_length)) + self.assertFalse(zlibd.needs_input) + self.assertEqual(len(out[-1]), max_length) + + # Retrieve more data while providing more input + out.append(zlibd.decompress(self.BIG_DATA[len_:], + max_length=max_length)) + self.assertLessEqual(len(out[-1]), max_length) + + # Retrieve remaining uncompressed data + while not zlibd.eof: + out.append(zlibd.decompress(b'', max_length=max_length)) + self.assertLessEqual(len(out[-1]), max_length) + + out = b"".join(out) + self.assertEqual(out, self.BIG_TEXT) + self.assertEqual(zlibd.unused_data, b"") + + def test_decompressor_inputbuf_1(self): + # Test reusing input buffer after moving existing + # contents to beginning + zlibd = zlib._ZlibDecompressor() + out = [] + + # Create input buffer and fill it + self.assertEqual(zlibd.decompress(self.DATA[:100], + max_length=0), b'') + + # Retrieve some results, freeing capacity at beginning + # of input buffer + out.append(zlibd.decompress(b'', 2)) + + # Add more data that fits into input buffer after + # moving existing data to beginning + 
out.append(zlibd.decompress(self.DATA[100:105], 15)) + + # Decompress rest of data + out.append(zlibd.decompress(self.DATA[105:])) + self.assertEqual(b''.join(out), self.TEXT) + + def test_decompressor_inputbuf_2(self): + # Test reusing input buffer by appending data at the + # end right away + zlibd = zlib._ZlibDecompressor() + out = [] + + # Create input buffer and empty it + self.assertEqual(zlibd.decompress(self.DATA[:200], + max_length=0), b'') + out.append(zlibd.decompress(b'')) + + # Fill buffer with new data + out.append(zlibd.decompress(self.DATA[200:280], 2)) + + # Append some more data, not enough to require resize + out.append(zlibd.decompress(self.DATA[280:300], 2)) + + # Decompress rest of data + out.append(zlibd.decompress(self.DATA[300:])) + self.assertEqual(b''.join(out), self.TEXT) + + def test_decompressor_inputbuf_3(self): + # Test reusing input buffer after extending it + + zlibd = zlib._ZlibDecompressor() + out = [] + + # Create almost full input buffer + out.append(zlibd.decompress(self.DATA[:200], 5)) + + # Add even more data to it, requiring resize + out.append(zlibd.decompress(self.DATA[200:300], 5)) + + # Decompress rest of data + out.append(zlibd.decompress(self.DATA[300:])) + self.assertEqual(b''.join(out), self.TEXT) + + def test_failure(self): + zlibd = zlib._ZlibDecompressor() + self.assertRaises(Exception, zlibd.decompress, self.BAD_DATA * 30) + # Previously, a second call could crash due to internal inconsistency + self.assertRaises(Exception, zlibd.decompress, self.BAD_DATA * 30) + + @support.refcount_test + def test_refleaks_in___init__(self): + gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount') + zlibd = zlib._ZlibDecompressor() + refs_before = gettotalrefcount() + for i in range(100): + zlibd.__init__() + self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10) + + class CustomInt: def __index__(self): return 100 diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index a2172f3ac21d08..fd0e3bc032ec0c 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -404,6 +404,19 @@ def test_time_fixed_offset(self): class CZoneInfoTest(ZoneInfoTest): module = c_zoneinfo + def test_signatures(self): + """Ensure that C module has valid method signatures.""" + import inspect + + must_have_signatures = ( + self.klass.clear_cache, + self.klass.no_cache, + self.klass.from_file, + ) + for method in must_have_signatures: + with self.subTest(method=method): + inspect.Signature.from_callable(method) + def test_fold_mutate(self): """Test that fold isn't mutated when no change is necessary. diff --git a/Lib/test/typinganndata/__init__.py b/Lib/test/typinganndata/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/Lib/test/typinganndata/ann_module9.py b/Lib/test/typinganndata/ann_module9.py new file mode 100644 index 00000000000000..952217393e1ff7 --- /dev/null +++ b/Lib/test/typinganndata/ann_module9.py @@ -0,0 +1,14 @@ +# Test ``inspect.formatannotation`` +# https://github.com/python/cpython/issues/96073 + +from typing import Union, List + +ann = Union[List[str], int] + +# mock typing._type_repr behaviour +class A: ... 
+ +A.__module__ = 'testModule.typing' +A.__qualname__ = 'A' + +ann1 = Union[List[A], int] diff --git a/Lib/threading.py b/Lib/threading.py index d030e1243623f9..723bd58bf5a68b 100644 --- a/Lib/threading.py +++ b/Lib/threading.py @@ -33,6 +33,7 @@ # Rename some stuff so "from threading import *" is safe _start_new_thread = _thread.start_new_thread +_daemon_threads_allowed = _thread.daemon_threads_allowed _allocate_lock = _thread.allocate_lock _set_sentinel = _thread._set_sentinel get_ident = _thread.get_ident @@ -899,6 +900,8 @@ class is implemented. self._args = args self._kwargs = kwargs if daemon is not None: + if daemon and not _daemon_threads_allowed(): + raise RuntimeError('daemon threads are disabled in this (sub)interpreter') self._daemonic = daemon else: self._daemonic = current_thread().daemon @@ -1226,6 +1229,8 @@ def daemon(self): def daemon(self, daemonic): if not self._initialized: raise RuntimeError("Thread.__init__() not called") + if daemonic and not _daemon_threads_allowed(): + raise RuntimeError('daemon threads are disabled in this interpreter') if self._started.is_set(): raise RuntimeError("cannot set daemon status of active thread") self._daemonic = daemonic @@ -1432,7 +1437,8 @@ def __init__(self): class _DummyThread(Thread): def __init__(self): - Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True) + Thread.__init__(self, name=_newname("Dummy-%d"), + daemon=_daemon_threads_allowed()) self._started.set() self._set_ident() diff --git a/Lib/timeit.py b/Lib/timeit.py index 9dfd454936e6b8..0cf8db67723a49 100755 --- a/Lib/timeit.py +++ b/Lib/timeit.py @@ -259,10 +259,9 @@ def main(args=None, *, _wrap_timer=None): args = sys.argv[1:] import getopt try: - opts, args = getopt.getopt(args, "n:u:s:r:tcpvh", + opts, args = getopt.getopt(args, "n:u:s:r:pvh", ["number=", "setup=", "repeat=", - "time", "clock", "process", - "verbose", "unit=", "help"]) + "process", "verbose", "unit=", "help"]) except getopt.error as err: print(err) print("use -h/--help for command line help") diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index 356446911e0abc..a8e7bf490ad463 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -3648,7 +3648,7 @@ def count(self, index1, index2, *args): # new in Tk 8.5 "lines", "xpixels" and "ypixels". 
There is an additional possible option "update", which if given then all subsequent options ensure that any possible out of date information is recalculated.""" - args = ['-%s' % arg for arg in args if not arg.startswith('-')] + args = ['-%s' % arg for arg in args] args += [index1, index2] res = self.tk.call(self._w, 'count', *args) or None if res is not None and len(args) <= 3: diff --git a/Lib/token.py b/Lib/token.py index 9d0c0bf0fb0368..95b107c6643b3f 100644 --- a/Lib/token.py +++ b/Lib/token.py @@ -1,5 +1,5 @@ """Token constants.""" -# Auto-generated by Tools/scripts/generate_token.py +# Auto-generated by Tools/build/generate_token.py __all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF'] diff --git a/Lib/traceback.py b/Lib/traceback.py index c46ddaf51a00d4..c43c4720ae5a15 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -476,32 +476,32 @@ def format_frame_summary(self, frame_summary): frame_summary.colno is not None and frame_summary.end_colno is not None ): - colno = _byte_offset_to_character_offset( - frame_summary._original_line, frame_summary.colno) - end_colno = _byte_offset_to_character_offset( - frame_summary._original_line, frame_summary.end_colno) + start_offset = _byte_offset_to_character_offset( + frame_summary._original_line, frame_summary.colno) + 1 + end_offset = _byte_offset_to_character_offset( + frame_summary._original_line, frame_summary.end_colno) + 1 anchors = None if frame_summary.lineno == frame_summary.end_lineno: with suppress(Exception): anchors = _extract_caret_anchors_from_line_segment( - frame_summary._original_line[colno - 1:end_colno - 1] + frame_summary._original_line[start_offset - 1:end_offset - 1] ) else: - end_colno = stripped_characters + len(stripped_line) + end_offset = stripped_characters + len(stripped_line) # show indicators if primary char doesn't span the frame line - if end_colno - colno < len(stripped_line) or ( + if end_offset - start_offset < len(stripped_line) or ( anchors and anchors.right_start_offset - anchors.left_end_offset > 0): row.append(' ') - row.append(' ' * (colno - stripped_characters)) + row.append(' ' * (start_offset - stripped_characters)) if anchors: row.append(anchors.primary_char * (anchors.left_end_offset)) row.append(anchors.secondary_char * (anchors.right_start_offset - anchors.left_end_offset)) - row.append(anchors.primary_char * (end_colno - colno - anchors.right_start_offset)) + row.append(anchors.primary_char * (end_offset - start_offset - anchors.right_start_offset)) else: - row.append('^' * (end_colno - colno)) + row.append('^' * (end_offset - start_offset)) row.append('\n') @@ -561,10 +561,7 @@ def format(self): def _byte_offset_to_character_offset(str, offset): as_utf8 = str.encode('utf-8') - if offset > len(as_utf8): - offset = len(as_utf8) - - return len(as_utf8[:offset + 1].decode("utf-8")) + return len(as_utf8[:offset].decode("utf-8", errors="replace")) _Anchors = collections.namedtuple( @@ -589,12 +586,15 @@ def _extract_caret_anchors_from_line_segment(segment): if len(tree.body) != 1: return None + normalize = lambda offset: _byte_offset_to_character_offset(segment, offset) statement = tree.body[0] match statement: case ast.Expr(expr): match expr: case ast.BinOp(): - operator_str = segment[expr.left.end_col_offset:expr.right.col_offset] + operator_start = normalize(expr.left.end_col_offset) + operator_end = normalize(expr.right.col_offset) + operator_str = segment[operator_start:operator_end] operator_offset = len(operator_str) - len(operator_str.lstrip()) left_anchor = 
expr.left.end_col_offset + operator_offset @@ -604,9 +604,11 @@ def _extract_caret_anchors_from_line_segment(segment): and not operator_str[operator_offset + 1].isspace() ): right_anchor += 1 - return _Anchors(left_anchor, right_anchor) + return _Anchors(normalize(left_anchor), normalize(right_anchor)) case ast.Subscript(): - return _Anchors(expr.value.end_col_offset, expr.slice.end_col_offset + 1) + subscript_start = normalize(expr.value.end_col_offset) + subscript_end = normalize(expr.slice.end_col_offset + 1) + return _Anchors(subscript_start, subscript_end) return None @@ -707,11 +709,25 @@ def __init__(self, exc_type, exc_value, exc_traceback, *, limit=None, self.offset = exc_value.offset self.end_offset = exc_value.end_offset self.msg = exc_value.msg + elif exc_type and issubclass(exc_type, ImportError) and \ + getattr(exc_value, "name_from", None) is not None: + wrong_name = getattr(exc_value, "name_from", None) + suggestion = _compute_suggestion_error(exc_value, exc_traceback, wrong_name) + if suggestion: + self._str += f". Did you mean: '{suggestion}'?" elif exc_type and issubclass(exc_type, (NameError, AttributeError)) and \ getattr(exc_value, "name", None) is not None: - suggestion = _compute_suggestion_error(exc_value, exc_traceback) + wrong_name = getattr(exc_value, "name", None) + suggestion = _compute_suggestion_error(exc_value, exc_traceback, wrong_name) if suggestion: self._str += f". Did you mean: '{suggestion}'?" + if issubclass(exc_type, NameError): + wrong_name = getattr(exc_value, "name", None) + if wrong_name is not None and wrong_name in sys.stdlib_module_names: + if suggestion: + self._str += f" Or did you forget to import '{wrong_name}'" + else: + self._str += f". Did you forget to import '{wrong_name}'" if lookup_lines: self._load_lines() self.__suppress_context__ = \ @@ -998,8 +1014,7 @@ def _substitution_cost(ch_a, ch_b): return _MOVE_COST -def _compute_suggestion_error(exc_value, tb): - wrong_name = getattr(exc_value, "name", None) +def _compute_suggestion_error(exc_value, tb, wrong_name): if wrong_name is None or not isinstance(wrong_name, str): return None if isinstance(exc_value, AttributeError): @@ -1008,6 +1023,12 @@ def _compute_suggestion_error(exc_value, tb): d = dir(obj) except Exception: return None + elif isinstance(exc_value, ImportError): + try: + mod = __import__(exc_value.name) + d = dir(mod) + except Exception: + return None else: assert isinstance(exc_value, NameError) # find most recent frame @@ -1019,8 +1040,18 @@ def _compute_suggestion_error(exc_value, tb): d = ( list(frame.f_locals) + list(frame.f_globals) - + list(frame.f_globals['__builtins__']) + + list(frame.f_builtins) ) + + # Check first if we are in a method and the instance + # has the wrong name as attribute + if 'self' in frame.f_locals: + self = frame.f_locals['self'] + if hasattr(self, wrong_name): + return f"self.{wrong_name}" + + # Compute closest match + if len(d) > _MAX_CANDIDATE_ITEMS: return None wrong_name_len = len(wrong_name) diff --git a/Lib/types.py b/Lib/types.py index 2e73fbc4501337..f8353126cb527c 100644 --- a/Lib/types.py +++ b/Lib/types.py @@ -60,7 +60,7 @@ def _m(self): pass GetSetDescriptorType = type(FunctionType.__code__) MemberDescriptorType = type(FunctionType.__globals__) -del sys, _f, _g, _C, _c, _ag # Not for export +del sys, _f, _g, _C, _c, _ag, _cell_factory # Not for export # Provide a PEP 3115 compliant mechanism for class creation diff --git a/Lib/typing.py b/Lib/typing.py index 95bd61c7f8c61f..233941598f76a3 100644 --- a/Lib/typing.py +++ 
b/Lib/typing.py @@ -1437,6 +1437,10 @@ def _determine_new_args(self, args): new_args = [] for old_arg in self.__args__: + if isinstance(old_arg, type): + new_args.append(old_arg) + continue + substfunc = getattr(old_arg, '__typing_subst__', None) if substfunc: new_arg = substfunc(new_arg_by_param[old_arg]) diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index b01f6605e23e39..5167c5f843f085 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -384,11 +384,11 @@ class TestCase(object): # of difflib. See #11763. _diffThreshold = 2**16 - # Attribute used by TestSuite for classSetUp - - _classSetupFailed = False - - _class_cleanups = [] + def __init_subclass__(cls, *args, **kwargs): + # Attribute used by TestSuite for classSetUp + cls._classSetupFailed = False + cls._class_cleanups = [] + super().__init_subclass__(*args, **kwargs) def __init__(self, methodName='runTest'): """Create an instance of the class that will use the named test diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index cd46fea5162aba..a273753d6a0abb 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -35,6 +35,7 @@ from types import CodeType, ModuleType, MethodType from unittest.util import safe_repr from functools import wraps, partial +from threading import RLock class InvalidSpecError(Exception): @@ -402,6 +403,14 @@ def __init__(self, /, *args, **kwargs): class NonCallableMock(Base): """A non-callable version of `Mock`""" + # Store a mutex as a class attribute in order to protect concurrent access + # to mock attributes. Using a class attribute allows all NonCallableMock + # instances to share the mutex for simplicity. + # + # See https://github.com/python/cpython/issues/98624 for why this is + # necessary. + _lock = RLock() + def __new__(cls, /, *args, **kw): # every instance has its own class # so we can create magic methods on the @@ -644,35 +653,36 @@ def __getattr__(self, name): f"{name!r} is not a valid assertion. Use a spec " f"for the mock if {name!r} is meant to be an attribute.") - result = self._mock_children.get(name) - if result is _deleted: - raise AttributeError(name) - elif result is None: - wraps = None - if self._mock_wraps is not None: - # XXXX should we get the attribute without triggering code - # execution? - wraps = getattr(self._mock_wraps, name) - - result = self._get_child_mock( - parent=self, name=name, wraps=wraps, _new_name=name, - _new_parent=self - ) - self._mock_children[name] = result - - elif isinstance(result, _SpecState): - try: - result = create_autospec( - result.spec, result.spec_set, result.instance, - result.parent, result.name + with NonCallableMock._lock: + result = self._mock_children.get(name) + if result is _deleted: + raise AttributeError(name) + elif result is None: + wraps = None + if self._mock_wraps is not None: + # XXXX should we get the attribute without triggering code + # execution? + wraps = getattr(self._mock_wraps, name) + + result = self._get_child_mock( + parent=self, name=name, wraps=wraps, _new_name=name, + _new_parent=self ) - except InvalidSpecError: - target_name = self.__dict__['_mock_name'] or self - raise InvalidSpecError( - f'Cannot autospec attr {name!r} from target ' - f'{target_name!r} as it has already been mocked out. 
' - f'[target={self!r}, attr={result.spec!r}]') - self._mock_children[name] = result + self._mock_children[name] = result + + elif isinstance(result, _SpecState): + try: + result = create_autospec( + result.spec, result.spec_set, result.instance, + result.parent, result.name + ) + except InvalidSpecError: + target_name = self.__dict__['_mock_name'] or self + raise InvalidSpecError( + f'Cannot autospec attr {name!r} from target ' + f'{target_name!r} as it has already been mocked out. ' + f'[target={self!r}, attr={result.spec!r}]') + self._mock_children[name] = result return result @@ -1799,6 +1809,12 @@ def __init__(self, in_dict, values=(), clear=False, **kwargs): def __call__(self, f): if isinstance(f, type): return self.decorate_class(f) + if inspect.iscoroutinefunction(f): + return self.decorate_async_callable(f) + return self.decorate_callable(f) + + + def decorate_callable(self, f): @wraps(f) def _inner(*args, **kw): self._patch_dict() @@ -1810,6 +1826,18 @@ def _inner(*args, **kw): return _inner + def decorate_async_callable(self, f): + @wraps(f) + async def _inner(*args, **kw): + self._patch_dict() + try: + return await f(*args, **kw) + finally: + self._unpatch_dict() + + return _inner + + def decorate_class(self, klass): for attr in dir(klass): attr_value = getattr(klass, attr) @@ -2735,6 +2763,7 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, _new_parent=parent, **kwargs) mock._mock_children[entry] = new + new.return_value = child_klass() _check_signature(original, new, skipfirst=skipfirst) # so functions created with _set_signature become instance attributes, diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 3734c73948c695..4f6867accbc0eb 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -167,12 +167,11 @@ def hostname(self): def port(self): port = self._hostinfo[1] if port is not None: - try: - port = int(port, 10) - except ValueError: - message = f'Port could not be cast to integer value as {port!r}' - raise ValueError(message) from None - if not ( 0 <= port <= 65535): + if port.isdigit() and port.isascii(): + port = int(port) + else: + raise ValueError(f"Port could not be cast to integer value as {port!r}") + if not (0 <= port <= 65535): raise ValueError("Port out of range 0-65535") return port @@ -461,7 +460,7 @@ def urlsplit(url, scheme='', allow_fragments=True): allow_fragments = bool(allow_fragments) netloc = query = fragment = '' i = url.find(':') - if i > 0: + if i > 0 and url[0].isascii() and url[0].isalpha(): for c in url[:i]: if c not in scheme_chars: break @@ -1132,15 +1131,15 @@ def splitnport(host, defport=-1): def _splitnport(host, defport=-1): """Split host and port, returning numeric port. Return given default port if no ':' found; defaults to -1. - Return numerical port if a valid number are found after ':'. + Return numerical port if a valid number is found after ':'. 
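The urllib.parse hunks in this area replace `int()` wrapped in try/except with an explicit ASCII-digit check, both in `SplitResult.port` and in the internal `_splitnport()` helper, so only plain ASCII digits are accepted as a port. A short sketch of the resulting behaviour (the error text is taken from the diff):

```python
from urllib.parse import urlsplit

print(urlsplit("http://example.com:8080/path").port)   # 8080

try:
    # Signs, spaces or non-ASCII digits no longer sneak through int():
    urlsplit("http://example.com:80x/path").port
except ValueError as exc:
    print(exc)   # Port could not be cast to integer value as '80x'
```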
Return None if ':' but not a valid number.""" host, delim, port = host.rpartition(':') if not delim: host = port elif port: - try: + if port.isdigit() and port.isascii(): nport = int(port) - except ValueError: + else: nport = None return host, nport return host, defport diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index e2d5b8c5c59a3c..278aa3a14bfeea 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -1582,7 +1582,7 @@ def ftp_open(self, req): headers = email.message_from_string(headers) return addinfourl(fp, headers, req.full_url) except ftplib.all_errors as exp: - raise URLError(f'ftp error: {exp}') from exp + raise URLError(exp) from exp def connect_ftp(self, user, passwd, host, port, dirs, timeout): return ftpwrapper(user, passwd, host, port, dirs, timeout, diff --git a/Lib/uuid.py b/Lib/uuid.py index 8fe2479f3f22cf..e863b631877f6d 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -371,7 +371,12 @@ def _get_command_stdout(command, *args): # for are actually localized, but in theory some system could do so.) env = dict(os.environ) env['LC_ALL'] = 'C' - proc = subprocess.Popen((executable,) + args, + # Empty strings will be quoted by popen so we should just ommit it + if args != ('',): + command = (executable, *args) + else: + command = (executable,) + proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, env=env) @@ -511,7 +516,7 @@ def _ifconfig_getnode(): mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1) if mac: return mac - return None + return None def _ip_getnode(): """Get the hardware address on Unix by running ip.""" diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index 034e3d42017721..7bfc2d1b6fe057 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -260,7 +260,7 @@ def symlink_or_copy(self, src, dst, relative_symlinks_ok=False): basename + ext) # Builds or venv's from builds need to remap source file # locations, as we do not put them into Lib/venv/scripts - if sysconfig.is_python_build(True) or not os.path.isfile(srcfn): + if sysconfig.is_python_build() or not os.path.isfile(srcfn): if basename.endswith('_d'): ext = '_d' + ext basename = basename[:-2] @@ -311,7 +311,7 @@ def setup_python(self, context): f for f in os.listdir(dirname) if os.path.normcase(os.path.splitext(f)[1]) in ('.exe', '.dll') ] - if sysconfig.is_python_build(True): + if sysconfig.is_python_build(): suffixes = [ f for f in suffixes if os.path.normcase(f).startswith(('python', 'vcruntime')) @@ -326,7 +326,7 @@ def setup_python(self, context): if os.path.lexists(src): copier(src, os.path.join(binpath, suffix)) - if sysconfig.is_python_build(True): + if sysconfig.is_python_build(): # copy init.tcl for root, dirs, files in os.walk(context.python_dir): if 'init.tcl' in files: @@ -339,14 +339,25 @@ def setup_python(self, context): shutil.copyfile(src, dst) break + def _call_new_python(self, context, *py_args, **kwargs): + """Executes the newly created Python using safe-ish options""" + # gh-98251: We do not want to just use '-I' because that masks + # legitimate user preferences (such as not writing bytecode). All we + # really need is to ensure that the path variables do not overrule + # normal venv handling. 
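Stepping back briefly to the Lib/traceback.py hunks earlier in this patch: `TracebackException` now computes suggestions for `ImportError` (via the new `name_from` attribute), proposes `self.<name>` when a method forgets the `self.` prefix, and appends a hint when a top-level `NameError` matches a standard library module name. A small demo of the `NameError` case; the expected wording follows the diff and may differ slightly in the final release:

```python
import traceback

try:
    print(sys.version)          # 'sys' was never imported in this snippet
except NameError as exc:
    print(traceback.format_exception_only(exc)[-1].strip())
    # Roughly: NameError: name 'sys' is not defined. Did you forget to import 'sys'
```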
+ args = [context.env_exec_cmd, *py_args] + kwargs['env'] = env = os.environ.copy() + env['VIRTUAL_ENV'] = context.env_dir + env.pop('PYTHONHOME', None) + env.pop('PYTHONPATH', None) + kwargs['cwd'] = context.env_dir + kwargs['executable'] = context.env_exec_cmd + subprocess.check_output(args, **kwargs) + def _setup_pip(self, context): """Installs or upgrades pip in a virtual environment""" - # We run ensurepip in isolated mode to avoid side effects from - # environment vars, the current directory and anything else - # intended for the global Python environment - cmd = [context.env_exec_cmd, '-Im', 'ensurepip', '--upgrade', - '--default-pip'] - subprocess.check_output(cmd, stderr=subprocess.STDOUT) + self._call_new_python(context, '-m', 'ensurepip', '--upgrade', + '--default-pip', stderr=subprocess.STDOUT) def setup_scripts(self, context): """ @@ -445,9 +456,8 @@ def upgrade_dependencies(self, context): logger.debug( f'Upgrading {CORE_VENV_DEPS} packages in {context.bin_path}' ) - cmd = [context.env_exec_cmd, '-m', 'pip', 'install', '--upgrade'] - cmd.extend(CORE_VENV_DEPS) - subprocess.check_call(cmd) + self._call_new_python(context, '-m', 'pip', 'install', '--upgrade', + *CORE_VENV_DEPS) def create(env_dir, system_site_packages=False, clear=False, diff --git a/Lib/wsgiref/handlers.py b/Lib/wsgiref/handlers.py index 6623b700537cf9..cd0916dc5553fb 100644 --- a/Lib/wsgiref/handlers.py +++ b/Lib/wsgiref/handlers.py @@ -237,9 +237,7 @@ def start_response(self, status, headers,exc_info=None): self.status = status self.headers = self.headers_class(headers) status = self._convert_string_type(status, "Status") - assert len(status)>=4,"Status must be at least 4 characters" - assert status[:3].isdigit(), "Status message must begin w/3-digit code" - assert status[3]==" ", "Status message must have a space after code" + self._validate_status(status) if __debug__: for name, val in headers: @@ -250,6 +248,14 @@ def start_response(self, status, headers,exc_info=None): return self.write + def _validate_status(self, status): + if len(status) < 4: + raise AssertionError("Status must be at least 4 characters") + if not status[:3].isdigit(): + raise AssertionError("Status message must begin w/3-digit code") + if status[3] != " ": + raise AssertionError("Status message must have a space after code") + def _convert_string_type(self, value, title): """Convert/check value type.""" if type(value) is str: diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index ead0740a0eefec..8f834267b28c2e 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -549,7 +549,7 @@ def from_file(cls, filename, arcname=None, *, strict_timestamps=True): def is_dir(self): """Return True if this archive member is a directory.""" - return self.filename[-1] == '/' + return self.filename.endswith('/') # ZIP encryption uses the CRC32 one-byte primitive for scrambling some @@ -1727,6 +1727,9 @@ def _extract_member(self, member, targetpath, pwd): # filter illegal characters on Windows arcname = self._sanitize_windows_name(arcname, os.path.sep) + if not arcname: + raise ValueError("Empty filename.") + targetpath = os.path.join(targetpath, arcname) targetpath = os.path.normpath(targetpath) @@ -1816,7 +1819,7 @@ def writestr(self, zinfo_or_arcname, data, date_time=time.localtime(time.time())[:6]) zinfo.compress_type = self.compression zinfo._compresslevel = self.compresslevel - if zinfo.filename[-1] == '/': + if zinfo.filename.endswith('/'): zinfo.external_attr = 0o40775 << 16 # drwxrwxr-x zinfo.external_attr |= 0x10 # 
MS-DOS directory flag else: diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 9fd87f3361fde9..b789e30946a835 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -360,9 +360,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.38.4", - url="https://sqlite.org/2022/sqlite-autoconf-3380400.tar.gz", - checksum="34c0b92a0609ed4ce78582e8dc1ed45a", + name="SQLite 3.39.4", + url="https://sqlite.org/2022/sqlite-autoconf-3390400.tar.gz", + checksum="44b7e6691b0954086f717a6c43b622a5", extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS5 ' '-DSQLITE_ENABLE_FTS4 ' diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf index 8d699395f304cc..dbe8c520a7c78d 100644 --- a/Mac/BuildScript/resources/ReadMe.rtf +++ b/Mac/BuildScript/resources/ReadMe.rtf @@ -1,4 +1,4 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2580 +{\rtf1\ansi\ansicpg1252\cocoartf2639 \cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique; \f3\fmodern\fcharset0 CourierNewPSMT;\f4\fmodern\fcharset0 Courier;} {\colortbl;\red255\green255\blue255;} @@ -11,7 +11,7 @@ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 \f1\b \cf0 NOTE: -\f0\b0 This is an alpha preview of Python 3.11.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha preview of Python 3.12.0, the next feature release of Python 3. It is not intended for production use.\ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 \cf0 \ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf index e6ccfcb3fce961..7819241b618d87 100644 --- a/Mac/BuildScript/resources/Welcome.rtf +++ b/Mac/BuildScript/resources/Welcome.rtf @@ -1,4 +1,4 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2580 +{\rtf1\ansi\ansicpg1252\cocoartf2639 \cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; } {\colortbl;\red255\green255\blue255;} @@ -26,5 +26,5 @@ At the end of this install, click on \ \f1\b NOTE: -\f0\b0 This is an alpha test preview of Python 3.11.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha test preview of Python 3.12.0, the next feature release of Python 3. It is not intended for production use.\ } \ No newline at end of file diff --git a/Mac/Extras.install.py b/Mac/Extras.install.py index ab1af71dfd823f..41bfa53d616307 100644 --- a/Mac/Extras.install.py +++ b/Mac/Extras.install.py @@ -9,10 +9,9 @@ debug = 0 def isclean(name): - if name == 'CVS': return 0 - if name == '.cvsignore': return 0 - if name == '.DS_store': return 0 - if name == '.svn': return 0 + if name in ('CVS', '.cvsignore', '.svn'): + return 0 + if name.lower() == '.ds_store': return 0 if name.endswith('~'): return 0 if name.endswith('.BAK'): return 0 if name.endswith('.pyc'): return 0 diff --git a/Mac/README.rst b/Mac/README.rst index 7476639d0ff541..bc40b41f7f38ad 100644 --- a/Mac/README.rst +++ b/Mac/README.rst @@ -10,6 +10,19 @@ Python on macOS README This document provides a quick overview of some macOS specific features in the Python distribution. 
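Returning to the Lib/venv change a few files above: `EnvBuilder` now routes every invocation of the newly created interpreter through `_call_new_python()`, which pins `VIRTUAL_ENV` and drops `PYTHONHOME`/`PYTHONPATH` instead of relying on `-I`. A roughly equivalent standalone sketch; `env_exec_cmd` and `env_dir` stand in for the context attributes, and the example paths are placeholders:

```python
import os
import subprocess

def call_new_python(env_exec_cmd, env_dir, *py_args):
    # Keep the user's environment, but make sure path variables cannot
    # redirect the new interpreter away from its venv (see gh-98251).
    env = os.environ.copy()
    env["VIRTUAL_ENV"] = env_dir
    env.pop("PYTHONHOME", None)
    env.pop("PYTHONPATH", None)
    return subprocess.check_output([env_exec_cmd, *py_args], env=env,
                                   cwd=env_dir, executable=env_exec_cmd)

# Placeholder paths, for example:
# call_new_python("/tmp/venv/bin/python", "/tmp/venv",
#                 "-m", "ensurepip", "--upgrade", "--default-pip")
```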
+Compilers for building on macOS +=============================== + +The core developers primarily test builds on macOS with Apple's compiler tools, +either Xcode or the Command Line Tools. For these we only support building with +a compiler that includes an SDK that targets the OS on the build machine, that is +the version of Xcode that shipped with the OS version or one newer. + +For example, for macOS 12 we support Xcode 13 and Xcode 14 (or the corresponding +Command Line Tools). + +Building with other compilers, such as GCC, likely works, but is not actively supported. + macOS specific arguments to configure ===================================== diff --git a/Makefile.pre.in b/Makefile.pre.in index 11118354f15def..5c49af36d86736 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -38,7 +38,6 @@ CC= @CC@ CXX= @CXX@ LINKCC= @LINKCC@ AR= @AR@ -READELF= @READELF@ SOABI= @SOABI@ LDVERSION= @LDVERSION@ LIBPYTHON= @LIBPYTHON@ @@ -285,7 +284,7 @@ BUILDPYTHON= python$(BUILDEXE) HOSTRUNNER= @HOSTRUNNER@ PYTHON_FOR_REGEN?=@PYTHON_FOR_REGEN@ -UPDATE_FILE=$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/update_file.py +UPDATE_FILE=$(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/update_file.py PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@ # Single-platform builds depend on $(BUILDPYTHON). Cross builds use an # external "build Python" and have an empty PYTHON_FOR_BUILD_DEPS. @@ -426,6 +425,7 @@ PYTHON_OBJS= \ Python/formatter_unicode.o \ Python/fileutils.o \ Python/suggestions.o \ + Python/perf_trampoline.o \ Python/$(DYNLOADFILE) \ $(LIBOBJS) \ $(MACHDEP_OBJS) \ @@ -479,7 +479,6 @@ OBJECT_OBJS= \ Objects/unicodectype.o \ Objects/unionobject.o \ Objects/weakrefobject.o \ - Objects/perf_trampoline.o \ @PERF_TRAMPOLINE_OBJ@ DEEPFREEZE_OBJS = Python/deepfreeze/deepfreeze.o @@ -705,7 +704,7 @@ coverage-report: regen-token regen-frozen .PHONY=clinic clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir) - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_global_objects.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_global_objects.py # Build the interpreter $(BUILDPYTHON): Programs/python.o $(LINK_PYTHON_DEPS) @@ -907,7 +906,7 @@ sharedmods: $(SHAREDMODS) pybuilddir.txt # dependency on BUILDPYTHON ensures that the target is run last checksharedmods: sharedmods $(PYTHON_FOR_BUILD_DEPS) $(BUILDPYTHON) - @$(RUNSHARED) $(PYTHON_FOR_BUILD) $(srcdir)/Tools/scripts/check_extension_modules.py + @$(RUNSHARED) $(PYTHON_FOR_BUILD) $(srcdir)/Tools/build/check_extension_modules.py rundsymutil: sharedmods $(PYTHON_FOR_BUILD_DEPS) $(BUILDPYTHON) @if [ ! 
-z $(DSYMUTIL) ] ; then \ @@ -961,13 +960,13 @@ regen-test-frozenmain: $(BUILDPYTHON) .PHONY: regen-test-levenshtein regen-test-levenshtein: # Regenerate Lib/test/levenshtein_examples.json - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_levenshtein_examples.py Lib/test/levenshtein_examples.json + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_levenshtein_examples.py $(srcdir)/Lib/test/levenshtein_examples.json .PHONY: regen-re regen-re: $(BUILDPYTHON) # Regenerate Lib/re/_casefix.py - # using Tools/scripts/generate_re_casefix.py - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/generate_re_casefix.py $(srcdir)/Lib/re/_casefix.py + # using Tools/build/generate_re_casefix.py + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/build/generate_re_casefix.py $(srcdir)/Lib/re/_casefix.py Programs/_testembed: Programs/_testembed.o $(LINK_PYTHON_DEPS) $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS) @@ -1013,7 +1012,7 @@ _bootstrap_python: $(LIBRARY_OBJS_OMIT_FROZEN) Programs/_bootstrap_python.o Modu # 2) deepfreeze modules with external build Python. # -# FROZEN_FILES_* are auto-generated by Tools/scripts/freeze_modules.py. +# FROZEN_FILES_* are auto-generated by Tools/build/freeze_modules.py. FROZEN_FILES_IN = \ Lib/importlib/_bootstrap.py \ Lib/importlib/_bootstrap_external.py \ @@ -1149,11 +1148,11 @@ Python/frozen_modules/frozen_only.h: Tools/freeze/flag.py $(FREEZE_MODULE_DEPS) # END: freezing modules -Tools/scripts/freeze_modules.py: $(FREEZE_MODULE) +Tools/build/freeze_modules.py: $(FREEZE_MODULE) .PHONY: regen-frozen -regen-frozen: Tools/scripts/freeze_modules.py $(FROZEN_FILES_IN) - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/freeze_modules.py +regen-frozen: Tools/build/freeze_modules.py $(FROZEN_FILES_IN) + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py @echo "The Makefile was updated, you may need to re-run make." ############################################################################ @@ -1162,11 +1161,11 @@ regen-frozen: Tools/scripts/freeze_modules.py $(FROZEN_FILES_IN) .PHONY: regen-deepfreeze regen-deepfreeze: $(DEEPFREEZE_OBJS) -DEEPFREEZE_DEPS=$(srcdir)/Tools/scripts/deepfreeze.py $(FREEZE_MODULE_DEPS) $(FROZEN_FILES_OUT) +DEEPFREEZE_DEPS=$(srcdir)/Tools/build/deepfreeze.py $(FREEZE_MODULE_DEPS) $(FROZEN_FILES_OUT) # BEGIN: deepfreeze modules Python/deepfreeze/deepfreeze.c: $(DEEPFREEZE_DEPS) - $(PYTHON_FOR_FREEZE) $(srcdir)/Tools/scripts/deepfreeze.py \ + $(PYTHON_FOR_FREEZE) $(srcdir)/Tools/build/deepfreeze.py \ Python/frozen_modules/importlib._bootstrap.h:importlib._bootstrap \ Python/frozen_modules/importlib._bootstrap_external.h:importlib._bootstrap_external \ Python/frozen_modules/zipimport.h:zipimport \ @@ -1203,8 +1202,8 @@ regen-importlib: regen-frozen # Global objects .PHONY: regen-global-objects -regen-global-objects: $(srcdir)/Tools/scripts/generate_global_objects.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_global_objects.py +regen-global-objects: $(srcdir)/Tools/build/generate_global_objects.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_global_objects.py @echo "Note: Global objects can be added or removed by other tools (e.g. deepfreeze), " @echo " so be sure to re-run regen-global-objects after those tools." 
@@ -1220,12 +1219,12 @@ check-abidump: all abidiff $(srcdir)/Doc/data/python$(LDVERSION).abi "libpython$(LDVERSION).so" --drop-private-types --no-architecture --no-added-syms regen-limited-abi: all - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/stable_abi.py --generate-all $(srcdir)/Misc/stable_abi.toml + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/build/stable_abi.py --generate-all $(srcdir)/Misc/stable_abi.toml ############################################################################ # Regenerate all generated files -regen-all: regen-opcode regen-opcode-targets regen-typeslots \ +regen-all: regen-cases regen-opcode regen-opcode-targets regen-typeslots \ regen-token regen-ast regen-keyword regen-sre regen-frozen clinic \ regen-pegen-metaparser regen-pegen regen-test-frozenmain \ regen-test-levenshtein regen-global-objects @@ -1331,8 +1330,8 @@ regen-ast: .PHONY: regen-opcode regen-opcode: # Regenerate Include/opcode.h from Lib/opcode.py - # using Tools/scripts/generate_opcode_h.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_opcode_h.py \ + # using Tools/build/generate_opcode_h.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_opcode_h.py \ $(srcdir)/Lib/opcode.py \ $(srcdir)/Include/opcode.h.new \ $(srcdir)/Include/internal/pycore_opcode.h.new @@ -1342,23 +1341,23 @@ regen-opcode: .PHONY: regen-token regen-token: # Regenerate Doc/library/token-list.inc from Grammar/Tokens - # using Tools/scripts/generate_token.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \ + # using Tools/build/generate_token.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_token.py rst \ $(srcdir)/Grammar/Tokens \ $(srcdir)/Doc/library/token-list.inc # Regenerate Include/internal/pycore_token.h from Grammar/Tokens - # using Tools/scripts/generate_token.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \ + # using Tools/build/generate_token.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_token.py h \ $(srcdir)/Grammar/Tokens \ $(srcdir)/Include/internal/pycore_token.h # Regenerate Parser/token.c from Grammar/Tokens - # using Tools/scripts/generate_token.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \ + # using Tools/build/generate_token.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_token.py c \ $(srcdir)/Grammar/Tokens \ $(srcdir)/Parser/token.c # Regenerate Lib/token.py from Grammar/Tokens - # using Tools/scripts/generate_token.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py py \ + # using Tools/build/generate_token.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_token.py py \ $(srcdir)/Grammar/Tokens \ $(srcdir)/Lib/token.py @@ -1375,16 +1374,16 @@ regen-keyword: .PHONY: regen-stdlib-module-names regen-stdlib-module-names: all Programs/_testembed # Regenerate Python/stdlib_module_names.h - # using Tools/scripts/generate_stdlib_module_names.py + # using Tools/build/generate_stdlib_module_names.py $(RUNSHARED) ./$(BUILDPYTHON) \ - $(srcdir)/Tools/scripts/generate_stdlib_module_names.py \ + $(srcdir)/Tools/build/generate_stdlib_module_names.py \ > $(srcdir)/Python/stdlib_module_names.h.new $(UPDATE_FILE) $(srcdir)/Python/stdlib_module_names.h $(srcdir)/Python/stdlib_module_names.h.new regen-sre: # Regenerate Modules/_sre/sre_constants.h and Modules/_sre/sre_targets.h - # from Lib/re/_constants.py using Tools/scripts/generate_sre_constants.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_sre_constants.py \ + # from Lib/re/_constants.py using 
Tools/build/generate_sre_constants.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_sre_constants.py \ $(srcdir)/Lib/re/_constants.py \ $(srcdir)/Modules/_sre/sre_constants.h \ $(srcdir)/Modules/_sre/sre_targets.h @@ -1445,7 +1444,19 @@ regen-opcode-targets: $(srcdir)/Python/opcode_targets.h.new $(UPDATE_FILE) $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/opcode_targets.h.new -Python/ceval.o: $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/condvar.h +.PHONY: regen-cases +regen-cases: + # Regenerate Python/generated_cases.c.h from Python/bytecodes.c + # using Tools/cases_generator/generate_cases.py + PYTHONPATH=$(srcdir)/Tools/cases_generator \ + $(PYTHON_FOR_REGEN) \ + $(srcdir)/Tools/cases_generator/generate_cases.py \ + -i $(srcdir)/Python/bytecodes.c \ + -o $(srcdir)/Python/generated_cases.c.h.new + $(UPDATE_FILE) $(srcdir)/Python/generated_cases.c.h $(srcdir)/Python/generated_cases.c.h.new + +Python/ceval.o: $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/condvar.h $(srcdir)/Python/generated_cases.c.h + Python/frozen.o: $(FROZEN_FILES_OUT) @@ -1574,6 +1585,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/listobject.h \ $(srcdir)/Include/cpython/longintrepr.h \ $(srcdir)/Include/cpython/longobject.h \ + $(srcdir)/Include/cpython/memoryobject.h \ $(srcdir)/Include/cpython/methodobject.h \ $(srcdir)/Include/cpython/modsupport.h \ $(srcdir)/Include/cpython/object.h \ @@ -1616,6 +1628,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_condvar.h \ $(srcdir)/Include/internal/pycore_context.h \ $(srcdir)/Include/internal/pycore_dict.h \ + $(srcdir)/Include/internal/pycore_dict_state.h \ $(srcdir)/Include/internal/pycore_descrobject.h \ $(srcdir)/Include/internal/pycore_dtoa.h \ $(srcdir)/Include/internal/pycore_exceptions.h \ @@ -1627,6 +1640,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_getopt.h \ $(srcdir)/Include/internal/pycore_gil.h \ $(srcdir)/Include/internal/pycore_global_objects.h \ + $(srcdir)/Include/internal/pycore_global_objects_fini_generated.h \ $(srcdir)/Include/internal/pycore_hamt.h \ $(srcdir)/Include/internal/pycore_hashtable.h \ $(srcdir)/Include/internal/pycore_import.h \ @@ -1638,12 +1652,15 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_moduleobject.h \ $(srcdir)/Include/internal/pycore_namespace.h \ $(srcdir)/Include/internal/pycore_object.h \ + $(srcdir)/Include/internal/pycore_obmalloc.h \ + $(srcdir)/Include/internal/pycore_obmalloc_init.h \ $(srcdir)/Include/internal/pycore_pathconfig.h \ $(srcdir)/Include/internal/pycore_pyarena.h \ $(srcdir)/Include/internal/pycore_pyerrors.h \ $(srcdir)/Include/internal/pycore_pyhash.h \ $(srcdir)/Include/internal/pycore_pylifecycle.h \ $(srcdir)/Include/internal/pycore_pymem.h \ + $(srcdir)/Include/internal/pycore_pymem_init.h \ $(srcdir)/Include/internal/pycore_pystate.h \ $(srcdir)/Include/internal/pycore_range.h \ $(srcdir)/Include/internal/pycore_runtime.h \ @@ -1662,6 +1679,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_ucnhash.h \ $(srcdir)/Include/internal/pycore_unionobject.h \ $(srcdir)/Include/internal/pycore_unicodeobject.h \ + $(srcdir)/Include/internal/pycore_unicodeobject_generated.h \ $(srcdir)/Include/internal/pycore_warnings.h \ $(DTRACE_HEADERS) \ @PLATFORM_HEADERS@ \ @@ -1936,7 +1954,6 @@ LIBSUBDIRS= asyncio \ ctypes ctypes/macholib \ curses \ dbm \ - distutils distutils/command \ email email/mime \ encodings \ ensurepip ensurepip/_bundled \ @@ -1963,8 +1980,7 @@ LIBSUBDIRS= asyncio \ xmlrpc \ zoneinfo \ __phello__ -TESTSUBDIRS= distutils/tests \ - 
idlelib/idle_test \ +TESTSUBDIRS= idlelib/idle_test \ test test/audiodata \ test/capath test/cjkencodings \ test/data test/decimaltestdata \ @@ -2038,9 +2054,12 @@ TESTSUBDIRS= distutils/tests \ test/test_zoneinfo test/test_zoneinfo/data \ test/test_unittest test/test_unittest/testmock \ test/tracedmodules \ + test/typinganndata \ test/xmltestdata test/xmltestdata/c14n-20 \ test/ziptestdata +COMPILEALL_OPTS=-j0 + TEST_MODULES=@TEST_MODULES@ libinstall: all $(srcdir)/Modules/xxmodule.c @for i in $(SCRIPTDIR) $(LIBDEST); \ @@ -2110,36 +2129,15 @@ libinstall: all $(srcdir)/Modules/xxmodule.c $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ $(DESTDIR)$(LIBDEST); \ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt - if test -d $(DESTDIR)$(LIBDEST)/distutils/tests; then \ - $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \ - $(DESTDIR)$(LIBDEST)/distutils/tests ; \ - fi - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ - -x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ - $(DESTDIR)$(LIBDEST) + @ # Build PYC files for the 3 optimization levels (0, 1, 2) -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ - -x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ - $(DESTDIR)$(LIBDEST) - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST) -f \ + $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ + -o 0 -o 1 -o 2 $(COMPILEALL_OPTS) -d $(LIBDEST) -f \ -x 'bad_coding|badsyntax|site-packages|test/test_lib2to3/data' \ $(DESTDIR)$(LIBDEST) -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ - -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ - -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ - -j0 -d $(LIBDEST)/site-packages -f \ + -o 0 -o 1 -o 2 $(COMPILEALL_OPTS) -d $(LIBDEST)/site-packages -f \ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt @@ -2369,7 +2367,7 @@ config.status: $(srcdir)/configure .PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre -Objects/asm_trampoline.o: $(srcdir)/Objects/asm_trampoline.S +Python/asm_trampoline.o: $(srcdir)/Python/asm_trampoline.S $(CC) -c $(PY_CORE_CFLAGS) -o $@ $< # Some make's put the object file in the current directory @@ -2385,7 +2383,7 @@ Python/dtoa.o: Python/dtoa.c # Run reindent on the library reindent: - ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/reindent.py -r $(srcdir)/Lib + ./$(BUILDPYTHON) $(srcdir)/Tools/patchcheck/reindent.py -r $(srcdir)/Lib # Rerun configure with the same options as it was run last time, # provided the config.status script exists @@ -2437,7 +2435,7 @@ rmtestturds: -rm -f gb-18030-2000.xml docclean: - $(MAKE) -C Doc clean + $(MAKE) -C $(srcdir)/Doc clean # like the 'clean' target but retain the profile guided optimization (PGO) # data. 
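The libinstall hunk above folds three separate byte-compilation passes into one by passing several `-o` optimization levels to compileall (with the new `COMPILEALL_OPTS=-j0` for parallelism). The equivalent invocation from Python, mirroring the Makefile flags; the target directory is a placeholder:

```python
import subprocess
import sys

# One pass that writes .pyc files for optimization levels 0, 1 and 2,
# using all CPUs (-j 0), just like the updated libinstall rule.
subprocess.run([sys.executable, "-m", "compileall",
                "-o", "0", "-o", "1", "-o", "2", "-j", "0",
                "build/lib-placeholder"],    # placeholder path
               check=True)
```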
The PGO data is only valid if source code remains unchanged. @@ -2510,7 +2508,7 @@ distclean: clobber docclean # Check that all symbols exported by libpython start with "Py" or "_Py" smelly: all - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/smelly.py + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/build/smelly.py # Find files with funny names funny: @@ -2545,10 +2543,10 @@ funny: # Perform some verification checks on any modified files. patchcheck: all - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/patchcheck.py + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/patchcheck/patchcheck.py check-limited-abi: all - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/stable_abi.py --all $(srcdir)/Misc/stable_abi.toml + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/build/stable_abi.py --all $(srcdir)/Misc/stable_abi.toml .PHONY: update-config update-config: @@ -2576,17 +2574,19 @@ Python/thread.o: @THREADHEADERS@ $(srcdir)/Python/condvar.h ########################################################################## # Module dependencies and platform-specific files -MODULE_DEPS=$(PYTHON_HEADERS) Modules/config.c $(EXPORTSYMS) +# force rebuild when header file or module build flavor (static/shared) is changed +MODULE_DEPS_STATIC=Modules/config.c +MODULE_DEPS_SHARED=$(MODULE_DEPS_STATIC) $(EXPORTSYMS) MODULE_CMATH_DEPS=$(srcdir)/Modules/_math.h MODULE_MATH_DEPS=$(srcdir)/Modules/_math.h -MODULE_PYEXPAT_DEPS=$(LIBEXPAT_HEADERS) @LIBEXPAT_INTERNAL@ +MODULE_PYEXPAT_DEPS=@LIBEXPAT_INTERNAL@ MODULE_UNICODEDATA_DEPS=$(srcdir)/Modules/unicodedata_db.h $(srcdir)/Modules/unicodename_db.h MODULE__BLAKE2_DEPS=$(srcdir)/Modules/_blake2/impl/blake2-config.h $(srcdir)/Modules/_blake2/impl/blake2-impl.h $(srcdir)/Modules/_blake2/impl/blake2.h $(srcdir)/Modules/_blake2/impl/blake2b-load-sse2.h $(srcdir)/Modules/_blake2/impl/blake2b-load-sse41.h $(srcdir)/Modules/_blake2/impl/blake2b-ref.c $(srcdir)/Modules/_blake2/impl/blake2b-round.h $(srcdir)/Modules/_blake2/impl/blake2b.c $(srcdir)/Modules/_blake2/impl/blake2s-load-sse2.h $(srcdir)/Modules/_blake2/impl/blake2s-load-sse41.h $(srcdir)/Modules/_blake2/impl/blake2s-load-xop.h $(srcdir)/Modules/_blake2/impl/blake2s-ref.c $(srcdir)/Modules/_blake2/impl/blake2s-round.h $(srcdir)/Modules/_blake2/impl/blake2s.c $(srcdir)/Modules/_blake2/blake2module.h $(srcdir)/Modules/hashlib.h MODULE__CTYPES_DEPS=$(srcdir)/Modules/_ctypes/ctypes.h $(srcdir)/Modules/_ctypes/darwin/dlfcn.h MODULE__CTYPES_MALLOC_CLOSURE=@MODULE__CTYPES_MALLOC_CLOSURE@ -MODULE__DECIMAL_DEPS=$(srcdir)/Modules/_decimal/docstrings.h $(LIBMPDEC_HEADERS) @LIBMPDEC_INTERNAL@ -MODULE__ELEMENTTREE_DEPS=$(srcdir)/Modules/pyexpat.c $(LIBEXPAT_HEADERS) @LIBEXPAT_INTERNAL@ +MODULE__DECIMAL_DEPS=$(srcdir)/Modules/_decimal/docstrings.h @LIBMPDEC_INTERNAL@ +MODULE__ELEMENTTREE_DEPS=$(srcdir)/Modules/pyexpat.c @LIBEXPAT_INTERNAL@ MODULE__HASHLIB_DEPS=$(srcdir)/Modules/hashlib.h MODULE__IO_DEPS=$(srcdir)/Modules/_io/_iomodule.h MODULE__MD5_DEPS=$(srcdir)/Modules/hashlib.h @@ -2596,7 +2596,7 @@ MODULE__SHA3_DEPS=$(srcdir)/Modules/_sha3/sha3.c $(srcdir)/Modules/_sha3/sha3.h MODULE__SHA512_DEPS=$(srcdir)/Modules/hashlib.h MODULE__SOCKET_DEPS=$(srcdir)/Modules/socketmodule.h $(srcdir)/Modules/addrinfo.h $(srcdir)/Modules/getaddrinfo.c $(srcdir)/Modules/getnameinfo.c MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data.h $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h 
$(srcdir)/Modules/socketmodule.h -MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h +MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h # IF YOU PUT ANYTHING HERE IT WILL GO AWAY diff --git a/Misc/ACKS b/Misc/ACKS index ec5e326847b8ca..5d97067b85d3d4 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -344,6 +344,7 @@ Hervé Coatanhay Riccardo Coccioli Nick Coghlan Josh Cogliati +Noam Cohen Dave Cole Terrence Cole Benjamin Collar @@ -1439,6 +1440,7 @@ Pierre Quentel Brian Quinlan Anders Qvist Thomas Rachel +Domenico Ragusa Ram Rachum Jeffrey Rackauckas Jérôme Radix diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst new file mode 100644 index 00000000000000..2a943fe481af99 --- /dev/null +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -0,0 +1,6194 @@ +.. date: 2022-09-28-17-09-37 +.. gh-issue: 97616 +.. nonce: K1e3Xs +.. release date: 2022-10-25 +.. section: Security + +Fix multiplying a list by an integer (``list *= int``): detect the integer +overflow when the new allocated length is close to the maximum size. Issue +reported by Jordan Limor. Patch by Victor Stinner. + +.. + +.. date: 2022-09-07-10-42-00 +.. gh-issue: 97514 +.. nonce: Yggdsl +.. section: Security + +On Linux the :mod:`multiprocessing` module returns to using filesystem +backed unix domain sockets for communication with the *forkserver* process +instead of the Linux abstract socket namespace. Only code that chooses to +use the :ref:`"forkserver" start method ` is +affected. + +Abstract sockets have no permissions and could allow any user on the system +in the same `network namespace +`_ (often +the whole system) to inject code into the multiprocessing *forkserver* +process. This was a potential privilege escalation. Filesystem based socket +permissions restrict this to the *forkserver* process user as was the +default in Python 3.8 and earlier. + +This prevents Linux `CVE-2022-42919 +`_. + +.. + +.. date: 2022-06-15-20-09-23 +.. gh-issue: 87389 +.. nonce: QVaC3f +.. section: Security + +:mod:`http.server`: Fix an open redirection vulnerability in the HTTP server +when an URI path starts with ``//``. Vulnerability discovered, and initial +fix proposed, by Hamza Avvan. + +.. + +.. date: 2022-06-03-12-52-53 +.. gh-issue: 79096 +.. nonce: YVoxgC +.. section: Security + +LWPCookieJar and MozillaCookieJar create files with file mode 600 instead of +644 (Microsoft Windows is not affected) + +.. + +.. date: 2022-05-19-08-53-07 +.. gh-issue: 92888 +.. nonce: TLtR9W +.. section: Security + +Fix ``memoryview`` use after free when accessing the backing buffer in +certain cases. + +.. + +.. date: 2022-04-27-18-25-30 +.. gh-issue: 68966 +.. nonce: gjS8zs +.. section: Security + +The deprecated mailcap module now refuses to inject unsafe text (filenames, +MIME types, parameters) into shell commands. Instead of using such text, it +will warn and act as if a match was not found (or for test commands, as if +the test failed). + +.. + +.. date: 2022-10-19-23-48-46 +.. gh-issue: 98374 +.. nonce: eOBh8M +.. section: Core and Builtins + +Suppress ImportError for invalid query for help() command. Patch by Dong-hee +Na. + +.. + +.. date: 2022-10-19-20-53-38 +.. gh-issue: 98461 +.. 
nonce: iNmPDV +.. section: Core and Builtins + +Fix source location in bytecode for list, set and dict comprehensions as +well as generator expressions. + +.. + +.. date: 2022-10-19-18-03-28 +.. gh-issue: 98354 +.. nonce: GRGta3 +.. section: Core and Builtins + +Added unicode check for ``name`` attribute of ``spec`` argument passed in +:func:`_imp.create_builtin` function. + +.. + +.. date: 2022-10-18-16-17-44 +.. gh-issue: 98398 +.. nonce: x4rYK_ +.. section: Core and Builtins + +Fix source location of 'assert' bytecodes. + +.. + +.. date: 2022-10-18-14-11-32 +.. gh-issue: 98390 +.. nonce: H1sxJu +.. section: Core and Builtins + +Fix location of sub-expressions of boolean expressions, by reducing their +scope to that of the sub-expression. + +.. + +.. date: 2022-10-13-23-23-01 +.. gh-issue: 98254 +.. nonce: bC8IKt +.. section: Core and Builtins + +Modules from the standard library are now potentially suggested as part of +the error messages displayed by the interpreter when an :exc:`NameError` is +raised to the top level. Patch by Pablo Galindo + +.. + +.. date: 2022-10-06-23-13-34 +.. gh-issue: 97997 +.. nonce: JQaJKF +.. section: Core and Builtins + +Add running column offset to the tokenizer state to avoid calculating AST +column information with pointer arithmetic. + +.. + +.. date: 2022-10-06-20-41-29 +.. gh-issue: 97973 +.. nonce: gB-xWi +.. section: Core and Builtins + +Modify the tokenizer to return all necessary information the parser needs to +set location information in the AST nodes, so that the parser does not have +to calculate those doing pointer arithmetic. + +.. + +.. date: 2022-10-06-15-45-57 +.. gh-issue: 96078 +.. nonce: fS-6mU +.. section: Core and Builtins + +:func:`os.sched_yield` now release the GIL while calling sched_yield(2). +Patch by Dong-hee Na. + +.. + +.. date: 2022-10-06-14-14-28 +.. gh-issue: 97955 +.. nonce: Nq5VXD +.. section: Core and Builtins + +Migrate :mod:`zoneinfo` to Argument Clinic. + +.. + +.. date: 2022-10-06-06-36-29 +.. gh-issue: 97912 +.. nonce: jGRJpa +.. section: Core and Builtins + +The compiler now avoids quadratic behavior when finding which instructions +should use the :opcode:`LOAD_FAST_CHECK` opcode. + +.. + +.. date: 2022-10-06-02-11-34 +.. gh-issue: 97002 +.. nonce: Zvsk71 +.. section: Core and Builtins + +Fix an issue where several frame objects could be backed by the same +interpreter frame, possibly leading to corrupted memory and hard crashes of +the interpreter. + +.. + +.. date: 2022-10-05-17-02-22 +.. gh-issue: 97943 +.. nonce: LYAWlE +.. section: Core and Builtins + +Bugfix: :func:`PyFunction_GetAnnotations` should return a borrowed +reference. It was returning a new reference. + +.. + +.. date: 2022-10-05-11-37-15 +.. gh-issue: 97922 +.. nonce: Zu9Bge +.. section: Core and Builtins + +The Garbage Collector now runs only on the eval breaker mechanism of the +Python bytecode evaluation loop instead on object allocations. The GC can +also run when :c:func:`PyErr_CheckSignals` is called so C extensions that +need to run for a long time without executing any Python code also have a +chance to execute the GC periodically. + +.. + +.. date: 2022-10-05-00-37-27 +.. gh-issue: 65961 +.. nonce: z0Ys0y +.. section: Core and Builtins + +When ``__package__`` is different than ``__spec__.parent``, raise a +``DeprecationWarning`` instead of ``ImportWarning``. + +Also remove ``importlib.util.set_package()`` which was scheduled for +removal. + +.. + +.. date: 2022-10-04-17-02-18 +.. gh-issue: 97850 +.. nonce: E3QTRA +.. 
section: Core and Builtins + +Long deprecated, ``module_repr()`` should now be completely eradicated. + +.. + +.. date: 2022-10-04-14-04-40 +.. gh-issue: 86298 +.. nonce: QVM7G1 +.. section: Core and Builtins + +In cases where ``warnings.warn_explicit()`` consults the module's loader, an +``DeprecationWarning`` is issued when ``m.__loader__`` differs from +``m.__spec__.loader``. + +.. + +.. date: 2022-10-04-02-00-10 +.. gh-issue: 97779 +.. nonce: f3N1hI +.. section: Core and Builtins + +Ensure that all Python frame objects are backed by "complete" frames. + +.. + +.. date: 2022-10-03-16-12-39 +.. gh-issue: 91052 +.. nonce: MsYL9d +.. section: Core and Builtins + +Add API for subscribing to modification events on selected dictionaries. + +.. + +.. date: 2022-10-03-13-35-48 +.. gh-issue: 97752 +.. nonce: 0xTjJY +.. section: Core and Builtins + +Fix possible data corruption or crashes when accessing the ``f_back`` member +of newly-created generator or coroutine frames. + +.. + +.. date: 2022-10-01-08-55-09 +.. gh-issue: 97591 +.. nonce: pw6kkH +.. section: Core and Builtins + +Fixed a missing incref/decref pair in ``Exception.__setstate__()``. Patch by +Ofey Chan. + +.. + +.. date: 2022-09-30-13-26-58 +.. gh-issue: 97670 +.. nonce: n61vMR +.. section: Core and Builtins + +Remove the :func:`sys.getdxp` function and the +``Tools/scripts/analyze_dxp.py`` script. DXP stands for "dynamic execution +pairs". They were related to ``DYNAMIC_EXECUTION_PROFILE`` and ``DXPAIRS`` +macros which have been removed in Python 3.11. Python can now be built with +:option:`./configure --enable-pystats <--enable-pystats>` to gather +statistics on Python opcodes. Patch by Victor Stinner. + +.. + +.. date: 2022-09-29-15-19-29 +.. gh-issue: 94526 +.. nonce: wq5m6T +.. section: Core and Builtins + +Fix the Python path configuration used to initialized :data:`sys.path` at +Python startup. Paths are no longer encoded to UTF-8/strict to avoid +encoding errors if it contains surrogate characters (bytes paths are decoded +with the surrogateescape error handler). Patch by Victor Stinner. + +.. + +.. date: 2022-09-27-11-59-13 +.. gh-issue: 96670 +.. nonce: XrBBit +.. section: Core and Builtins + +The parser now raises :exc:`SyntaxError` when parsing source code containing +null bytes. Patch by Pablo Galindo + +.. + +.. date: 2022-09-21-16-06-37 +.. gh-issue: 96975 +.. nonce: BmE0XY +.. section: Core and Builtins + +Fix a crash occurring when :c:func:`PyEval_GetFrame` is called while the +topmost Python frame is in a partially-initialized state. + +.. + +.. date: 2022-09-21-14-38-31 +.. gh-issue: 96848 +.. nonce: WuoLzU +.. section: Core and Builtins + +Fix command line parsing: reject :option:`-X int_max_str_digits <-X>` option +with no value (invalid) when the :envvar:`PYTHONINTMAXSTRDIGITS` environment +variable is set to a valid limit. Patch by Victor Stinner. + +.. + +.. date: 2022-09-20-11-06-45 +.. gh-issue: 95921 +.. nonce: dkcRQn +.. section: Core and Builtins + +Fix overly-broad source position information for chained comparisons used as +branching conditions. + +.. + +.. date: 2022-09-19-03-35-01 +.. gh-issue: 96821 +.. nonce: izK6JA +.. section: Core and Builtins + +Fix undefined behaviour in ``audioop.c``. + +.. + +.. date: 2022-09-18-08-47-40 +.. gh-issue: 96821 +.. nonce: Co2iOq +.. section: Core and Builtins + +Fix undefined behaviour in ``_testcapimodule.c``. + +.. + +.. date: 2022-09-16-19-02-40 +.. gh-issue: 95778 +.. nonce: cJmnst +.. 
section: Core and Builtins + +When :exc:`ValueError` is raised if an integer is larger than the limit, +mention the :func:`sys.set_int_max_str_digits` function in the error +message. Patch by Victor Stinner. + +.. + +.. date: 2022-09-16-16-54-35 +.. gh-issue: 96387 +.. nonce: GRzewg +.. section: Core and Builtins + +At Python exit, sometimes a thread holding the GIL can wait forever for a +thread (usually a daemon thread) which requested to drop the GIL, whereas +the thread already exited. To fix the race condition, the thread which +requested the GIL drop now resets its request before exiting. Issue +discovered and analyzed by Mingliang ZHAO. Patch by Victor Stinner. + +.. + +.. date: 2022-09-16-12-36-13 +.. gh-issue: 96864 +.. nonce: PLU3i8 +.. section: Core and Builtins + +Fix a possible assertion failure, fatal error, or :exc:`SystemError` if a +line tracing event raises an exception while opcode tracing is enabled. + +.. + +.. date: 2022-09-13-21-45-07 +.. gh-issue: 95778 +.. nonce: Oll4_5 +.. section: Core and Builtins + +The ``PyLong_FromString`` function was refactored to make it more +maintainable and extensible. + +.. + +.. date: 2022-09-13-12-06-46 +.. gh-issue: 96678 +.. nonce: NqGFyb +.. section: Core and Builtins + +Fix undefined behaviour in C code of null pointer arithmetic. + +.. + +.. date: 2022-09-12-16-58-22 +.. gh-issue: 96754 +.. nonce: 0GRme5 +.. section: Core and Builtins + +Make sure that all frame objects created are created from valid interpreter +frames. Prevents the possibility of invalid frames in backtraces and signal +handlers. + +.. + +.. date: 2022-09-12-15-15-04 +.. gh-issue: 90997 +.. nonce: sZO8c9 +.. section: Core and Builtins + +Improve the performance of reading and writing inline bytecode caches on +some platforms. + +.. + +.. date: 2022-09-11-12-43-43 +.. gh-issue: 96751 +.. nonce: anRT6a +.. section: Core and Builtins + +Remove dead code from ``CALL_FUNCTION_EX`` opcode. + +.. + +.. date: 2022-09-11-00-37-50 +.. gh-issue: 90751 +.. nonce: VE8-zf +.. section: Core and Builtins + +:class:`memoryview` now supports half-floats. Patch by Dong-hee Na and +Antoine Pitrou. + +.. + +.. date: 2022-09-09-13-13-27 +.. gh-issue: 96678 +.. nonce: vMxi9F +.. section: Core and Builtins + +Fix case of undefined behavior in ceval.c + +.. + +.. date: 2022-09-08-20-58-10 +.. gh-issue: 64373 +.. nonce: AfCi36 +.. section: Core and Builtins + +Convert :mod:`_functools` to argument clinic. + +.. + +.. date: 2022-09-07-13-38-37 +.. gh-issue: 96641 +.. nonce: wky0Fc +.. section: Core and Builtins + +Do not expose ``KeyWrapper`` in :mod:`_functools`. + +.. + +.. date: 2022-09-07-12-02-11 +.. gh-issue: 96636 +.. nonce: YvN-K6 +.. section: Core and Builtins + +Ensure that tracing, ``sys.setrace()``, is turned on immediately. In +pre-release versions of 3.11, some tracing events might have been lost when +turning on tracing in a ``__del__`` method or interrupt. + +.. + +.. date: 2022-09-06-16-54-49 +.. gh-issue: 96572 +.. nonce: 8DRsaW +.. section: Core and Builtins + +Fix use after free in trace refs build mode. Patch by Kumar Aditya. + +.. + +.. date: 2022-09-06-16-22-13 +.. gh-issue: 96611 +.. nonce: 14wIX8 +.. section: Core and Builtins + +When loading a file with invalid UTF-8 inside a multi-line string, a correct +SyntaxError is emitted. + +.. + +.. date: 2022-09-06-14-26-36 +.. gh-issue: 96612 +.. nonce: P4ZbeY +.. section: Core and Builtins + +Make sure that incomplete frames do not show up in tracemalloc traces. + +.. + +.. date: 2022-09-06-11-19-03 +.. 
gh-issue: 90230 +.. nonce: YOtzs5 +.. section: Core and Builtins + +Fix compiler warnings and test failures when building with +``--enable-pystats``. + +.. + +.. date: 2022-09-05-19-20-44 +.. gh-issue: 96587 +.. nonce: bVxhX2 +.. section: Core and Builtins + +Correctly raise ``SyntaxError`` on exception groups (:pep:`654`) on python +versions prior to 3.11 + +.. + +.. date: 2022-09-05-16-43-44 +.. gh-issue: 96569 +.. nonce: 9lmTCC +.. section: Core and Builtins + +Remove two cases of undefined behavoir, by adding NULL checks. + +.. + +.. date: 2022-09-05-15-07-25 +.. gh-issue: 96582 +.. nonce: HEsL5s +.. section: Core and Builtins + +Fix possible ``NULL`` pointer dereference in ``_PyThread_CurrentFrames``. +Patch by Kumar Aditya. + +.. + +.. date: 2022-09-05-09-56-32 +.. gh-issue: 91079 +.. nonce: H4-DdU +.. section: Core and Builtins + +Separate Python recursion checking from C recursion checking which reduces +the chance of C stack overflow and allows the recursion limit to be +increased safely. + +.. + +.. date: 2022-09-02-16-47-52 +.. gh-issue: 93911 +.. nonce: vF-GWe +.. section: Core and Builtins + +Fix an issue that could prevent :opcode:`LOAD_ATTR` from specializing +properly when accessing properties. + +.. + +.. date: 2022-08-31-18-46-13 +.. gh-issue: 96348 +.. nonce: xzCoTP +.. section: Core and Builtins + +Emit a DeprecationWarning when :meth:`~generator.throw`, +:meth:`~coroutine.throw` or :meth:`~agen.athrow` are called with more than +one argument. + +.. + +.. date: 2022-08-29-13-06-58 +.. gh-issue: 95196 +.. nonce: eGRR4b +.. section: Core and Builtins + +Disable incorrect pickling of the C implemented classmethod descriptors. + +.. + +.. date: 2022-08-29-00-37-21 +.. gh-issue: 96364 +.. nonce: c-IVyb +.. section: Core and Builtins + +Fix text signatures of ``list.__getitem__`` and ``dict.__getitem__``. + +.. + +.. date: 2022-08-28-10-51-19 +.. gh-issue: 96352 +.. nonce: jTLD2d +.. section: Core and Builtins + +Fix :exc:`AttributeError` missing ``name`` and ``obj`` attributes in +:meth:`object.__getattribute__`. Patch by Philip Georgi. + +.. + +.. date: 2022-08-26-18-46-32 +.. gh-issue: 93554 +.. nonce: QEaCcK +.. section: Core and Builtins + +Change the jump opcodes so that all conditional jumps are forward jumps. +Backward jumps are converted by the assembler into a conditional forward +jump whose target is the fallthrough block (and with a reversed condition), +followed by an unconditional backward jump. For example: + +``POP_JUMP_IF_TRUE BACKWARD_TARGET`` becomes ``POP_JUMP_IF_FALSE NEXT_BLOCK; +JUMP BACKWARD_TARGET``. + +All the directed conditional jump opcodes were removed: +``POP_JUMP_FORWARD_IF_TRUE``, ``POP_JUMP_BACKWARD_IF_TRUE``, +``POP_JUMP_FORWARD_IF_FALSE``, ``POP_JUMP_BACKWARD_IF_FALSE``, +``POP_JUMP_FORWARD_IF_NONE``, ``POP_JUMP_BACKWARD_IF_NONE``, +``POP_JUMP_FORWARD_IF_NOT_NONE``, ``POP_JUMP_BACKWARD_IF_NOT_NONE``. + +The corresponding opcodes without direction are no longer +pseudo-instructions, and they implement the forward conditional jumps. + +.. + +.. date: 2022-08-25-10-19-34 +.. gh-issue: 96268 +.. nonce: AbYrLB +.. section: Core and Builtins + +Loading a file with invalid UTF-8 will now report the broken character at +the correct location. + +.. + +.. date: 2022-08-24-14-30-26 +.. gh-issue: 96237 +.. nonce: msif5f +.. section: Core and Builtins + +The internal field ``_PyInterpreterFrame.f_func`` is renamed to +``_PyInterpreterFrame.f_funcobj`` and may be any object. The ``f_globals`` +and ``f_builtin`` fields may hold junk values. 
+
+It is safest to treat the ``_PyInterpreterFrame`` struct as opaque.
+
+..
+
+.. date: 2022-08-22-21-33-28
+.. gh-issue: 96187
+.. nonce: W_6SRG
+.. section: Core and Builtins
+
+Fixed a bug that caused ``_PyCode_GetExtra`` to return garbage for negative
+indexes. Patch by Pablo Galindo
+
+..
+
+.. date: 2022-08-20-18-36-40
+.. gh-issue: 96143
+.. nonce: nh3GFM
+.. section: Core and Builtins
+
+Add a new ``-X perf`` Python command line option as well as
+:func:`sys.activate_stack_trampoline` and
+:func:`sys.deactivate_stack_trampoline` functions in the :mod:`sys` module
+that allow setting and unsetting the interpreter in a way that the Linux
+``perf`` profiler can detect Python calls. The new
+:func:`sys.is_stack_trampoline_active` function allows querying the state of
+the perf trampoline. Design by Pablo Galindo. Patch by Pablo Galindo and
+Christian Heimes with contributions from Gregory P. Smith [Google] and Mark
+Shannon.
+
+..
+
+.. date: 2022-08-19-06-51-17
+.. gh-issue: 96071
+.. nonce: mVgPAo
+.. section: Core and Builtins
+
+Fix a deadlock in :c:func:`PyGILState_Ensure` when allocating new thread
+state. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-08-18-13-47-59
+.. gh-issue: 96046
+.. nonce: 5Hqbka
+.. section: Core and Builtins
+
+:c:func:`PyType_Ready` now initializes ``ht_cached_keys`` and performs
+additional checks to ensure that type objects are properly configured. This
+avoids crashes in 3rd party packages that don't use regular API to create
+new types.
+
+..
+
+.. date: 2022-08-15-21-08-11
+.. gh-issue: 96005
+.. nonce: 6eoc8k
+.. section: Core and Builtins
+
+On WASI :data:`~errno.ENOTCAPABLE` is now mapped to :exc:`PermissionError`.
+The :mod:`errno` module exposes the new error number. ``getpath.py`` now
+ignores :exc:`PermissionError` when it cannot open landmark files
+``pybuilddir.txt`` and ``pyvenv.cfg``.
+
+..
+
+.. date: 2022-08-15-20-52-41
+.. gh-issue: 93678
+.. nonce: X7GuIJ
+.. section: Core and Builtins
+
+Added a test harness for direct unit tests of the compiler's optimization
+stage. The ``_testinternalcapi.optimize_cfg()`` function runs the optimiser
+on a sequence of instructions. The ``CfgOptimizationTestCase`` class in
+``test.support`` has utilities for invoking the optimizer and checking the
+output.
+
+..
+
+.. date: 2022-08-15-12-41-14
+.. gh-issue: 95245
+.. nonce: N4gOUV
+.. section: Core and Builtins
+
+Reduces the size of a "simple" Python object from 8 to 6 words by moving the
+weakreflist pointer into the pre-header directly before the object's
+dict/values pointer.
+
+..
+
+.. date: 2022-08-15-11-58-05
+.. gh-issue: 90997
+.. nonce: bWwV8Q
+.. section: Core and Builtins
+
+Compile virtual :keyword:`try`/:keyword:`except` blocks to handle exceptions
+raised during :meth:`~generator.close` or :meth:`~generator.throw` calls
+through a suspended frame.
+
+..
+
+.. date: 2022-08-14-10-04-44
+.. gh-issue: 95977
+.. nonce: gCTZb9
+.. section: Core and Builtins
+
+Optimized calling :meth:`~object.__get__` with vectorcall. Patch by Kumar
+Aditya.
+
+..
+
+.. date: 2022-08-12-18-13-49
+.. gh-issue: 91210
+.. nonce: AWMSLj
+.. section: Core and Builtins
+
+Improve error message when a parameter without a default value follows one
+with a default value, and show the same message, even when the
+non-default/default sequence is preceded by positional-only parameters.
+
+..
+
+.. date: 2022-08-12-13-04-25
+.. gh-issue: 95922
+.. nonce: YNCtyX
+..
section: Core and Builtins + +Fixed bug where the compiler's ``eliminate_empty_basic_blocks`` function +ignores the last block of the code unit. + +.. + +.. date: 2022-08-11-11-01-56 +.. gh-issue: 95818 +.. nonce: iClLdl +.. section: Core and Builtins + +Skip over incomplete frames in :c:func:`PyThreadState_GetFrame`. + +.. + +.. date: 2022-08-11-09-19-55 +.. gh-issue: 95876 +.. nonce: YpQfoV +.. section: Core and Builtins + +Fix format string in ``_PyPegen_raise_error_known_location`` that can lead +to memory corruption on some 64bit systems. The function was building a +tuple with ``i`` (int) instead of ``n`` (Py_ssize_t) for Py_ssize_t +arguments. + +.. + +.. date: 2022-08-04-18-46-54 +.. gh-issue: 95605 +.. nonce: FbpCoG +.. section: Core and Builtins + +Fix misleading contents of error message when converting an all-whitespace +string to :class:`float`. + +.. + +.. date: 2022-07-31-13-23-12 +.. gh-issue: 95150 +.. nonce: 67FXVo +.. section: Core and Builtins + +Update code object hashing and equality to consider all debugging and +exception handling tables. This fixes an issue where certain non-identical +code objects could be "deduplicated" during compilation. + +.. + +.. date: 2022-07-31-03-22-58 +.. gh-issue: 91146 +.. nonce: Y2Hziy +.. section: Core and Builtins + +Reduce allocation size of :class:`list` from :meth:`str.split` and +:meth:`str.rsplit`. Patch by Dong-hee Na and Inada Naoki. + +.. + +.. date: 2022-07-28-19-07-06 +.. gh-issue: 87092 +.. nonce: 73IPS1 +.. section: Core and Builtins + +Create a 'jump target label' abstraction in the compiler so that the +compiler's codegen stage does not work directly with basic blocks. This +prepares the code for changes to the underlying CFG generation mechanism. + +.. + +.. date: 2022-07-28-08-33-31 +.. gh-issue: 95355 +.. nonce: yN4XVk +.. section: Core and Builtins + +``_PyPegen_Parser_New`` now properly detects token memory allocation errors. +Patch by Honglin Zhu. + +.. + +.. date: 2022-07-27-14-21-57 +.. gh-issue: 90081 +.. nonce: HVAS5x +.. section: Core and Builtins + +Run Python code in tracer/profiler function at full speed. Fixes slowdown in +earlier versions of 3.11. + +.. + +.. date: 2022-07-27-14-05-07 +.. gh-issue: 95324 +.. nonce: 28Q5u7 +.. section: Core and Builtins + +Emit a warning in debug mode if an object does not call +:c:func:`PyObject_GC_UnTrack` before deallocation. Patch by Pablo Galindo. + +.. + +.. date: 2022-07-26-12-59-03 +.. gh-issue: 95245 +.. nonce: GHWczn +.. section: Core and Builtins + +Merge managed dict and values pointer into a single tagged pointer to save +one word in the pre-header. + +.. + +.. date: 2022-07-26-09-31-12 +.. gh-issue: 93678 +.. nonce: W8vvgT +.. section: Core and Builtins + +Add cfg_builder struct and refactor the relevant code so that a cfg can be +constructed without an instance of the compiler struct. + +.. + +.. date: 2022-07-24-00-27-47 +.. gh-issue: 95185 +.. nonce: ghYTZx +.. section: Core and Builtins + +Prevented crashes in the AST constructor when compiling some absurdly long +expressions like ``"+0"*1000000``. :exc:`RecursionError` is now raised +instead. Patch by Pablo Galindo + +.. + +.. date: 2022-07-23-19-16-25 +.. gh-issue: 93351 +.. nonce: 0Jyvu- +.. section: Core and Builtins + +:class:`ast.AST` node positions are now validated when provided to +:func:`compile` and other related functions. If invalid positions are +detected, a :exc:`ValueError` will be raised. + +.. + +.. date: 2022-07-22-12-53-34 +.. gh-issue: 94438 +.. nonce: hNqACc +.. 
section: Core and Builtins + +Fix an issue that caused extended opcode arguments and some conditional pops +to be ignored when calculating valid jump targets for assignments to the +``f_lineno`` attribute of frame objects. In some cases, this could cause +inconsistent internal state, resulting in a hard crash of the interpreter. + +.. + +.. date: 2022-07-21-19-19-20 +.. gh-issue: 95060 +.. nonce: 4xdT1f +.. section: Core and Builtins + +Undocumented ``PyCode_Addr2Location`` function now properly returns when +``addrq`` argument is less than zero. + +.. + +.. date: 2022-07-21-17-54-52 +.. gh-issue: 95113 +.. nonce: NnSLpT +.. section: Core and Builtins + +Replace all ``EXTENDED_ARG_QUICK`` instructions with basic +:opcode:`EXTENDED_ARG` instructions in unquickened code. Consumers of +non-adaptive bytecode should be able to handle extended arguments the same +way they were handled in CPython 3.10 and older. + +.. + +.. date: 2022-07-20-13-46-01 +.. gh-issue: 91409 +.. nonce: dhL8Zo +.. section: Core and Builtins + +Fix incorrect source location info caused by certain optimizations in the +bytecode compiler. + +.. + +.. date: 2022-07-20-09-04-55 +.. gh-issue: 95023 +.. nonce: bs-xd7 +.. section: Core and Builtins + +Implement :func:`os.setns` and :func:`os.unshare` for Linux. Patch by Noam +Cohen. + +.. + +.. date: 2022-07-19-16-30-59 +.. gh-issue: 94036 +.. nonce: _6Utkm +.. section: Core and Builtins + +Fix incorrect source location info for some multi-line attribute accesses +and method calls. + +.. + +.. date: 2022-07-19-09-41-55 +.. gh-issue: 94938 +.. nonce: xYBlM7 +.. section: Core and Builtins + +Fix error detection in some builtin functions when keyword argument name is +an instance of a str subclass with overloaded ``__eq__`` and ``__hash__``. +Previously it could cause SystemError or other undesired behavior. + +.. + +.. date: 2022-07-19-04-34-56 +.. gh-issue: 94996 +.. nonce: dV564A +.. section: Core and Builtins + +:func:`ast.parse` will no longer parse function definitions with +positional-only params when passed ``feature_version`` less than ``(3, 8)``. +Patch by Shantanu Jain. + +.. + +.. date: 2022-07-18-14-19-21 +.. gh-issue: 94739 +.. nonce: NQJQi7 +.. section: Core and Builtins + +Allow jumping within, out of, and across exception handlers in the debugger. + +.. + +.. date: 2022-07-18-05-10-29 +.. gh-issue: 94949 +.. nonce: OsZ7_s +.. section: Core and Builtins + +:func:`ast.parse` will no longer parse parenthesized context managers when +passed ``feature_version`` less than ``(3, 9)``. Patch by Shantanu Jain. + +.. + +.. date: 2022-07-18-04-48-34 +.. gh-issue: 94947 +.. nonce: df9gUw +.. section: Core and Builtins + +:func:`ast.parse` will no longer parse assignment expressions when passed +``feature_version`` less than ``(3, 8)``. Patch by Shantanu Jain. + +.. + +.. date: 2022-07-17-15-54-29 +.. gh-issue: 91256 +.. nonce: z7i7Q5 +.. section: Core and Builtins + +Ensures the program name is known for help text during interpreter startup. + +.. + +.. date: 2022-07-16-08-14-17 +.. gh-issue: 94869 +.. nonce: eRwMsX +.. section: Core and Builtins + +Fix the column offsets for some expressions in multi-line f-strings +:mod:`ast` nodes. Patch by Pablo Galindo. + +.. + +.. date: 2022-07-15-22-47-44 +.. gh-issue: 94893 +.. nonce: YiJYcW +.. section: Core and Builtins + +Fix an issue where frame object manipulations could corrupt inline bytecode +caches. + +.. + +.. date: 2022-07-15-22-16-08 +.. gh-issue: 94822 +.. nonce: zRRzBN +.. 
section: Core and Builtins + +Fix an issue where lookups of metaclass descriptors may be ignored when an +identically-named attribute also exists on the class itself. + +.. + +.. date: 2022-07-15-16-15-04 +.. gh-issue: 91153 +.. nonce: HiBmtt +.. section: Core and Builtins + +Fix an issue where a :class:`bytearray` item assignment could crash if it's +resized by the new value's :meth:`__index__` method. + +.. + +.. date: 2022-07-14-10-07-53 +.. gh-issue: 90699 +.. nonce: x3aG9m +.. section: Core and Builtins + +Fix reference counting bug in :meth:`bool.__repr__`. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-08-16-44-11 +.. gh-issue: 94694 +.. nonce: VkL2CM +.. section: Core and Builtins + +Fix an issue that could cause code with multi-line method lookups to have +misleading or incorrect column offset information. In some cases (when +compiling a hand-built AST) this could have resulted in a hard crash of the +interpreter. + +.. + +.. date: 2022-07-08-11-44-45 +.. gh-issue: 93252 +.. nonce: i2358c +.. section: Core and Builtins + +Fix an issue that caused internal frames to outlive failed Python function +calls, possibly resulting in memory leaks or hard interpreter crashes. + +.. + +.. date: 2022-07-07-21-13-25 +.. gh-issue: 94215 +.. nonce: _Sv9Ms +.. section: Core and Builtins + +Fix an issue where exceptions raised by line-tracing events would cause +frames to be left in an invalid state, possibly resulting in a hard crash of +the interpreter. + +.. + +.. date: 2022-07-06-14-02-26 +.. gh-issue: 92228 +.. nonce: 44Cbly +.. section: Core and Builtins + +Disable the compiler's inline-small-exit-blocks optimization for exit blocks +that are associated with source code lines. This fixes a bug where the +debugger cannot tell where an exception handler ends and the following code +block begins. + +.. + +.. date: 2022-07-01-20-00-19 +.. gh-issue: 94485 +.. nonce: mo5st7 +.. section: Core and Builtins + +Line number of a module's ``RESUME`` instruction is set to 0 as specified in +:pep:`626`. + +.. + +.. date: 2022-06-30-15-07-26 +.. gh-issue: 94438 +.. nonce: btzHSk +.. section: Core and Builtins + +Account for instructions that can push NULL to the stack when setting line +number in a frame. Prevents some (unlikely) crashes. + +.. + +.. date: 2022-06-29-22-18-36 +.. gh-issue: 91719 +.. nonce: 3APYYI +.. section: Core and Builtins + +Reload ``opcode`` when raising ``unknown opcode error`` in the interpreter +main loop, for C compilers to generate dispatching code independently. + +.. + +.. date: 2022-06-29-15-45-04 +.. gh-issue: 94329 +.. nonce: olUQyk +.. section: Core and Builtins + +Compile and run code with unpacking of extremely large sequences (1000s of +elements). Such code failed to compile. It now compiles and runs correctly. + +.. + +.. date: 2022-06-28-14-20-36 +.. gh-issue: 94360 +.. nonce: DiEnen +.. section: Core and Builtins + +Fixed a tokenizer crash when reading encoded files with syntax errors from +``stdin`` with non utf-8 encoded text. Patch by Pablo Galindo + +.. + +.. date: 2022-06-28-12-41-17 +.. gh-issue: 88116 +.. nonce: A7fEl_ +.. section: Core and Builtins + +Fix an issue when reading line numbers from code objects if the encoded line +numbers are close to ``INT_MIN``. Patch by Pablo Galindo + +.. + +.. date: 2022-06-28-10-08-06 +.. gh-issue: 94262 +.. nonce: m-HWUZ +.. section: Core and Builtins + +Don't create frame objects for incomplete frames. 
Prevents the creation of +generators and closures from being observable to Python and C extensions, +restoring the behavior of 3.10 and earlier. + +.. + +.. date: 2022-06-26-14-37-03 +.. gh-issue: 94192 +.. nonce: ab7tn7 +.. section: Core and Builtins + +Fix error for dictionary literals with invalid expression as value. + +.. + +.. date: 2022-06-25-10-19-43 +.. gh-issue: 87995 +.. nonce: aMDHnp +.. section: Core and Builtins + +:class:`types.MappingProxyType` instances are now hashable if the underlying +mapping is hashable. + +.. + +.. date: 2022-06-24-14-06-20 +.. gh-issue: 93883 +.. nonce: 8jVQQ4 +.. section: Core and Builtins + +Revise the display strategy of traceback enhanced error locations. The +indicators are only shown when the location doesn't span the whole line. + +.. + +.. date: 2022-06-23-12-10-39 +.. gh-issue: 94163 +.. nonce: SqAfQq +.. section: Core and Builtins + +Add :opcode:`BINARY_SLICE` and :opcode:`STORE_SLICE` instructions for more +efficient handling and better specialization of slicing operations, where +the slice is explicit in the source code. + +.. + +.. date: 2022-06-20-13-48-57 +.. gh-issue: 94021 +.. nonce: o78q3G +.. section: Core and Builtins + +Fix unreachable code warning in ``Python/specialize.c``. + +.. + +.. date: 2022-06-18-17-00-33 +.. gh-issue: 93911 +.. nonce: y286of +.. section: Core and Builtins + +Specialize ``LOAD_ATTR`` for objects with custom ``__getattribute__``. + +.. + +.. date: 2022-06-17-16-30-24 +.. gh-issue: 93955 +.. nonce: LmiAe9 +.. section: Core and Builtins + +Improve performance of attribute lookups on objects with custom +``__getattribute__`` and ``__getattr__``. Patch by Ken Jin. + +.. + +.. date: 2022-06-16-16-53-22 +.. gh-issue: 93911 +.. nonce: RDwIiK +.. section: Core and Builtins + +Specialize ``LOAD_ATTR`` for ``property()`` attributes. + +.. + +.. date: 2022-06-15-16-45-53 +.. gh-issue: 93678 +.. nonce: 1I_ZT3 +.. section: Core and Builtins + +Refactor compiler optimisation code so that it no longer needs the ``struct +assembler`` and ``struct compiler`` passed around. Instead, each function +takes the CFG and other data that it actually needs. This will make it +possible to test this code directly. + +.. + +.. date: 2022-06-15-11-16-13 +.. gh-issue: 93841 +.. nonce: 06zqX3 +.. section: Core and Builtins + +When built with ``-enable-pystats``, ``sys._stats_on()``, +``sys._stats_off()``, ``sys._stats_clear()`` and ``sys._stats_dump()`` +functions have been added to enable gathering stats for parts of programs. + +.. + +.. date: 2022-06-13-13-55-34 +.. gh-issue: 93516 +.. nonce: HILrDl +.. section: Core and Builtins + +Store offset of first traceable instruction in code object to avoid having +to recompute it for each instruction when tracing. + +.. + +.. date: 2022-06-13-10-48-09 +.. gh-issue: 93516 +.. nonce: yJSait +.. section: Core and Builtins + +Lazily create a table mapping bytecode offsets to line numbers to speed up +calculation of line numbers when tracing. + +.. + +.. date: 2022-06-12-19-31-56 +.. gh-issue: 89828 +.. nonce: bq02M7 +.. section: Core and Builtins + +:class:`types.GenericAlias` no longer relays the ``__class__`` attribute. +For example, ``isinstance(list[int], type)`` no longer returns ``True``. + +.. + +.. date: 2022-06-10-16-57-35 +.. gh-issue: 93678 +.. nonce: 1WBnHt +.. section: Core and Builtins + +Refactor the compiler to reduce boilerplate and repetition. + +.. + +.. date: 2022-06-10-12-03-17 +.. gh-issue: 93671 +.. nonce: idkQqG +.. 
section: Core and Builtins + +Fix some exponential backtrace case happening with deeply nested sequence +patterns in match statements. Patch by Pablo Galindo + +.. + +.. date: 2022-06-10-10-31-18 +.. gh-issue: 93662 +.. nonce: -7RSC1 +.. section: Core and Builtins + +Make sure that the end column offsets are correct in multi-line method +calls. Previously, the end column could precede the column offset. + +.. + +.. date: 2022-06-09-19-19-02 +.. gh-issue: 93461 +.. nonce: 5DqP1e +.. section: Core and Builtins + +:func:`importlib.invalidate_caches` now drops entries from +:data:`sys.path_importer_cache` with a relative path as name. This solves a +caching issue when a process changes its current working directory. + +``FileFinder`` no longer inserts a dot in the path, e.g. ``/egg/./spam`` is +now ``/egg/spam``. + +.. + +.. date: 2022-06-09-09-08-29 +.. gh-issue: 93621 +.. nonce: -_Pn1d +.. section: Core and Builtins + +Change order of bytecode instructions emitted for :keyword:`with` and +:keyword:`async with` to reduce the number of entries in the exception +table. + +.. + +.. date: 2022-06-06-14-28-24 +.. gh-issue: 93533 +.. nonce: lnC0CC +.. section: Core and Builtins + +Reduce the size of the inline cache for ``LOAD_METHOD`` by 2 bytes. + +.. + +.. date: 2022-06-02-23-00-08 +.. gh-issue: 93444 +.. nonce: m63DIs +.. section: Core and Builtins + +Removed redundant fields from the compiler's basicblock struct: +``b_nofallthrough``, ``b_exit``, ``b_return``. They can be easily calculated +from the opcode of the last instruction of the block. + +.. + +.. date: 2022-06-02-08-28-55 +.. gh-issue: 93429 +.. nonce: DZTWHx +.. section: Core and Builtins + +``LOAD_METHOD`` instruction has been removed. It was merged back into +``LOAD_ATTR``. + +.. + +.. date: 2022-06-01-17-47-40 +.. gh-issue: 93418 +.. nonce: 24dJuc +.. section: Core and Builtins + +Fixed an assert where an f-string has an equal sign '=' following an +expression, but there's no trailing brace. For example, f"{i=". + +.. + +.. date: 2022-05-31-16-36-30 +.. gh-issue: 93382 +.. nonce: Jf6gAj +.. section: Core and Builtins + +Cache the result of :c:func:`PyCode_GetCode` function to restore the O(1) +lookup of the :attr:`~types.CodeType.co_code` attribute. + +.. + +.. date: 2022-05-30-19-00-38 +.. gh-issue: 93359 +.. nonce: zXV3A0 +.. section: Core and Builtins + +Ensure that custom :mod:`ast` nodes without explicit end positions can be +compiled. Patch by Pablo Galindo. + +.. + +.. date: 2022-05-30-15-51-11 +.. gh-issue: 93356 +.. nonce: l5wnzW +.. section: Core and Builtins + +Code for exception handlers is emitted at the end of the code unit's +bytecode. This avoids one jump when no exception is raised. + +.. + +.. date: 2022-05-30-15-35-42 +.. gh-issue: 93354 +.. nonce: RZk8gs +.. section: Core and Builtins + +Use exponential backoff for specialization counters in the interpreter. Can +reduce the number of failed specializations significantly and avoid slowdown +for those parts of a program that are not suitable for specialization. + +.. + +.. date: 2022-05-30-14-50-03 +.. gh-issue: 93283 +.. nonce: XDO2ZQ +.. section: Core and Builtins + +Improve error message for invalid syntax of conversion character in f-string +expressions. + +.. + +.. date: 2022-05-30-10-22-46 +.. gh-issue: 93345 +.. nonce: gi1A4L +.. section: Core and Builtins + +Fix a crash in substitution of a ``TypeVar`` in nested generic alias after +``TypeVarTuple``. + +.. + +.. date: 2022-05-25-21-56-25 +.. gh-issue: 93223 +.. nonce: gTOGVZ +.. 
section: Core and Builtins + +When a bytecode instruction jumps to an unconditional jump instruction, the +first instruction can often be optimized to target the unconditional jump's +target directly. For tracing reasons, this would previously only occur if +both instructions have the same line number. This also now occurs if the +unconditional jump is artificial, i.e., if it has no associated line number. + +.. + +.. date: 2022-05-25-12-30-12 +.. gh-issue: 84694 +.. nonce: 5sjy2w +.. section: Core and Builtins + +The ``--experimental-isolated-subinterpreters`` configure option and +``EXPERIMENTAL_ISOLATED_SUBINTERPRETERS`` macro have been removed. + +.. + +.. date: 2022-05-25-04-07-22 +.. gh-issue: 91924 +.. nonce: -UyO4q +.. section: Core and Builtins + +Fix ``__lltrace__`` debug feature if the stdout encoding is not UTF-8. Patch +by Victor Stinner. + +.. + +.. date: 2022-05-24-14-35-48 +.. gh-issue: 93040 +.. nonce: 9X6Ofu +.. section: Core and Builtins + +Wraps unused parameters in ``Objects/obmalloc.c`` with ``Py_UNUSED``. + +.. + +.. date: 2022-05-23-18-36-07 +.. gh-issue: 93143 +.. nonce: X1Yqxm +.. section: Core and Builtins + +Avoid ``NULL`` checks for uninitialized local variables by determining at +compile time which variables must be initialized. + +.. + +.. date: 2022-05-22-02-37-50 +.. gh-issue: 93061 +.. nonce: r70Imp +.. section: Core and Builtins + +Backward jumps after ``async for`` loops are no longer given dubious line +numbers. + +.. + +.. date: 2022-05-21-23-21-37 +.. gh-issue: 93065 +.. nonce: 5I18WC +.. section: Core and Builtins + +Fix contextvars HAMT implementation to handle iteration over deep trees. + +The bug was discovered and fixed by Eli Libman. See +`MagicStack/immutables#84 +`_ for more details. + +.. + +.. date: 2022-05-20-13-32-24 +.. gh-issue: 93012 +.. nonce: e9B-pv +.. section: Core and Builtins + +Added the new function :c:func:`PyType_FromMetaclass`, which generalizes the +existing :c:func:`PyType_FromModuleAndSpec` using an additional metaclass +argument. This is useful for language binding tools, where it can be used to +intercept type-related operations like subclassing or static attribute +access by specifying a metaclass with custom slots. + +Importantly, :c:func:`PyType_FromMetaclass` is available in the Limited API, +which provides a path towards migrating more binding tools onto the Stable +ABI. + +.. + +.. date: 2022-05-20-09-25-34 +.. gh-issue: 93021 +.. nonce: k3Aji2 +.. section: Core and Builtins + +Fix the :attr:`__text_signature__` for :meth:`__get__` methods implemented +in C. Patch by Jelle Zijlstra. + +.. + +.. date: 2022-05-19-15-29-53 +.. gh-issue: 89914 +.. nonce: 8bAffH +.. section: Core and Builtins + +The operand of the ``YIELD_VALUE`` instruction is set to the stack depth. +This is done to help frame handling on ``yield`` and may assist debuggers. + +.. + +.. date: 2022-05-19-13-25-50 +.. gh-issue: 92955 +.. nonce: kmNV33 +.. section: Core and Builtins + +Fix memory leak in code object's lines and positions iterators as they were +not finalized at exit. Patch by Kumar Aditya. + +.. + +.. date: 2022-05-18-18-34-45 +.. gh-issue: 92930 +.. nonce: kpYPOb +.. section: Core and Builtins + +Fixed a crash in ``_pickle.c`` from mutating collections during +``__reduce__`` or ``persistent_id``. + +.. + +.. date: 2022-05-18-12-55-35 +.. gh-issue: 90690 +.. nonce: TKuoTa +.. section: Core and Builtins + +The PRECALL instruction has been removed. 
It offered only a small advantage +for specialization and is not needed in the vast majority of cases. + +.. + +.. date: 2022-05-18-08-32-33 +.. gh-issue: 92914 +.. nonce: tJUeTD +.. section: Core and Builtins + +Always round the allocated size for lists up to the nearest even number. + +.. + +.. date: 2022-05-17-20-41-43 +.. gh-issue: 92858 +.. nonce: eIXJTn +.. section: Core and Builtins + +Improve error message for some suites with syntax error before ':' + +.. + +.. date: 2022-05-15-15-25-05 +.. gh-issue: 90473 +.. nonce: MoPHYW +.. section: Core and Builtins + +Decrease default recursion limit on WASI to address limited call stack size. + +.. + +.. date: 2022-05-14-13-22-11 +.. gh-issue: 92804 +.. nonce: rAqpI2 +.. section: Core and Builtins + +Fix memory leak in ``memoryview`` iterator as it was not finalized at exit. +Patch by Kumar Aditya. + +.. + +.. date: 2022-05-13-12-36-10 +.. gh-issue: 92777 +.. nonce: Odo4vP +.. section: Core and Builtins + +Specialize ``LOAD_METHOD`` for objects with lazy dictionaries. Patch by Ken +Jin. + +.. + +.. date: 2022-05-13-00-57-18 +.. gh-issue: 92658 +.. nonce: YdhFE2 +.. section: Core and Builtins + +Add support for connecting and binding to Hyper-V sockets on Windows Hyper-V +hosts and guests. + +.. + +.. date: 2022-05-12-13-23-19 +.. gh-issue: 92236 +.. nonce: sDRzUe +.. section: Core and Builtins + +Remove spurious "LINE" event when starting a generator or coroutine, visible +tracing functions implemented in C. + +.. + +.. date: 2022-05-11-09-16-54 +.. gh-issue: 91102 +.. nonce: lenv9h +.. section: Core and Builtins + +:meth:`_warnings.warn_explicit` is ported to Argument Clinic. + +.. + +.. date: 2022-05-10-11-34-35 +.. gh-issue: 92619 +.. nonce: u0V0lY +.. section: Core and Builtins + +Make the compiler duplicate an exit block only if none of its instructions +have a lineno (previously only the first instruction in the block was +checked, leading to unnecessarily duplicated blocks). + +.. + +.. date: 2022-05-08-19-43-31 +.. gh-issue: 88750 +.. nonce: 1BjJg- +.. section: Core and Builtins + +The deprecated debug build only ``PYTHONTHREADDEBUG`` environment variable +no longer does anything. + +.. + +.. date: 2022-05-03-20-12-18 +.. gh-issue: 92261 +.. nonce: aigLnb +.. section: Core and Builtins + +Fix hang when trying to iterate over a ``typing.Union``. + +.. + +.. date: 2022-04-24-02-22-10 +.. gh-issue: 91432 +.. nonce: YPJAK6 +.. section: Core and Builtins + +Specialized the :opcode:`FOR_ITER` opcode using the PEP 659 machinery + +.. + +.. date: 2022-04-16-15-37-55 +.. gh-issue: 91399 +.. nonce: trLbK6 +.. section: Core and Builtins + +Removed duplicate '{0, 0, 0, 0, 0, 0}' entry in 'Objects/unicodetype_db.h'. + +.. + +.. date: 2022-04-15-22-12-53 +.. gh-issue: 91578 +.. nonce: rDOtyK +.. section: Core and Builtins + +Updates the error message for abstract class. + +.. + +.. bpo: 47091 +.. date: 2022-03-22-13-12-27 +.. nonce: tJcy-P +.. section: Core and Builtins + +Improve performance of repetition of :class:`list` and :class:`tuple` by +using ``memcpy`` to copy data and performing the reference increments in one +step. + +.. + +.. bpo: 46142 +.. date: 2022-01-02-14-53-59 +.. nonce: WayjgT +.. section: Core and Builtins + +Make ``--help`` output shorter by moving some info to the new ``--help-env`` +and ``--help-xoptions`` command-line options. Also add ``--help-all`` option +to print complete usage. + +.. + +.. bpo: 42316 +.. date: 2020-11-15-02-08-43 +.. nonce: LqdkWK +.. 
section: Core and Builtins + +Document some places where an assignment expression needs parentheses. + +.. + +.. date: 2022-10-23-18-30-39 +.. gh-issue: 89237 +.. nonce: kBui30 +.. section: Library + +Fix hang on Windows in ``subprocess.wait_closed()`` in :mod:`asyncio` with +:class:`~asyncio.ProactorEventLoop`. Patch by Kumar Aditya. + +.. + +.. date: 2022-10-19-09-29-12 +.. gh-issue: 97928 +.. nonce: xj3im7 +.. section: Library + +:meth:`tkinter.Text.count` raises now an exception for options starting with +"-" instead of silently ignoring them. + +.. + +.. date: 2022-10-18-15-41-37 +.. gh-issue: 98393 +.. nonce: vhPu4L +.. section: Library + +The :mod:`os` module no longer accepts bytes-like paths, like +:class:`bytearray` and :class:`memoryview` types: only the exact +:class:`bytes` type is accepted for bytes strings. Patch by Victor Stinner. + +.. + +.. date: 2022-10-17-12-49-02 +.. gh-issue: 98363 +.. nonce: aFmSP- +.. section: Library + +Added itertools.batched() to batch data into lists of a given length with +the last list possibly being shorter than the others. + +.. + +.. date: 2022-10-16-15-31-50 +.. gh-issue: 98331 +.. nonce: Y5kPOX +.. section: Library + +Update the bundled copies of pip and setuptools to versions 22.3 and 65.5.0 +respectively. + +.. + +.. date: 2022-10-16-06-18-59 +.. gh-issue: 98307 +.. nonce: b2_CDu +.. section: Library + +A :meth:`~logging.handlers.SysLogHandler.createSocket` method was added to +:class:`~logging.handlers.SysLogHandler`. + +.. + +.. date: 2022-10-14-19-57-37 +.. gh-issue: 96035 +.. nonce: 0xcX-p +.. section: Library + +Fix bug in :func:`urllib.parse.urlparse` that causes certain port numbers +containing whitespace, underscores, plus and minus signs, or non-ASCII +digits to be incorrectly accepted. + +.. + +.. date: 2022-10-14-12-29-05 +.. gh-issue: 98257 +.. nonce: aMSMs2 +.. section: Library + +Make :func:`sys.setprofile` and :func:`sys.settrace` functions reentrant. +They can no long fail with: ``RuntimeError("Cannot install a trace function +while another trace function is being installed")``. Patch by Victor +Stinner. + +.. + +.. date: 2022-10-14-11-46-31 +.. gh-issue: 98251 +.. nonce: Uxc9al +.. section: Library + +Allow :mod:`venv` to pass along :envvar:`PYTHON*` variables to ``ensurepip`` +and ``pip`` when they do not impact path resolution + +.. + +.. date: 2022-10-12-11-20-54 +.. gh-issue: 94597 +.. nonce: GYJZlb +.. section: Library + +Deprecated :meth:`asyncio.AbstractEventLoopPolicy.get_child_watcher` and +:meth:`asyncio.AbstractEventLoopPolicy.set_child_watcher` methods to be +removed in Python 3.14. Patch by Kumar Aditya. + +.. + +.. date: 2022-10-12-10-00-40 +.. gh-issue: 98178 +.. nonce: hspH51 +.. section: Library + +On macOS, fix a crash in :func:`syslog.syslog` in multi-threaded +applications. On macOS, the libc ``syslog()`` function is not thread-safe, +so :func:`syslog.syslog` no longer releases the GIL to call it. Patch by +Victor Stinner. + +.. + +.. date: 2022-10-10-09-52-21 +.. gh-issue: 44098 +.. nonce: okcqJt +.. section: Library + +Release the GIL when creating :class:`mmap.mmap` objects on Unix. + +.. + +.. date: 2022-10-09-12-12-38 +.. gh-issue: 87730 +.. nonce: ClgP3f +.. section: Library + +Wrap network errors consistently in urllib FTP support, so the test suite +doesn't fail when a network is available but the public internet is not +reachable. + +.. + +.. date: 2022-10-08-06-59-46 +.. gh-issue: 94597 +.. nonce: TsS0oT +.. 
section: Library
+
+The child watcher classes :class:`~asyncio.MultiLoopChildWatcher`,
+:class:`~asyncio.FastChildWatcher` and :class:`~asyncio.SafeChildWatcher`
+are deprecated and will be removed in Python 3.14. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-10-07-09-52-37
+.. gh-issue: 98023
+.. nonce: aliEcl
+.. section: Library
+
+Change the default child watcher to :class:`~asyncio.PidfdChildWatcher` on
+Linux systems which support it. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-10-06-23-42-00
+.. gh-issue: 90985
+.. nonce: s280JY
+.. section: Library
+
+Earlier in 3.11 we deprecated ``asyncio.Task.cancel("message")``. We
+realized we were too harsh, and have undeprecated it.
+
+..
+
+.. date: 2022-10-06-17-59-22
+.. gh-issue: 65961
+.. nonce: SXlQnI
+.. section: Library
+
+Do not rely solely on ``__cached__`` on modules; code will also support
+``__spec__.cached``.
+
+..
+
+.. date: 2022-10-05-20-52-17
+.. gh-issue: 97646
+.. nonce: Q4fVww
+.. section: Library
+
+Replace deprecated ``application/javascript`` with ``text/javascript`` in
+:mod:`mimetypes`. See :rfc:`9239`. Patch by Noam Cohen.
+
+..
+
+.. date: 2022-10-05-16-10-24
+.. gh-issue: 97930
+.. nonce: NPSrzE
+.. section: Library
+
+Apply changes from importlib_resources 5.8 and 5.9: ``Traversable.joinpath``
+provides a concrete implementation. ``as_file`` now supports directories of
+resources.
+
+..
+
+.. date: 2022-10-05-11-40-02
+.. gh-issue: 97850
+.. nonce: NzdREm
+.. section: Library
+
+Remove deprecated :func:`importlib.util.set_loader` and
+:func:`importlib.util.module_for_loader` from :mod:`importlib.util`.
+
+..
+
+.. date: 2022-10-04-21-21-41
+.. gh-issue: 97837
+.. nonce: 19q-eg
+.. section: Library
+
+Change the deprecation warning message in :mod:`unittest` from
+
+``It is deprecated to return a value!=None``
+
+to
+
+``It is deprecated to return a value that is not None from a test case``
+
+..
+
+.. date: 2022-10-04-07-55-19
+.. gh-issue: 97825
+.. nonce: mNdv1l
+.. section: Library
+
+Fixes :exc:`AttributeError` when :meth:`subprocess.check_output` is used
+with argument ``input=None`` and either of the arguments *encoding* or
+*errors* are used.
+
+..
+
+.. date: 2022-10-04-00-43-43
+.. gh-issue: 97008
+.. nonce: 3rjtt6
+.. section: Library
+
+:exc:`NameError` and :exc:`AttributeError` spelling suggestions provided
+since :gh:`82711` are now also emitted by the pure Python :mod:`traceback`
+module. Tests for those suggestions now exercise both implementations to
+ensure they are equivalent. Patch by Carl Friedrich Bolz-Tereick and Łukasz
+Langa.
+
+..
+
+.. date: 2022-10-03-14-42-13
+.. gh-issue: 97799
+.. nonce: Y1iJvf
+.. section: Library
+
+:mod:`dataclasses` now uses :func:`inspect.get_annotations` to examine the
+annotations on class objects.
+
+..
+
+.. date: 2022-10-03-13-25-19
+.. gh-issue: 97781
+.. nonce: gCLLef
+.. section: Library
+
+Removed deprecated interfaces in ``importlib.metadata`` (entry points
+accessed as dictionary, implicit dictionary construction of sequence of
+``EntryPoint`` objects, mutability of ``EntryPoints`` result, access of
+entry point by index). ``entry_points`` now has a simpler, more
+straightforward API (returning ``EntryPoints``).
+
+..
+
+.. date: 2022-09-30-15-56-20
+.. gh-issue: 96827
+.. nonce: lzy1iw
+.. section: Library
+
+Avoid spurious tracebacks from :mod:`asyncio` when default executor cleanup
+is delayed until after the event loop is closed (e.g. as the result of a
+keyboard interrupt).
+
+..
+
+.. date: 2022-09-30-09-22-37
+.. gh-issue: 95534
+.. nonce: ndEfPj
+.. section: Library
+
+:meth:`gzip.GzipFile.read` reads 10% faster.
+
+..
+
+.. date: 2022-09-29-23-22-24
+.. gh-issue: 97592
+.. nonce: tpJg_J
+.. section: Library
+
+Avoid a crash in the C version of
+:meth:`asyncio.Future.remove_done_callback` when an evil argument is passed.
+
+..
+
+.. date: 2022-09-29-08-15-55
+.. gh-issue: 97639
+.. nonce: JSjWYW
+.. section: Library
+
+Remove ``tokenize.NL`` check from :mod:`tabnanny`.
+
+..
+
+.. date: 2022-09-25-23-24-52
+.. gh-issue: 97545
+.. nonce: HZLSNt
+.. section: Library
+
+Make Semaphore run faster.
+
+..
+
+.. date: 2022-09-25-20-42-33
+.. gh-issue: 73588
+.. nonce: uVtjEA
+.. section: Library
+
+Fix generation of the default name of :class:`tkinter.Checkbutton`.
+Previously, checkbuttons in different parent widgets could have the same
+short name and share the same state if arguments "name" and "variable" are
+not specified. Now they are globally unique.
+
+..
+
+.. date: 2022-09-24-18-56-23
+.. gh-issue: 96865
+.. nonce: o9WUkW
+.. section: Library
+
+Fix Flag to use boundary CONFORM.
+
+This restores previous Flag behavior of allowing flags with non-sequential
+values to be combined; e.g.
+
+class Skip(Flag): TWO = 2 EIGHT = 8
+
+Skip.TWO | Skip.EIGHT -> <Skip.TWO|EIGHT: 10>
+
+..
+
+.. date: 2022-09-22-14-35-02
+.. gh-issue: 97005
+.. nonce: yf21Q7
+.. section: Library
+
+Update bundled libexpat to 2.4.9
+
+..
+
+.. date: 2022-09-22-11-50-29
+.. gh-issue: 85760
+.. nonce: DETTPd
+.. section: Library
+
+Fix race condition in :mod:`asyncio` where
+:meth:`~asyncio.SubprocessProtocol.process_exited` is called before
+:meth:`~asyncio.SubprocessProtocol.pipe_data_received`, leading to
+inconsistent output. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-09-18-04-51-30
+.. gh-issue: 96704
+.. nonce: DmamRX
+.. section: Library
+
+Pass the correct ``contextvars.Context`` when an ``asyncio`` exception
+handler is called on behalf of a task or callback handle. This adds a new
+``Task`` method, ``get_context``, and also a new ``Handle`` method with the
+same name. If this method is not found on a task object (perhaps because it
+is a third-party library that does not yet provide this method), the context
+prevailing at the time the exception handler is called is used.
+
+..
+
+.. date: 2022-09-17-13-15-10
+.. gh-issue: 96819
+.. nonce: 6RfqM7
+.. section: Library
+
+Fixed check in :mod:`multiprocessing.resource_tracker` that guarantees that
+the length of a write to a pipe is not greater than ``PIPE_BUF``.
+
+..
+
+.. date: 2022-09-16-07-53-29
+.. gh-issue: 95865
+.. nonce: oHjX0A
+.. section: Library
+
+Reduce :func:`urllib.parse.quote_from_bytes` memory use on large values.
+
+Contributed by Dennis Sweeney.
+
+..
+
+.. date: 2022-09-15-00-37-33
+.. gh-issue: 96741
+.. nonce: 4b6czN
+.. section: Library
+
+Corrected type annotation for dataclass attribute
+``pstats.FunctionProfile.ncalls`` to be ``str``.
+
+..
+
+.. date: 2022-09-13-15-12-31
+.. gh-issue: 96734
+.. nonce: G08vjz
+.. section: Library
+
+Update :mod:`unicodedata` database to Unicode 15.0.0.
+
+..
+
+.. date: 2022-09-10-16-46-16
+.. gh-issue: 96735
+.. nonce: 0YzJuG
+.. section: Library
+
+Fix undefined behaviour in :func:`struct.unpack`.
+
+..
+
+.. date: 2022-09-08-20-12-48
+.. gh-issue: 46412
+.. nonce: r_cfTh
+.. section: Library
+
+Improve performance of ``bool(db)`` for large ndb/gdb databases. Previously
+this would call ``len(db)`` which would iterate over all keys -- the answer
+(empty or not) is known after the first key.
+
+..
+
+.. date: 2022-09-07-22-49-37
+..
gh-issue: 96652 +.. nonce: YqOKxI +.. section: Library + +Fix the faulthandler implementation of ``faulthandler.register(signal, +chain=True)`` if the ``sigaction()`` function is not available: don't call +the previous signal handler if it's NULL. Patch by Victor Stinner. + +.. + +.. date: 2022-09-04-12-32-52 +.. gh-issue: 68163 +.. nonce: h6TJCc +.. section: Library + +Correct conversion of :class:`numbers.Rational`'s to :class:`float`. + +.. + +.. date: 2022-09-03-18-39-05 +.. gh-issue: 96538 +.. nonce: W156-D +.. section: Library + +Speed up ``bisect.bisect()`` functions by taking advantage of +type-stability. + +.. + +.. date: 2022-09-01-13-54-38 +.. gh-issue: 96465 +.. nonce: 0IJmrH +.. section: Library + +Fraction hashes are now cached. + +.. + +.. date: 2022-08-31-11-10-21 +.. gh-issue: 96079 +.. nonce: uqrXdJ +.. section: Library + +In :mod:`typing`, fix missing field ``name`` and incorrect ``__module__`` in +_AnnotatedAlias. + +.. + +.. date: 2022-08-30-12-32-00 +.. gh-issue: 96415 +.. nonce: 6W7ORH +.. section: Library + +Remove ``types._cell_factory`` from module namespace. + +.. + +.. date: 2022-08-30-11-46-36 +.. gh-issue: 95987 +.. nonce: CV7_u4 +.. section: Library + +Fix ``repr`` of ``Any`` subclasses. + +.. + +.. date: 2022-08-29-16-54-36 +.. gh-issue: 96388 +.. nonce: dCpJcu +.. section: Library + +Work around missing socket functions in :class:`~socket.socket`'s +``__repr__``. + +.. + +.. date: 2022-08-29-15-28-39 +.. gh-issue: 96385 +.. nonce: uLRTsf +.. section: Library + +Fix ``TypeVarTuple.__typing_prepare_subst__``. ``TypeError`` was not raised +when using more than one ``TypeVarTuple``, like ``[*T, *V]`` in type alias +substitutions. + +.. + +.. date: 2022-08-29-12-49-30 +.. gh-issue: 96142 +.. nonce: PdCMez +.. section: Library + +Add ``match_args``, ``kw_only``, ``slots``, and ``weakref_slot`` to +``_DataclassParams``. + +.. + +.. date: 2022-08-29-12-35-28 +.. gh-issue: 96073 +.. nonce: WaGstf +.. section: Library + +In :mod:`inspect`, fix overeager replacement of "``typing.``" in formatting +annotations. + +.. + +.. date: 2022-08-29-07-04-03 +.. gh-issue: 89258 +.. nonce: ri7ncj +.. section: Library + +Added a :meth:`~logging.Logger.getChildren` method to +:class:`logging.Logger`, to get the immediate child loggers of a logger. + +.. + +.. date: 2022-08-27-23-16-09 +.. gh-issue: 96346 +.. nonce: jJX14I +.. section: Library + +Use double caching for compiled RE patterns. + +.. + +.. date: 2022-08-27-21-26-52 +.. gh-issue: 96349 +.. nonce: XyYLlO +.. section: Library + +Fixed a minor performance regression in :func:`threading.Event.__init__` + +.. + +.. date: 2022-08-27-14-38-49 +.. gh-issue: 90467 +.. nonce: VOOB0p +.. section: Library + +Fix :class:`asyncio.streams.StreamReaderProtocol` to keep a strong reference +to the created task, so that it's not garbage collected + +.. + +.. date: 2022-08-23-13-30-30 +.. gh-issue: 96172 +.. nonce: 7WTHer +.. section: Library + +Fix a bug in ``unicodedata``: ``east_asian_width`` used to return the wrong +value for unassigned characters; and for yet unassigned, but reserved +characters. + +.. + +.. date: 2022-08-22-18-42-17 +.. gh-issue: 96159 +.. nonce: 3bFU39 +.. section: Library + +Fix a performance regression in logging TimedRotatingFileHandler. Only check +for special files when the rollover time has passed. + +.. + +.. date: 2022-08-22-13-54-20 +.. gh-issue: 96175 +.. nonce: bH7zGU +.. section: Library + +Fix unused ``localName`` parameter in the ``Attr`` class in +:mod:`xml.dom.minidom`. + +.. + +.. 
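+
+(Editorial illustration, not part of the upstream changelog.) The
+:meth:`~logging.Logger.getChildren` entry above adds a method returning the
+immediate child loggers of a logger; a minimal sketch, with made-up logger
+names, might look like::
+
+    import logging
+
+    root = logging.getLogger("app")
+    logging.getLogger("app.db")
+    logging.getLogger("app.web")
+    logging.getLogger("app.web.auth")  # grandchild; not returned below
+
+    # Only the immediate children of "app" are returned.
+    for child in sorted(root.getChildren(), key=lambda lg: lg.name):
+        print(child.name)  # prints app.db, then app.web
+
+..
+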
+.. date: 2022-08-20-12-56-15
+.. gh-issue: 96145
+.. nonce: 8ah3pE
+.. section: Library
+
+Add AttrDict to JSON module for use with object_hook.
+
+..
+
+.. date: 2022-08-20-10-31-01
+.. gh-issue: 96052
+.. nonce: a6FhaD
+.. section: Library
+
+Fix handling compiler warnings (SyntaxWarning and DeprecationWarning) in
+:func:`codeop.compile_command` when checking for incomplete input.
+Previously it emitted warnings and raised a SyntaxError. Now it always
+returns ``None`` for incomplete input without emitting any warnings.
+
+..
+
+.. date: 2022-08-19-18-21-01
+.. gh-issue: 96125
+.. nonce: ODcF1Y
+.. section: Library
+
+Fix incorrect condition that causes ``sys.thread_info.name`` to be wrong on
+pthread platforms.
+
+..
+
+.. date: 2022-08-19-10-19-32
+.. gh-issue: 96019
+.. nonce: b7uAVP
+.. section: Library
+
+Fix a bug in the ``makeunicodedata.py`` script leading to about 13 KiB of
+space saving in the ``unicodedata`` module, specifically the character
+decomposition data.
+
+..
+
+.. date: 2022-08-18-14-53-53
+.. gh-issue: 95463
+.. nonce: GpP05c
+.. section: Library
+
+Remove an incompatible change from :issue:`28080` that caused a regression
+that ignored the utf8 in ``ZipInfo.flag_bits``. Patch by Pablo Galindo.
+
+..
+
+.. date: 2022-08-14-18-59-54
+.. gh-issue: 69142
+.. nonce: 6is5Pq
+.. section: Library
+
+Add ``%:z`` strftime format code (generates tzoffset with colons as
+separator), see :ref:`strftime-strptime-behavior`.
+
+..
+
+.. date: 2022-08-11-18-52-17
+.. gh-issue: 95899
+.. nonce: _Bi4uG
+.. section: Library
+
+Fix :class:`asyncio.Runner` to call :func:`asyncio.set_event_loop` only once
+to avoid calling :meth:`~asyncio.AbstractChildWatcher.attach_loop` multiple
+times on child watchers. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-08-11-18-22-29
+.. gh-issue: 95736
+.. nonce: LzRZXe
+.. section: Library
+
+Fix :class:`unittest.IsolatedAsyncioTestCase` to set event loop before
+calling setup functions. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-08-11-03-16-48
+.. gh-issue: 95865
+.. nonce: 0IOkFP
+.. section: Library
+
+Speed up :func:`urllib.parse.quote_from_bytes` by replacing a list
+comprehension with ``map()``.
+
+..
+
+.. date: 2022-08-10-17-34-07
+.. gh-issue: 95861
+.. nonce: qv-T5s
+.. section: Library
+
+Add support for computing Spearman's correlation coefficient to the existing
+statistics.correlation() function.
+
+..
+
+.. date: 2022-08-10-11-54-04
+.. gh-issue: 95804
+.. nonce: i5FCFK
+.. section: Library
+
+Fix ``logging`` shutdown handler so it respects
+``MemoryHandler.flushOnClose``.
+
+..
+
+.. date: 2022-08-08-01-42-11
+.. gh-issue: 95704
+.. nonce: MOPFfX
+.. section: Library
+
+When a task catches :exc:`asyncio.CancelledError` and raises some other
+error, the other error should generally not silently be suppressed.
+
+..
+
+.. date: 2022-08-07-14-56-23
+.. gh-issue: 95149
+.. nonce: U0c6Ib
+.. section: Library
+
+The :class:`HTTPStatus <http.HTTPStatus>` enum offers a couple of properties
+to indicate the HTTP status category, e.g. ``HTTPStatus.OK.is_success``.
+
+..
+
+.. date: 2022-08-03-21-01-17
+.. gh-issue: 95609
+.. nonce: xxyjyX
+.. section: Library
+
+Update bundled pip to 22.2.2.
+
+..
+
+.. date: 2022-08-03-16-52-32
+.. gh-issue: 95289
+.. nonce: FMnHlV
+.. section: Library
+
+Fix :class:`asyncio.TaskGroup` to propagate exception when
+:exc:`asyncio.CancelledError` was replaced with another exception by a
+context manager. Patch by Kumar Aditya and Guido van Rossum.
+
+..
+
+.. date: 2022-07-29-20-58-37
+.. gh-issue: 94909
+.. nonce: YjMusj
+..
section: Library + +Fix incorrect joining of relative Windows paths with drives in +:class:`pathlib.PurePath` initializer. + +.. + +.. date: 2022-07-28-17-14-38 +.. gh-issue: 95385 +.. nonce: 6YlsDI +.. section: Library + +Faster ``json.dumps()`` when sorting of keys is not requested (default). + +.. + +.. date: 2022-07-27-19-47-51 +.. gh-issue: 83901 +.. nonce: OSw06c +.. section: Library + +Improve :meth:`Signature.bind ` error message for +missing keyword-only arguments. + +.. + +.. date: 2022-07-27-19-43-07 +.. gh-issue: 95339 +.. nonce: NuVQ68 +.. section: Library + +Update bundled pip to 22.2.1. + +.. + +.. date: 2022-07-27-11-35-45 +.. gh-issue: 95045 +.. nonce: iysT-Q +.. section: Library + +Fix GC crash when deallocating ``_lsprof.Profiler`` by untracking it before +calling any callbacks. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-25-15-45-06 +.. gh-issue: 95231 +.. nonce: i807-g +.. section: Library + +Fail gracefully if :data:`~errno.EPERM` or :data:`~errno.ENOSYS` is raised +when loading :mod:`crypt` methods. This may happen when trying to load +``MD5`` on a Linux kernel with :abbr:`FIPS (Federal Information Processing +Standard)` enabled. + +.. + +.. date: 2022-07-24-18-00-42 +.. gh-issue: 95097 +.. nonce: lu5qNf +.. section: Library + +Fix :func:`asyncio.run` for :class:`asyncio.Task` implementations without +:meth:`~asyncio.Task.uncancel` method. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-24-12-59-02 +.. gh-issue: 95087 +.. nonce: VvqXkN +.. section: Library + +Fix IndexError in parsing invalid date in the :mod:`email` module. + +.. + +.. date: 2022-07-24-12-00-06 +.. gh-issue: 95199 +.. nonce: -5A64k +.. section: Library + +Upgrade bundled setuptools to 63.2.0. + +.. + +.. date: 2022-07-24-09-15-35 +.. gh-issue: 95194 +.. nonce: ERVmqG +.. section: Library + +Upgrade bundled pip to 22.2. + +.. + +.. date: 2022-07-23-10-50-05 +.. gh-issue: 93899 +.. nonce: VT34A5 +.. section: Library + +Fix check for existence of :data:`os.EFD_CLOEXEC`, :data:`os.EFD_NONBLOCK` +and :data:`os.EFD_SEMAPHORE` flags on older kernel versions where these +flags are not present. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-23-10-42-05 +.. gh-issue: 95166 +.. nonce: xw6p3C +.. section: Library + +Fix :meth:`concurrent.futures.Executor.map` to cancel the currently waiting +on future on an error - e.g. TimeoutError or KeyboardInterrupt. + +.. + +.. date: 2022-07-22-21-18-17 +.. gh-issue: 95132 +.. nonce: n9anlw +.. section: Library + +Fix a :mod:`sqlite3` regression where ``*args`` and ``**kwds`` were +incorrectly relayed from :py:func:`~sqlite3.connect` to the +:class:`~sqlite3.Connection` factory. The regression was introduced in +3.11a1 with PR 24421 (:gh:`85128`). Patch by Erlend E. Aasland.` + +.. + +.. date: 2022-07-22-17-19-57 +.. gh-issue: 93157 +.. nonce: RXByAk +.. section: Library + +Fix :mod:`fileinput` module didn't support ``errors`` option when +``inplace`` is true. + +.. + +.. date: 2022-07-22-09-09-08 +.. gh-issue: 91212 +.. nonce: 53O8Ab +.. section: Library + +Fixed flickering of the turtle window when the tracer is turned off. Patch +by Shin-myoung-serp. + +.. + +.. date: 2022-07-22-00-58-49 +.. gh-issue: 95077 +.. nonce: 4Z6CNC +.. section: Library + +Add deprecation warning for enum ``member.member`` access (e.g. +``Color.RED.BLUE``). + +.. + +.. date: 2022-07-21-22-59-22 +.. gh-issue: 95109 +.. nonce: usxA9r +.. section: Library + +Ensure that timeouts scheduled with :class:`asyncio.Timeout` that have +already expired are delivered promptly. + +.. + +.. 
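+
+(Editorial illustration, not part of the upstream changelog.) The
+:class:`asyncio.Timeout` entry above concerns deadlines that are already in
+the past when the context is entered; a minimal sketch, assuming the
+standard :func:`asyncio.timeout_at` helper, might look like::
+
+    import asyncio
+
+    async def main():
+        # A deadline one second in the past is already expired when the
+        # block is entered; the timeout should still be delivered promptly.
+        try:
+            async with asyncio.timeout_at(asyncio.get_running_loop().time() - 1):
+                await asyncio.sleep(10)
+        except TimeoutError:
+            print("timed out promptly")
+
+    asyncio.run(main())
+
+..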
date: 2022-07-21-19-55-49 +.. gh-issue: 95105 +.. nonce: BIX2Km +.. section: Library + +:meth:`wsgiref.types.InputStream.__iter__` should return +``Iterator[bytes]``, not ``Iterable[bytes]``. Patch by Shantanu Jain. + +.. + +.. date: 2022-07-20-22-49-48 +.. gh-issue: 95066 +.. nonce: TuCu0E +.. section: Library + +Replaced assert with exception in :func:`ast.parse`, when +``feature_version`` has an invalid major version. Patch by Shantanu Jain. + +.. + +.. date: 2022-07-20-00-23-58 +.. gh-issue: 77617 +.. nonce: XGaqSQ +.. section: Library + +Add :mod:`sqlite3` :ref:`command-line interface `. Patch by +Erlend Aasland. + +.. + +.. date: 2022-07-19-15-37-11 +.. gh-issue: 95005 +.. nonce: iRmZ74 +.. section: Library + +Replace :c:expr:`_PyAccu` with :c:expr:`_PyUnicodeWriter` in JSON encoder +and StringIO and remove the :c:expr:`_PyAccu` implementation. + +.. + +.. date: 2022-07-17-22-31-32 +.. gh-issue: 90085 +.. nonce: c4FWcS +.. section: Library + +Remove ``-c/--clock`` and ``-t/--time`` CLI options of :mod:`timeit`. The +options had been deprecated since Python 3.3 and the functionality was +removed in Python 3.7. Patch by Shantanu Jain. + +.. + +.. date: 2022-07-15-08-13-51 +.. gh-issue: 94857 +.. nonce: 9_KvZJ +.. section: Library + +Fix refleak in ``_io.TextIOWrapper.reconfigure``. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-14-00-43-52 +.. gh-issue: 94821 +.. nonce: e17ghU +.. section: Library + +Fix binding of unix socket to empty address on Linux to use an available +address from the abstract namespace, instead of "\0". + +.. + +.. date: 2022-07-11-10-41-48 +.. gh-issue: 94736 +.. nonce: EbsgeK +.. section: Library + +Fix crash when deallocating an instance of a subclass of +``_multiprocessing.SemLock``. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-09-15-17-02 +.. gh-issue: 81620 +.. nonce: L0O_bV +.. section: Library + +Add random.binomialvariate(). + +.. + +.. date: 2022-07-09-08-55-04 +.. gh-issue: 74116 +.. nonce: 0XwYC1 +.. section: Library + +Allow :meth:`asyncio.StreamWriter.drain` to be awaited concurrently by +multiple tasks. Patch by Kumar Aditya. + +.. + +.. date: 2022-07-08-17-49-12 +.. gh-issue: 87822 +.. nonce: F9dzkf +.. section: Library + +When called with ``capture_locals=True``, the :mod:`traceback` module +functions swallow exceptions raised from calls to ``repr()`` on local +variables of frames. This is in order to prioritize the original exception +over rendering errors. An indication of the failure is printed in place of +the missing value. (Patch by Simon-Martin Schroeder). + +.. + +.. date: 2022-07-08-08-39-35 +.. gh-issue: 88050 +.. nonce: 0aOC_m +.. section: Library + +Fix :mod:`asyncio` subprocess transport to kill process cleanly when process +is blocked and avoid ``RuntimeError`` when loop is closed. Patch by Kumar +Aditya. + +.. + +.. date: 2022-07-07-15-46-55 +.. gh-issue: 94637 +.. nonce: IYEiUM +.. section: Library + +:meth:`SSLContext.set_default_verify_paths` now releases the GIL around +``SSL_CTX_set_default_verify_paths`` call. The function call performs I/O +and CPU intensive work. + +.. + +.. date: 2022-07-06-22-41-51 +.. gh-issue: 94309 +.. nonce: _XswsX +.. section: Library + +Deprecate aliases :class:`typing.Hashable` and :class:`typing.Sized` + +.. + +.. date: 2022-07-06-21-24-03 +.. gh-issue: 92546 +.. nonce: s5Upkh +.. section: Library + +An undocumented ``python -m pprint`` benchmark is moved into ``pprint`` +suite of pyperformance. Patch by Oleg Iarygin. + +.. + +.. date: 2022-07-06-16-01-08 +.. gh-issue: 94607 +.. 
nonce: Q6RYfz
+.. section: Library
+
+Fix subclassing complex generics with type variables in :mod:`typing`.
+Previously an error message saying ``Some type variables ... are not listed
+in Generic[...]`` was shown. :mod:`typing` no longer populates
+``__parameters__`` with the ``__parameters__`` of a Python class.
+
+..
+
+.. date: 2022-07-06-14-57-33
+.. gh-issue: 94619
+.. nonce: PRqKVX
+.. section: Library
+
+Remove the long-deprecated ``module_repr()`` from :mod:`importlib`.
+
+..
+
+.. date: 2022-07-06-14-45-12
+.. gh-issue: 93910
+.. nonce: iZcp67
+.. section: Library
+
+The ability to access the other values of an enum on an enum (e.g.
+``Color.RED.BLUE``) has been restored in order to fix a performance
+regression.
+
+..
+
+.. date: 2022-07-06-06-02-02
+.. gh-issue: 93896
+.. nonce: vIgWGr
+.. section: Library
+
+Fix :func:`asyncio.run` and :class:`unittest.IsolatedAsyncioTestCase` to
+always set the event loop, as was done in Python 3.10 and earlier. Patch
+by Kumar Aditya.
+
+..
+
+.. date: 2022-07-05-17-22-00
+.. gh-issue: 94343
+.. nonce: kf4H5r
+.. section: Library
+
+Allow setting the attributes of ``reprlib.Repr`` during object
+initialization.
+
+..
+
+.. date: 2022-07-03-16-41-03
+.. gh-issue: 94382
+.. nonce: zuVZeM
+.. section: Library
+
+Port static types of ``_multiprocessing`` module to heap types. Patch by
+Kumar Aditya.
+
+..
+
+.. date: 2022-07-03-16-26-35
+.. gh-issue: 78724
+.. nonce: XNiJzf
+.. section: Library
+
+Fix crash in :class:`struct.Struct` when it was not completely initialized
+by initializing it in :meth:`~object.__new__`. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-07-02-19-46-30
+.. gh-issue: 94510
+.. nonce: xOatDC
+.. section: Library
+
+Re-entrant calls to :func:`sys.setprofile` and :func:`sys.settrace` now
+raise :exc:`RuntimeError`. Patch by Pablo Galindo.
+
+..
+
+.. date: 2022-06-29-09-48-37
+.. gh-issue: 92336
+.. nonce: otA6c6
+.. section: Library
+
+Fix bug where :meth:`linecache.getline` fails on bad files with
+:exc:`UnicodeDecodeError` or :exc:`SyntaxError`. It now returns an empty
+string as per the documentation.
+
+..
+
+.. date: 2022-06-29-04-42-56
+.. gh-issue: 94398
+.. nonce: YOq_bJ
+.. section: Library
+
+Once an :class:`asyncio.TaskGroup` has started shutting down (i.e., at least
+one task has failed and the task group has started cancelling the remaining
+tasks), it should not be possible to add new tasks to the task group.
+
+..
+
+.. date: 2022-06-28-14-41-22
+.. gh-issue: 94383
+.. nonce: CXnquo
+.. section: Library
+
+:mod:`xml.etree`: Remove the ``ElementTree.Element.copy()`` method of the
+pure Python implementation, deprecated in Python 3.10, use the
+:func:`copy.copy` function instead. The C implementation of :mod:`xml.etree`
+has no ``copy()`` method, only a ``__copy__()`` method. Patch by Victor
+Stinner.
+
+..
+
+.. date: 2022-06-28-14-29-21
+.. gh-issue: 94379
+.. nonce: RrgKfh
+.. section: Library
+
+:mod:`zipimport`: Remove ``find_loader()`` and ``find_module()`` methods,
+deprecated in Python 3.10: use the ``find_spec()`` method instead. See
+:pep:`451` for the rationale. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-06-28-00-24-48
+.. gh-issue: 94352
+.. nonce: JY1Ayt
+.. section: Library
+
+:func:`shlex.split`: Passing ``None`` for *s* argument now raises an
+exception, rather than reading :data:`sys.stdin`. The feature was deprecated
+in Python 3.9. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-06-27-10-33-18
+.. gh-issue: 94318
+.. nonce: jR4_QV
+..
section: Library + +Strip trailing spaces in :mod:`pydoc` text output. + +.. + +.. date: 2022-06-26-10-59-15 +.. gh-issue: 89988 +.. nonce: K8rnmt +.. section: Library + +Fix memory leak in :class:`pickle.Pickler` when looking up +:attr:`dispatch_table`. Patch by Kumar Aditya. + +.. + +.. date: 2022-06-25-23-44-44 +.. gh-issue: 90016 +.. nonce: EB409s +.. section: Library + +Deprecate :mod:`sqlite3` :ref:`default adapters and converters +`. Patch by Erlend E. Aasland. + +.. + +.. date: 2022-06-25-16-27-02 +.. gh-issue: 94254 +.. nonce: beP16v +.. section: Library + +Fixed types of :mod:`struct` module to be immutable. Patch by Kumar Aditya. + +.. + +.. date: 2022-06-25-13-38-53 +.. gh-issue: 93259 +.. nonce: FAGw-2 +.. section: Library + +Now raise ``ValueError`` when ``None`` or an empty string are passed to +``Distribution.from_name`` (and other callers). + +.. + +.. date: 2022-06-25-09-12-23 +.. gh-issue: 74696 +.. nonce: fxC9ua +.. section: Library + +:func:`shutil.make_archive` now passes the *root_dir* argument to custom +archivers which support it. + +.. + +.. date: 2022-06-24-20-00-57 +.. gh-issue: 94216 +.. nonce: hxnQPu +.. section: Library + +The :mod:`dis` module now has the opcodes for pseudo instructions (those +which are used by the compiler during code generation but then removed or +replaced by real opcodes before the final bytecode is emitted). + +.. + +.. date: 2022-06-24-19-40-40 +.. gh-issue: 93096 +.. nonce: 3RlK2d +.. section: Library + +Removed undocumented ``python -m codecs``. Use ``python -m unittest +test.test_codecs.EncodedFileTest`` instead. + +.. + +.. date: 2022-06-24-19-23-59 +.. gh-issue: 94207 +.. nonce: VhS1eS +.. section: Library + +Made :class:`_struct.Struct` GC-tracked in order to fix a reference leak in +the :mod:`_struct` module. + +.. + +.. date: 2022-06-24-19-16-09 +.. gh-issue: 93096 +.. nonce: r1_oIc +.. section: Library + +Removed undocumented ``-t`` argument of ``python -m base64``. Use ``python +-m unittest test.test_base64.LegacyBase64TestCase.test_encodebytes`` +instead. + +.. + +.. date: 2022-06-24-18-20-42 +.. gh-issue: 94226 +.. nonce: 8ZL4Fm +.. section: Library + +Remove the :func:`locale.format` function, deprecated in Python 3.7: use +:func:`locale.format_string` instead. Patch by Victor Stinner. + +.. + +.. date: 2022-06-24-17-11-33 +.. gh-issue: 94199 +.. nonce: 7releN +.. section: Library + +Remove the :func:`ssl.match_hostname` function. The +:func:`ssl.match_hostname` was deprecated in Python 3.7. OpenSSL performs +hostname matching since Python 3.7, Python no longer uses the +:func:`ssl.match_hostname` function. Patch by Victor Stinner. + +.. + +.. date: 2022-06-24-14-25-26 +.. gh-issue: 94214 +.. nonce: 03pXR5 +.. section: Library + +Document the ``context`` object used in the ``venv.EnvBuilder`` class, and +add the new environment's library path to it. + +.. + +.. date: 2022-06-24-10-39-56 +.. gh-issue: 94199 +.. nonce: MIuckY +.. section: Library + +Remove the :func:`ssl.wrap_socket` function, deprecated in Python 3.7: +instead, create a :class:`ssl.SSLContext` object and call its +:class:`ssl.SSLContext.wrap_socket` method. Any package that still uses +:func:`ssl.wrap_socket` is broken and insecure. The function neither sends a +SNI TLS extension nor validates server hostname. Code is subject to `CWE-295 +`_: Improper Certificate +Validation. Patch by Victor Stinner. + +.. + +.. date: 2022-06-24-10-29-19 +.. gh-issue: 94199 +.. nonce: pfehmz +.. 
section: Library + +Remove the :func:`ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: +use :func:`os.urandom` or :func:`ssl.RAND_bytes` instead. Patch by Victor +Stinner. + +.. + +.. date: 2022-06-24-10-18-59 +.. gh-issue: 94199 +.. nonce: kYOo8g +.. section: Library + +:mod:`hashlib`: Remove the pure Python implementation of +:func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and +newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides a C +implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. Patch by +Victor Stinner. + +.. + +.. date: 2022-06-24-09-41-41 +.. gh-issue: 94196 +.. nonce: r2KyfS +.. section: Library + +:mod:`gzip`: Remove the ``filename`` attribute of :class:`gzip.GzipFile`, +deprecated since Python 2.6, use the :attr:`~gzip.GzipFile.name` attribute +instead. In write mode, the ``filename`` attribute added ``'.gz'`` file +extension if it was not present. Patch by Victor Stinner. + +.. + +.. date: 2022-06-24-08-49-47 +.. gh-issue: 94182 +.. nonce: Wknau0 +.. section: Library + +Run the :class:`asyncio.PidfdChildWatcher` on the running loop. This allows +event loops to run subprocesses when there is no default event loop running +on the main thread. + +.. + +.. date: 2022-06-23-14-35-10 +.. gh-issue: 94169 +.. nonce: jeba90 +.. section: Library + +Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python +3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) +function is a built-in function. Since Python 3.10, :func:`_pyio.open` is +also a static method. Patch by Victor Stinner. + +.. + +.. date: 2022-06-23-13-12-05 +.. gh-issue: 91742 +.. nonce: sNytVX +.. section: Library + +Fix :mod:`pdb` crash after jump caused by a null pointer dereference. Patch +by Kumar Aditya. + +.. + +.. date: 2022-06-22-11-16-11 +.. gh-issue: 94101 +.. nonce: V9vDG8 +.. section: Library + +Manual instantiation of :class:`ssl.SSLSession` objects is no longer allowed +as it led to misconfigured instances that crashed the interpreter when +attributes were accessed on them. + +.. + +.. date: 2022-06-21-11-40-31 +.. gh-issue: 84753 +.. nonce: FW1pxO +.. section: Library + +:func:`inspect.iscoroutinefunction`, :func:`inspect.isgeneratorfunction`, +and :func:`inspect.isasyncgenfunction` now properly return ``True`` for +duck-typed function-like objects like instances of +:class:`unittest.mock.AsyncMock`. + +This makes :func:`inspect.iscoroutinefunction` consistent with the behavior +of :func:`asyncio.iscoroutinefunction`. Patch by Mehdi ABAAKOUK. + +.. + +.. date: 2022-06-20-23-14-43 +.. gh-issue: 94028 +.. nonce: UofEcX +.. section: Library + +Fix a regression in :mod:`sqlite3` where statement objects were not +properly cleared and reset after use in cursor iterations. The regression was +introduced by PR 27884 in Python 3.11a1. Patch by Erlend E. Aasland. + +.. + +.. date: 2022-06-18-15-06-54 +.. gh-issue: 93973 +.. nonce: 4y6UQT +.. section: Library + +Add keyword argument ``all_errors`` to ``asyncio.create_connection`` so that +multiple connection errors can be raised as an ``ExceptionGroup``. + +.. + +.. date: 2022-06-17-16-00-55 +.. gh-issue: 93963 +.. nonce: 8YYZ-2 +.. section: Library + +Officially deprecate the ``importlib.abc`` classes that were moved to +``importlib.resources.abc``. + +.. + +.. date: 2022-06-17-12-02-30 +.. gh-issue: 93858 +.. nonce: R49ARc +.. section: Library + +Prevent error when activating venv in nested fish instances. + +.. + +.. date: 2022-06-16-11-16-53 +.. gh-issue: 93820 +.. 
nonce: 00X0Y5 +.. section: Library + +Pickle :class:`enum.Flag` by name. + +.. + +.. date: 2022-06-16-09-24-50 +.. gh-issue: 93847 +.. nonce: kuv8bN +.. section: Library + +Fix repr of enum of generic aliases. + +.. + +.. date: 2022-06-15-21-35-11 +.. gh-issue: 91404 +.. nonce: 39TZzW +.. section: Library + +Revert the fix for the :mod:`re` memory leak that occurred when a match is +terminated by a signal or memory allocation failure, as the implemented fix +caused a major performance regression. + +.. + +.. date: 2022-06-15-21-28-16 +.. gh-issue: 83499 +.. nonce: u3DQJ- +.. section: Library + +Fix double closing of file descriptor in :mod:`tempfile`. + +.. + +.. date: 2022-06-15-21-20-02 +.. gh-issue: 93820 +.. nonce: FAMLY8 +.. section: Library + +Fixed a regression when :func:`copy.copy`-ing :class:`enum.Flag` with +multiple flag members. + +.. + +.. date: 2022-06-11-13-32-17 +.. gh-issue: 79512 +.. nonce: A1KTDr +.. section: Library + +Fixed names and ``__module__`` value of :mod:`weakref` classes +:class:`~weakref.ReferenceType`, :class:`~weakref.ProxyType`, +:class:`~weakref.CallableProxyType`. It makes them pickleable. + +.. + +.. date: 2022-06-09-17-15-26 +.. gh-issue: 91389 +.. nonce: OE4vS5 +.. section: Library + +Fix an issue where :mod:`dis` utilities could report missing or incorrect +position information in the presence of ``CACHE`` entries. + +.. + +.. date: 2022-06-09-14-44-21 +.. gh-issue: 93626 +.. nonce: sfghs46 +.. section: Library + +Set ``__future__.annotations`` to have a ``None`` mandatoryRelease to +indicate that it is currently 'TBD'. + +.. + +.. date: 2022-06-09-10-12-55 +.. gh-issue: 90473 +.. nonce: 683m_C +.. section: Library + +Emscripten and WASI have no home directory and cannot provide :pep:`370` +user site directory. + +.. + +.. date: 2022-06-08-20-11-02 +.. gh-issue: 90494 +.. nonce: LIZT85 +.. section: Library + +:func:`copy.copy` and :func:`copy.deepcopy` now always raise a TypeError if +``__reduce__()`` returns a tuple with length 6 instead of silently ignoring +the 6th item or producing an incorrect result. + +.. + +.. date: 2022-06-07-14-53-46 +.. gh-issue: 90549 +.. nonce: T4FMKY +.. section: Library + +Fix a multiprocessing bug where a global named resource (such as a +semaphore) could leak when a child process is spawned (as opposed to +forked). + +.. + +.. date: 2022-06-06-13-19-43 +.. gh-issue: 93521 +.. nonce: _vE8m9 +.. section: Library + +Fixed a case where dataclasses would try to add ``__weakref__`` into the +``__slots__`` for a dataclass that specified ``weakref_slot=True`` when it +was already defined in one of its bases. This resulted in a ``TypeError`` +upon the new class being created. + +.. + +.. date: 2022-06-06-12-58-27 +.. gh-issue: 79579 +.. nonce: e8rB-M +.. section: Library + +:mod:`sqlite3` now correctly detects DML queries with leading comments. +Patch by Erlend E. Aasland. + +.. + +.. date: 2022-06-05-22-22-42 +.. gh-issue: 93421 +.. nonce: 43UO_8 +.. section: Library + +Update :data:`sqlite3.Cursor.rowcount` when a DML statement has run to +completion. This fixes the row count for SQL queries like ``UPDATE ... +RETURNING``. Patch by Erlend E. Aasland. + +.. + +.. date: 2022-06-04-00-11-54 +.. gh-issue: 93475 +.. nonce: vffFw1 +.. section: Library + +Expose ``FICLONE`` and ``FICLONERANGE`` constants in :mod:`fcntl`. Patch by +Illia Volochii. + +.. + +.. date: 2022-06-03-22-13-28 +.. gh-issue: 93370 +.. nonce: tjfu9L +.. section: Library + +Deprecate :data:`sqlite3.version` and :data:`sqlite3.version_info`. + +.. + +.. date: 2022-06-02-08-40-58 +.. 
gh-issue: 91810 +.. nonce: Gtk44w +.. section: Library + +Suppress writing an XML declaration in open files in ``ElementTree.write()`` +with ``encoding='unicode'`` and ``xml_declaration=None``. + +.. + +.. date: 2022-06-01-11-24-13 +.. gh-issue: 91162 +.. nonce: NxvU_u +.. section: Library + +Support splitting of unpacked arbitrary-length tuple over ``TypeVar`` and +``TypeVarTuple`` parameters. For example: + +* ``A[T, *Ts][*tuple[int, ...]]`` -> ``A[int, *tuple[int, ...]]`` +* ``A[*Ts, T][*tuple[int, ...]]`` -> ``A[*tuple[int, ...], int]`` + +.. + +.. date: 2022-05-31-14-58-40 +.. gh-issue: 93353 +.. nonce: 9Hvm6o +.. section: Library + +Fix the :func:`importlib.resources.as_file` context manager to remove the +temporary file if destroyed late during Python finalization: keep a local +reference to the :func:`os.remove` function. Patch by Victor Stinner. + +.. + +.. date: 2022-05-30-21-42-50 +.. gh-issue: 83658 +.. nonce: 01Ntx0 +.. section: Library + +Make :class:`multiprocessing.Pool` raise an exception if +``maxtasksperchild`` is not ``None`` or a positive int. + +.. + +.. date: 2022-05-28-08-02-55 +.. gh-issue: 93312 +.. nonce: HY0Uzj +.. section: Library + +Add :data:`os.PIDFD_NONBLOCK` flag to open a file descriptor for a process +with :func:`os.pidfd_open` in non-blocking mode. Patch by Kumar Aditya. + +.. + +.. date: 2022-05-27-22-17-11 +.. gh-issue: 88123 +.. nonce: mkYl5q +.. section: Library + +Implement Enum __contains__ that returns True or False to replace the +deprecated behaviour that would sometimes raise a TypeError. + +.. + +.. date: 2022-05-27-13-18-18 +.. gh-issue: 93297 +.. nonce: e2zuHz +.. section: Library + +Make asyncio task groups prevent child tasks from being GCed + +.. + +.. date: 2022-05-27-10-52-06 +.. gh-issue: 85308 +.. nonce: K6r-tJ +.. section: Library + +Changed :class:`argparse.ArgumentParser` to use :term:`filesystem encoding +and error handler` instead of default text encoding to read arguments from +file (e.g. ``fromfile_prefix_chars`` option). This change affects Windows; +argument file should be encoded with UTF-8 instead of ANSI Codepage. + +.. + +.. date: 2022-05-26-23-10-55 +.. gh-issue: 93156 +.. nonce: 4XfDVN +.. section: Library + +Accessing the :attr:`pathlib.PurePath.parents` sequence of an absolute path +using negative index values produced incorrect results. + +.. + +.. date: 2022-05-26-09-24-41 +.. gh-issue: 93162 +.. nonce: W1VuhU +.. section: Library + +Add the ability for :func:`logging.config.dictConfig` to usefully configure +:class:`~logging.handlers.QueueHandler` and +:class:`~logging.handlers.QueueListener` as a pair, and add +:func:`logging.getHandlerByName` and :func:`logging.getHandlerNames` APIs to +allow access to handlers by name. + +.. + +.. date: 2022-05-26-08-41-34 +.. gh-issue: 93243 +.. nonce: uw6x5z +.. section: Library + +The :mod:`smtpd` module was removed per the schedule in :pep:`594`. + +.. + +.. date: 2022-05-25-22-09-38 +.. gh-issue: 92886 +.. nonce: ylwDSc +.. section: Library + +Replace ``assert`` statements with ``raise AssertionError()`` in +:class:`~wsgiref.BaseHandler` so that the tested behaviour is maintained +running with optimizations ``(-O)``. + +.. + +.. date: 2022-05-25-15-57-39 +.. gh-issue: 90155 +.. nonce: YMstB5 +.. section: Library + +Fix broken :class:`asyncio.Semaphore` when acquire is cancelled. + +.. + +.. date: 2022-05-25-02-45-41 +.. gh-issue: 90817 +.. nonce: yxANgU +.. section: Library + +The :func:`locale.resetlocale` function is deprecated and will be removed in +Python 3.13. 
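+For example, a minimal migration sketch (assuming, purely as an illustration,
+that the goal is to re-apply the user's default locale settings, which is what
+the deprecated call was typically used for)::
+
+    import locale
+
+    # Hypothetical replacement for the deprecated locale.resetlocale()
+    locale.setlocale(locale.LC_ALL, "")
+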
Use ``locale.setlocale(locale.LC_ALL, "")`` instead. Patch by +Victor Stinner. + +.. + +.. date: 2022-05-25-00-21-28 +.. gh-issue: 91513 +.. nonce: 9VyCT4 +.. section: Library + +Added ``taskName`` attribute to :mod:`logging` module for use with +:mod:`asyncio` tasks. + +.. + +.. date: 2022-05-24-11-19-04 +.. gh-issue: 74696 +.. nonce: -cnf-A +.. section: Library + +:func:`shutil.make_archive` no longer temporarily changes the current +working directory during creation of standard ``.zip`` or tar archives. + +.. + +.. date: 2022-05-24-10-59-02 +.. gh-issue: 92728 +.. nonce: zxTifq +.. section: Library + +The :func:`re.template` function and the corresponding :const:`re.TEMPLATE` +and :const:`re.T` flags are restored after they were removed in 3.11.0b1, +but they are now deprecated, so they might be removed from Python 3.13. + +.. + +.. date: 2022-05-22-23-46-18 +.. gh-issue: 93033 +.. nonce: wZfiL- +.. section: Library + +Search in some strings (platform dependent i.e [U+0xFFFF, U+0x0100] on +Windows or [U+0xFFFFFFFF, U+0x00010000] on Linux 64-bit) are now up to 10 +times faster. + +.. + +.. date: 2022-05-22-16-08-01 +.. gh-issue: 89973 +.. nonce: jc-Q4g +.. section: Library + +Fix :exc:`re.error` raised in :mod:`fnmatch` if the pattern contains a +character range with upper bound lower than lower bound (e.g. ``[c-a]``). +Now such ranges are interpreted as empty ranges. + +.. + +.. date: 2022-05-21-13-16-16 +.. gh-issue: 93044 +.. nonce: eJ_XkZ +.. section: Library + +No longer convert the database argument of :func:`sqlite3.connect` to bytes +before passing it to the factory. + +.. + +.. date: 2022-05-20-15-52-43 +.. gh-issue: 93010 +.. nonce: WF-cAc +.. section: Library + +In a very special case, the email package tried to append the nonexistent +``InvalidHeaderError`` to the defect list. It should have been +``InvalidHeaderDefect``. + +.. + +.. date: 2022-05-19-22-34-42 +.. gh-issue: 92986 +.. nonce: e6uKxj +.. section: Library + +Fix :func:`ast.unparse` when ``ImportFrom.level`` is None + +.. + +.. date: 2022-05-19-17-49-58 +.. gh-issue: 92932 +.. nonce: o2peTh +.. section: Library + +Now :func:`~dis.dis` and :func:`~dis.get_instructions` handle operand values +for instructions prefixed by ``EXTENDED_ARG_QUICK``. Patch by Sam Gross and +Dong-hee Na. + +.. + +.. date: 2022-05-19-13-33-18 +.. gh-issue: 92675 +.. nonce: ZeerMZ +.. section: Library + +Fix :func:`venv.ensure_directories` to accept :class:`pathlib.Path` +arguments in addition to :class:`str` paths. Patch by David Foster. + +.. + +.. date: 2022-05-18-21-04-09 +.. gh-issue: 87901 +.. nonce: lnf041 +.. section: Library + +Removed the ``encoding`` argument from :func:`os.popen` that was added in +3.11b1. + +.. + +.. date: 2022-05-18-17-18-41 +.. gh-issue: 91922 +.. nonce: DwWIsJ +.. section: Library + +Fix function :func:`sqlite.connect` and the :class:`sqlite.Connection` +constructor on non-UTF-8 locales. Also, they now support bytes paths +non-decodable with the current FS encoding. + +.. + +.. date: 2022-05-17-06-27-39 +.. gh-issue: 92869 +.. nonce: t8oBkw +.. section: Library + +Added :class:`~ctypes.c_time_t` to :mod:`ctypes`, which has the same size as +the :c:type:`time_t` type in C. + +.. + +.. date: 2022-05-16-14-35-39 +.. gh-issue: 92839 +.. nonce: owSMyo +.. section: Library + +Fixed crash resulting from calling bisect.insort() or bisect.insort_left() +with the key argument not equal to None. + +.. + +.. date: 2022-05-14-11-41-23 +.. gh-issue: 90473 +.. nonce: kPdOZl +.. 
section: Library + +:mod:`subprocess` now fails early on Emscripten and WASI platforms to work +around missing :func:`os.pipe` on WASI. + +.. + +.. date: 2022-05-14-09-01-38 +.. gh-issue: 89325 +.. nonce: ys-2BZ +.. section: Library + +Removed many old deprecated :mod:`unittest` features: +:class:`~unittest.TestCase` method aliases, undocumented and broken +:class:`~unittest.TestCase` method ``assertDictContainsSubset``, +undocumented :meth:`TestLoader.loadTestsFromModule +` parameter *use_load_tests*, and +an underscored alias of the :class:`~unittest.TextTestResult` class. + +.. + +.. date: 2022-05-12-15-19-00 +.. gh-issue: 92734 +.. nonce: d0wjDt +.. section: Library + +Allow multi-element reprs emitted by :mod:`reprlib` to be pretty-printed +using configurable indentation. + +.. + +.. date: 2022-05-11-19-33-27 +.. gh-issue: 92671 +.. nonce: KE4v6a +.. section: Library + +Fixed :func:`ast.unparse` for empty tuples in the assignment target context. + +.. + +.. date: 2022-05-11-14-34-09 +.. gh-issue: 91581 +.. nonce: glkou2 +.. section: Library + +:meth:`~datetime.datetime.utcfromtimestamp` no longer attempts to resolve +``fold`` in the pure Python implementation, since the fold is never 1 in +UTC. In addition to being slightly faster in the common case, this also +prevents some errors when the timestamp is close to :attr:`datetime.min +`. Patch by Paul Ganssle. + +.. + +.. date: 2022-05-11-10-06-31 +.. gh-issue: 86388 +.. nonce: 7ivUtT +.. section: Library + +Removed randrange() functionality deprecated since Python 3.10. Formerly, +randrange(10.0) losslessly converted to randrange(10). Now, it raises a +TypeError. Also, the exception raised for non-integral values such as +randrange(10.5) or randrange('10') has been changed from ValueError to +TypeError. + +.. + +.. date: 2022-05-10-16-30-40 +.. gh-issue: 90385 +.. nonce: 1_wBRQ +.. section: Library + +Add :meth:`pathlib.Path.walk` as an alternative to :func:`os.walk`. + +.. + +.. date: 2022-05-10-07-57-27 +.. gh-issue: 92550 +.. nonce: Rk_UzM +.. section: Library + +Fix :meth:`pathlib.Path.rglob` for empty pattern. + +.. + +.. date: 2022-05-09-22-27-11 +.. gh-issue: 92591 +.. nonce: V7RCk2 +.. section: Library + +Allow :mod:`logging` filters to return a :class:`logging.LogRecord` instance +so that filters attached to :class:`logging.Handler`\ s can enrich records +without side effects on other handlers. + +.. + +.. date: 2022-05-09-21-31-41 +.. gh-issue: 92445 +.. nonce: tJosdm +.. section: Library + +Fix a bug in :mod:`argparse` where ``nargs="*"`` would raise an error +instead of returning an empty list when 0 arguments were supplied if choice +was also defined in ``parser.add_argument``. + +.. + +.. date: 2022-05-09-11-55-04 +.. gh-issue: 92547 +.. nonce: CzVZft +.. section: Library + +Remove undocumented :mod:`sqlite3` features deprecated in Python 3.10: + +* ``sqlite3.enable_shared_cache()`` +* ``sqlite3.OptimizedUnicode`` + +Patch by Erlend E. Aasland. + +.. + +.. date: 2022-05-09-09-28-02 +.. gh-issue: 92530 +.. nonce: M4Q1RS +.. section: Library + +Fix an issue that occurred after interrupting +:func:`threading.Condition.notify`. + +.. + +.. date: 2022-05-09-01-27-25 +.. gh-issue: 92531 +.. nonce: vV7S_O +.. section: Library + +The statistics.median_grouped() function now always return a float. +Formerly, it did not convert the input type when for sequences of length +one. + +.. + +.. date: 2022-05-08-19-21-14 +.. gh-issue: 84131 +.. nonce: rG5kI7 +.. 
section: Library + +The :class:`pathlib.Path` deprecated method ``link_to`` has been removed. +Use 3.10's :meth:`~pathlib.Path.hardlink_to` method instead as its semantics +are consistent with that of :meth:`~pathlib.Path.symlink_to`. + +.. + +.. date: 2022-05-08-18-51-14 +.. gh-issue: 89336 +.. nonce: TL6ip7 +.. section: Library + +Removed :mod:`configparser` module APIs: the ``SafeConfigParser`` class +alias, the ``ParsingError.filename`` property and parameter, and the +``ConfigParser.readfp`` method, all of which were deprecated since Python +3.2. + +.. + +.. date: 2022-05-06-13-00-57 +.. gh-issue: 92391 +.. nonce: s-Lase +.. section: Library + +Add :meth:`~object.__class_getitem__` to :class:`csv.DictReader` and +:class:`csv.DictWriter`, allowing them to be parameterized at runtime. Patch +by Marc Mueller. + +.. + +.. date: 2022-04-26-18-37-24 +.. gh-issue: 91968 +.. nonce: fuuH1_ +.. section: Library + +Add ``SO_RTABLE`` and ``SO_USER_COOKIE`` constants to :mod:`socket`. + +.. + +.. date: 2022-04-25-10-23-01 +.. gh-issue: 91810 +.. nonce: DOHa6B +.. section: Library + +:class:`~xml.etree.ElementTree.ElementTree` method +:meth:`~xml.etree.ElementTree.ElementTree.write` and function +:func:`~xml.etree.ElementTree.tostring` now use the text file's encoding +("UTF-8" if not available) instead of locale encoding in XML declaration +when ``encoding="unicode"`` is specified. + +.. + +.. date: 2022-04-24-22-26-45 +.. gh-issue: 81790 +.. nonce: M5Rvpm +.. section: Library + +:func:`os.path.splitdrive` now understands DOS device paths with UNC links +(beginning ``\\?\UNC\``). Contributed by Barney Gale. + +.. + +.. date: 2022-04-21-19-14-29 +.. gh-issue: 91760 +.. nonce: 54AR-m +.. section: Library + +Apply more strict rules for numerical group references and group names in +regular expressions. Only sequence of ASCII digits is now accepted as a +numerical reference. The group name in bytes patterns and replacement +strings can now only contain ASCII letters and digits and underscore. + +.. + +.. date: 2022-04-15-22-07-36 +.. gh-issue: 90622 +.. nonce: 0C6l8h +.. section: Library + +Worker processes for :class:`concurrent.futures.ProcessPoolExecutor` are no +longer spawned on demand (a feature added in 3.9) when the multiprocessing +context start method is ``"fork"`` as that can lead to deadlocks in the +child processes due to a fork happening while threads are running. + +.. + +.. date: 2022-04-15-17-38-55 +.. gh-issue: 91577 +.. nonce: Ah7cLL +.. section: Library + +Move imports in :class:`~multiprocessing.SharedMemory` methods to module +level so that they can be executed late in python finalization. + +.. + +.. date: 2022-04-15-13-16-25 +.. gh-issue: 91581 +.. nonce: 9OGsrN +.. section: Library + +Remove an unhandled error case in the C implementation of calls to +:meth:`datetime.fromtimestamp ` with no +time zone (i.e. getting a local time from an epoch timestamp). This should +have no user-facing effect other than giving a possibly more accurate error +message when called with timestamps that fall on 10000-01-01 in the local +time. Patch by Paul Ganssle. + +.. + +.. date: 2022-04-15-11-29-38 +.. gh-issue: 91539 +.. nonce: 7WgVuA +.. section: Library + +Improve performance of ``urllib.request.getproxies_environment`` when there +are many environment variables + +.. + +.. date: 2022-04-14-08-37-16 +.. gh-issue: 91524 +.. nonce: g8PiIu +.. 
section: Library + +Speed up the regular expression substitution (functions :func:`re.sub` and +:func:`re.subn` and corresponding :class:`re.Pattern` methods) for +replacement strings containing group references by 2--3 times. + +.. + +.. date: 2022-04-12-18-05-40 +.. gh-issue: 91447 +.. nonce: N_Fs4H +.. section: Library + +Fix findtext in the xml module to only give an empty string when the text +attribute is set to None. + +.. + +.. date: 2022-04-11-16-55-41 +.. gh-issue: 91456 +.. nonce: DK3KKl +.. section: Library + +Deprecate current default auto() behavior: In 3.13 the default will be for +for auto() to always return the largest member value incremented by 1, and +to raise if incompatible value types are used. + +.. + +.. bpo: 47231 +.. date: 2022-04-08-22-12-11 +.. nonce: lvyglt +.. section: Library + +Fixed an issue with inconsistent trailing slashes in tarfile longname +directories. + +.. + +.. bpo: 39064 +.. date: 2022-04-03-19-40-09 +.. nonce: 76PbIz +.. section: Library + +:class:`zipfile.ZipFile` now raises :exc:`zipfile.BadZipFile` instead of +``ValueError`` when reading a corrupt zip file in which the central +directory offset is negative. + +.. + +.. bpo: 41287 +.. date: 2022-04-03-11-25-02 +.. nonce: 8CTdwf +.. section: Library + +Fix handling of the ``doc`` argument in subclasses of :func:`property`. + +.. + +.. date: 2022-04-01-12-35-44 +.. gh-issue: 90005 +.. nonce: pvaLHQ +.. section: Library + +:mod:`ctypes` dependency ``libffi`` is now detected with ``pkg-config``. + +.. + +.. bpo: 32547 +.. date: 2022-04-01-09-43-54 +.. nonce: NIUiNC +.. section: Library + +The constructors for :class:`~csv.DictWriter` and :class:`~csv.DictReader` +now coerce the ``fieldnames`` argument to a :class:`list` if it is an +iterator. + +.. + +.. bpo: 35540 +.. date: 2022-03-22-18-28-55 +.. nonce: nyijX9 +.. section: Library + +Fix :func:`dataclasses.asdict` crash when :class:`collections.defaultdict` +is present in the attributes. + +.. + +.. bpo: 47063 +.. date: 2022-03-19-04-41-42 +.. nonce: nwRfUo +.. section: Library + +Add an index_pages parameter to support using non-default index page names. + +.. + +.. bpo: 47025 +.. date: 2022-03-16-14-24-14 +.. nonce: qtT3CE +.. section: Library + +Drop support for :class:`bytes` on :attr:`sys.path`. + +.. + +.. bpo: 46951 +.. date: 2022-03-08-04-46-44 +.. nonce: SWAz97 +.. section: Library + +Order the contents of zipapp archives, to make builds more reproducible. + +.. + +.. bpo: 42777 +.. date: 2022-02-21-01-37-00 +.. nonce: nWK3E6 +.. section: Library + +Implement :meth:`pathlib.Path.is_mount` for Windows paths. + +.. + +.. bpo: 46755 +.. date: 2022-02-15-12-40-48 +.. nonce: zePJfx +.. section: Library + +In :class:`QueueHandler`, clear ``stack_info`` from :class:`LogRecord` to +prevent stack trace from being written twice. + +.. + +.. bpo: 45393 +.. date: 2022-02-09-23-44-27 +.. nonce: 9v5Y8U +.. section: Library + +Fix the formatting for ``await x`` and ``not x`` in the operator precedence +table when using the :func:`help` system. + +.. + +.. bpo: 46642 +.. date: 2022-02-05-18-46-54 +.. nonce: YI6nHQ +.. section: Library + +Improve error message when trying to subclass an instance of +:data:`typing.TypeVar`, :data:`typing.ParamSpec`, +:data:`typing.TypeVarTuple`, etc. Based on patch by Gregory Beauregard. + +.. + +.. bpo: 46364 +.. date: 2022-01-14-10-49-20 +.. nonce: SzhlU9 +.. section: Library + +Restrict use of sockets instead of pipes for stdin of subprocesses created +by :mod:`asyncio` to AIX platform only. + +.. + +.. 
bpo: 28249 +.. date: 2022-01-09-14-23-00 +.. nonce: 4dzB80 +.. section: Library + +Set :attr:`doctest.DocTest.lineno` to ``None`` when object does not have +:attr:`__doc__`. + +.. + +.. bpo: 46197 +.. date: 2022-01-03-15-07-06 +.. nonce: Z0djv6 +.. section: Library + +Fix :mod:`ensurepip` environment isolation for subprocess running ``pip``. + +.. + +.. bpo: 45924 +.. date: 2021-12-27-15-32-15 +.. nonce: 0ZpHX2 +.. section: Library + +Fix :mod:`asyncio` incorrect traceback when future's exception is raised +multiple times. Patch by Kumar Aditya. + +.. + +.. bpo: 45046 +.. date: 2021-08-29-19-59-16 +.. nonce: eGq0NC +.. section: Library + +Add support of context managers in :mod:`unittest`: methods +:meth:`~unittest.TestCase.enterContext` and +:meth:`~unittest.TestCase.enterClassContext` of class +:class:`~unittest.TestCase`, method +:meth:`~unittest.IsolatedAsyncioTestCase.enterAsyncContext` of class +:class:`~unittest.IsolatedAsyncioTestCase` and function +:func:`unittest.enterModuleContext`. + +.. + +.. bpo: 44173 +.. date: 2021-08-27-18-07-35 +.. nonce: oW92Ev +.. section: Library + +Enable fast seeking of uncompressed unencrypted :class:`zipfile.ZipExtFile` + +.. + +.. bpo: 42627 +.. date: 2021-05-22-07-58-59 +.. nonce: EejtD0 +.. section: Library + +Fix incorrect parsing of Windows registry proxy settings + +.. + +.. bpo: 42047 +.. date: 2020-10-15-18-37-12 +.. nonce: XDdoSF +.. section: Library + +Add :func:`threading.get_native_id` support for DragonFly BSD. Patch by +David Carlier. + +.. + +.. bpo: 14243 +.. date: 2020-09-28-04-56-04 +.. nonce: YECnxv +.. section: Library + +The :class:`tempfile.NamedTemporaryFile` function has a new optional +parameter *delete_on_close* + +.. + +.. bpo: 41246 +.. date: 2020-07-08-20-32-13 +.. nonce: 2trYf3 +.. section: Library + +Give the same callback function for when the overlapped operation is done to +the functions ``recv``, ``recv_into``, ``recvfrom``, ``sendto``, ``send`` +and ``sendfile`` inside ``IocpProactor``. + +.. + +.. bpo: 39264 +.. date: 2020-01-09-01-57-12 +.. nonce: GsBL9- +.. section: Library + +Fixed :meth:`collections.UserDict.get` to not call :meth:`__missing__` when +a value is not found. This matches the behavior of :class:`dict`. Patch by +Bar Harel. + +.. + +.. bpo: 38693 +.. date: 2019-11-04-22-21-27 +.. nonce: w_OAov +.. section: Library + +:mod:`importlib` now uses f-strings internally instead of ``str.format``. + +.. + +.. bpo: 38267 +.. date: 2019-09-25-00-37-51 +.. nonce: X9Jb5V +.. section: Library + +Add *timeout* parameter to :meth:`asyncio.loop.shutdown_default_executor`. +The default value is ``None``, which means the executor will be given an +unlimited amount of time. When called from :class:`asyncio.Runner` or +:func:`asyncio.run`, the default timeout is 5 minutes. + +.. + +.. bpo: 34828 +.. date: 2018-09-28-22-18-03 +.. nonce: 5Zyi_S +.. section: Library + +:meth:`sqlite3.Connection.iterdump` now handles databases that use +``AUTOINCREMENT`` in one or more tables. + +.. + +.. bpo: 32990 +.. date: 2018-09-23-07-47-29 +.. nonce: 2FVVTU +.. section: Library + +Support reading wave files with the ``WAVE_FORMAT_EXTENSIBLE`` format in the +:mod:`wave` module. + +.. + +.. bpo: 26253 +.. date: 2017-07-31-13-35-28 +.. nonce: 8v_sCs +.. section: Library + +Allow adjustable compression level for tarfile streams in +:func:`tarfile.open`. + +.. + +.. date: 2022-10-16-17-34-45 +.. gh-issue: 85525 +.. nonce: DvkD0v +.. section: Documentation + +Remove extra row + +.. + +.. date: 2022-10-11-09-40-50 +.. gh-issue: 86404 +.. 
nonce: dEAb8W +.. section: Documentation + +Deprecated tools ``make suspicious`` and ``rstlint.py`` are now removed. +They have been replaced by `sphinx-lint +`_. + +.. + +.. date: 2022-10-02-10-58-52 +.. gh-issue: 97741 +.. nonce: 39l023 +.. section: Documentation + +Fix ``!`` in C domain ref target syntax via a ``conf.py`` patch, so it works +as intended to disable ref target resolution. + +.. + +.. date: 2022-09-01-17-03-04 +.. gh-issue: 96432 +.. nonce: 1EJ1-4 +.. section: Documentation + +Fraction literals now support whitespace around the forward slash, +``Fraction('2 / 3')``. + +.. + +.. date: 2022-08-19-17-07-45 +.. gh-issue: 96098 +.. nonce: nDp43u +.. section: Documentation + +Improve discoverability of the higher level concurrent.futures module by +providing clearer links from the lower level threading and multiprocessing +modules. + +.. + +.. date: 2022-08-13-20-34-51 +.. gh-issue: 95957 +.. nonce: W9ZZAx +.. section: Documentation + +What's New 3.11 now has instructions for how to provide compiler and linker +flags for Tcl/Tk and OpenSSL on RHEL 7 and CentOS 7. + +.. + +.. date: 2022-08-12-01-12-52 +.. gh-issue: 95588 +.. nonce: PA0FI7 +.. section: Documentation + +Clarified the conflicting advice given in the :mod:`ast` documentation about +:func:`ast.literal_eval` being "safe" for use on untrusted input while at +the same time warning that it can crash the process. The latter statement is +true and is deemed unfixable without a large amount of work unsuitable for a +bugfix. So we keep the warning and no longer claim that ``literal_eval`` is +safe. + +.. + +.. date: 2022-08-03-13-35-08 +.. gh-issue: 91207 +.. nonce: eJ4pPf +.. section: Documentation + +Fix stylesheet not working in Windows CHM htmlhelp docs and add warning that +they are deprecated. Contributed by C.A.M. Gerlach. + +.. + +.. date: 2022-07-30-00-23-11 +.. gh-issue: 95454 +.. nonce: we7AFm +.. section: Documentation + +Replaced incorrectly written true/false values in documentation. Patch by +Robert O'Shea. + +.. + +.. date: 2022-07-29-23-02-19 +.. gh-issue: 95451 +.. nonce: -tgB93 +.. section: Documentation + +Update library documentation with :ref:`availability information +` on WebAssembly platforms ``wasm32-emscripten`` and +``wasm32-wasi``. + +.. + +.. date: 2022-07-29-09-04-02 +.. gh-issue: 95415 +.. nonce: LKTyw6 +.. section: Documentation + +Use consistent syntax for platform availability. The directive now supports +a content body and emits a warning when it encounters an unknown platform. + +.. + +.. date: 2022-07-07-08-42-05 +.. gh-issue: 94321 +.. nonce: pmCIPb +.. section: Documentation + +Document the :pep:`246` style protocol type +:class:`sqlite3.PrepareProtocol`. + +.. + +.. date: 2022-06-19-18-18-22 +.. gh-issue: 86128 +.. nonce: 39DDTD +.. section: Documentation + +Document a limitation in ThreadPoolExecutor where its exit handler is +executed before any handlers in atexit. + +.. + +.. date: 2022-06-16-10-10-59 +.. gh-issue: 61162 +.. nonce: 1ypkG8 +.. section: Documentation + +Clarify :mod:`sqlite3` behavior when +:ref:`sqlite3-connection-context-manager`. + +.. + +.. date: 2022-06-15-12-12-49 +.. gh-issue: 87260 +.. nonce: epyI7D +.. section: Documentation + +Align :mod:`sqlite3` argument specs with the actual implementation. + +.. + +.. date: 2022-05-29-21-22-54 +.. gh-issue: 86986 +.. nonce: lFXw8j +.. section: Documentation + +The minimum Sphinx version required to build the documentation is now 3.2. + +.. + +.. date: 2022-05-26-14-51-25 +.. gh-issue: 88831 +.. nonce: 5Cccr5 +.. 
section: Documentation + +Augmented documentation of asyncio.create_task(). Clarified the need to keep +strong references to tasks and added a code snippet detailing how to to +this. + +.. + +.. date: 2022-05-26-11-33-23 +.. gh-issue: 86438 +.. nonce: kEGGmK +.. section: Documentation + +Clarify that :option:`-W` and :envvar:`PYTHONWARNINGS` are matched literally +and case-insensitively, rather than as regular expressions, in +:mod:`warnings`. + +.. + +.. date: 2022-05-20-18-42-10 +.. gh-issue: 93031 +.. nonce: c2RdJe +.. section: Documentation + +Update tutorial introduction output to use 3.10+ SyntaxError invalid range. + +.. + +.. date: 2022-05-18-23-58-26 +.. gh-issue: 92240 +.. nonce: bHvYiz +.. section: Documentation + +Added release dates for "What's New in Python 3.X" for 3.0, 3.1, 3.2, 3.8 +and 3.10 + +.. + +.. bpo: 47161 +.. date: 2022-03-30-17-56-01 +.. nonce: gesHfS +.. section: Documentation + +Document that :class:`pathlib.PurePath` does not collapse initial double +slashes because they denote UNC paths. + +.. + +.. bpo: 40838 +.. date: 2022-01-13-16-03-15 +.. nonce: k3NVCf +.. section: Documentation + +Document that :func:`inspect.getdoc`, :func:`inspect.getmodule`, and +:func:`inspect.getsourcefile` might return ``None``. + +.. + +.. bpo: 43689 +.. date: 2021-04-01-08-09-34 +.. nonce: mqCfLe +.. section: Documentation + +The ``Differ`` documentation now also mentions other whitespace characters, +which make it harder to understand the diff output. + +.. + +.. bpo: 38056 +.. date: 2019-09-12-08-28-17 +.. nonce: 6ktYkc +.. section: Documentation + +Overhaul the :ref:`error-handlers` documentation in :mod:`codecs`. + +.. + +.. bpo: 13553 +.. date: 2017-12-10-19-13-39 +.. nonce: gQbZs4 +.. section: Documentation + +Document tkinter.Tk args. + +.. + +.. date: 2022-10-20-17-49-50 +.. gh-issue: 95027 +.. nonce: viRpJB +.. section: Tests + +On Windows, when the Python test suite is run with the ``-jN`` option, the +ANSI code page is now used as the encoding for the stdout temporary file, +rather than using UTF-8 which can lead to decoding errors. Patch by Victor +Stinner. + +.. + +.. date: 2022-09-08-18-31-26 +.. gh-issue: 96624 +.. nonce: 5cANM1 +.. section: Tests + +Fixed the failure of repeated runs of ``test.test_unittest`` caused by side +effects in ``test_dotted_but_module_not_loaded``. + +.. + +.. date: 2022-08-22-14-59-42 +.. gh-issue: 95243 +.. nonce: DeD66V +.. section: Tests + +Mitigate the inherent race condition from using find_unused_port() in +testSockName() by trying to find an unused port a few times before failing. +Patch by Ross Burton. + +.. + +.. date: 2022-08-05-09-57-43 +.. gh-issue: 95573 +.. nonce: edMdQB +.. section: Tests + +:source:`Lib/test/test_asyncio/test_ssl.py` exposed a bug in the macOS +kernel where intense concurrent load on non-blocking sockets occasionally +causes :const:`errno.ENOBUFS` ("No buffer space available") to be emitted. +FB11063974 filed with Apple, in the mean time as a workaround buffer size +used in tests on macOS is decreased to avoid intermittent failures. Patch +by Fantix King. + +.. + +.. date: 2022-07-26-15-22-19 +.. gh-issue: 95280 +.. nonce: h8HvbP +.. section: Tests + +Fix problem with ``test_ssl`` ``test_get_ciphers`` on systems that require +perfect forward secrecy (PFS) ciphers. + +.. + +.. date: 2022-07-24-20-19-05 +.. gh-issue: 95212 +.. nonce: fHiU4e +.. section: Tests + +Make multiprocessing test case ``test_shared_memory_recreate`` +parallel-safe. + +.. + +.. date: 2022-07-24-17-24-42 +.. gh-issue: 95218 +.. 
nonce: zfBLtu +.. section: Tests + +Move tests for importlib.resources into test_importlib.resources. + +.. + +.. date: 2022-07-24-16-28-31 +.. gh-issue: 93963 +.. nonce: UB9azu +.. section: Tests + +Updated tests to use preferred location for ``importlib.resources`` ABCs. + +.. + +.. date: 2022-07-08-12-22-00 +.. gh-issue: 94675 +.. nonce: IiTs5f +.. section: Tests + +Add a regression test for :mod:`re` exponentional slowdown when using +rjsmin. + +.. + +.. date: 2022-07-05-17-53-13 +.. gh-issue: 91330 +.. nonce: Qys5IL +.. section: Tests + +Added more tests for :mod:`dataclasses` to cover behavior with data +descriptor-based fields. + +.. + +.. date: 2022-06-27-21-27-20 +.. gh-issue: 94208 +.. nonce: VR6HX- +.. section: Tests + +``test_ssl`` is now checking for supported TLS version and protocols in more +tests. + +.. + +.. date: 2022-06-27-08-53-40 +.. gh-issue: 94315 +.. nonce: MoZT9t +.. section: Tests + +Tests now check for DAC override capability instead of relying on +:func:`os.geteuid`. + +.. + +.. date: 2022-06-21-17-37-46 +.. gh-issue: 54781 +.. nonce: BjVAVg +.. section: Tests + +Rename test_tk to test_tkinter, and rename test_ttk_guionly to test_ttk. +Patch by Victor Stinner. + +.. + +.. date: 2022-06-20-23-04-52 +.. gh-issue: 93839 +.. nonce: OE3Ybk +.. section: Tests + +Move ``Lib/ctypes/test/`` to ``Lib/test/test_ctypes/``. Patch by Victor +Stinner. + +.. + +.. date: 2022-06-17-15-20-09 +.. gh-issue: 93951 +.. nonce: CW1Vv4 +.. section: Tests + +In test_bdb.StateTestCase.test_skip, avoid including auxiliary importers. + +.. + +.. date: 2022-06-17-13-55-11 +.. gh-issue: 93957 +.. nonce: X4ovYV +.. section: Tests + +Provide nicer error reporting from subprocesses in +test_venv.EnsurePipTest.test_with_pip. + +.. + +.. date: 2022-06-17-13-27-21 +.. gh-issue: 93884 +.. nonce: 5pvPvl +.. section: Tests + +Add test cases for :c:func:`PyNumber_ToBase` that take a large number or a +non-int object as parameter. + +.. + +.. date: 2022-06-16-21-38-18 +.. gh-issue: 93852 +.. nonce: U_Hl6s +.. section: Tests + +test_asyncio, test_logging, test_socket and test_socketserver now create +AF_UNIX domains in the current directory to no longer fail with +``OSError("AF_UNIX path too long")`` if the temporary directory (the +:envvar:`TMPDIR` environment variable) is too long. Patch by Victor Stinner. + +.. + +.. date: 2022-06-16-17-50-58 +.. gh-issue: 93353 +.. nonce: JdpATx +.. section: Tests + +regrtest now checks if a test leaks temporary files or directories if run +with -jN option. Patch by Victor Stinner. + +.. + +.. date: 2022-06-10-21-18-14 +.. gh-issue: 84461 +.. nonce: 9TAb26 +.. section: Tests + +``run_tests.py`` now handles cross compiling env vars correctly and pass +``HOSTRUNNER`` to regression tests. + +.. + +.. date: 2022-06-08-22-32-56 +.. gh-issue: 93616 +.. nonce: e5Kkx2 +.. section: Tests + +``test_modulefinder`` now creates a temporary directory in +``ModuleFinderTest.setUp()`` instead of module scope. + +.. + +.. date: 2022-06-08-14-17-59 +.. gh-issue: 93575 +.. nonce: Xb2LNB +.. section: Tests + +Fix issue with test_unicode test_raiseMemError. The test case now use +``test.support.calcobjsize`` to calculate size of PyUnicode structs. +:func:`sys.getsizeof` may return different size when string has UTF-8 +memory. + +.. + +.. date: 2022-06-05-10-16-45 +.. gh-issue: 90473 +.. nonce: QMu7A8 +.. section: Tests + +WASI does not have a ``chmod(2)`` syscall. :func:`os.chmod` is now a dummy +function on WASI. Skip all tests that depend on working :func:`os.chmod`. + +.. + +.. 
date: 2022-06-04-12-05-31 +.. gh-issue: 90473 +.. nonce: RSpjF7 +.. section: Tests + +Skip tests on WASI that require symlinks with absolute paths. + +.. + +.. date: 2022-06-03-16-26-04 +.. gh-issue: 57539 +.. nonce: HxWgYO +.. section: Tests + +Increase calendar test coverage for +:meth:`calendar.LocaleTextCalendar.formatweekday`. + +.. + +.. date: 2022-06-03-14-18-37 +.. gh-issue: 90473 +.. nonce: 7iXVRK +.. section: Tests + +Skip symlink tests on WASI. wasmtime uses ``openat2(2)`` with +``RESOLVE_BENEATH`` flag, which prevents symlinks with absolute paths. + +.. + +.. date: 2022-06-03-12-22-44 +.. gh-issue: 89858 +.. nonce: ftBvjE +.. section: Tests + +Fix ``test_embed`` for out-of-tree builds. Patch by Kumar Aditya. + +.. + +.. date: 2022-05-25-23-07-15 +.. gh-issue: 92886 +.. nonce: Aki63_ +.. section: Tests + +Fixing tests that fail when running with optimizations (``-O``) in +``test_imaplib.py``. + +.. + +.. date: 2022-05-25-23-00-35 +.. gh-issue: 92886 +.. nonce: Y-vrWj +.. section: Tests + +Fixing tests that fail when running with optimizations (``-O``) in +``test_zipimport.py`` + +.. + +.. date: 2022-05-25-22-53-30 +.. gh-issue: 92886 +.. nonce: mIfdtz +.. section: Tests + +Fixing tests that fail when running with optimizations (``-O``) in +``test_py_compile.py`` + +.. + +.. date: 2022-05-25-22-43-11 +.. gh-issue: 92886 +.. nonce: 9HQb9e +.. section: Tests + +Fixing tests that fail when running with optimizations (``-O``) in +``test_sys_settrace.py``. + +.. + +.. date: 2022-05-25-22-34-10 +.. gh-issue: 92886 +.. nonce: 1Lkt8S +.. section: Tests + +Fixing tests that fail when running with optimizations (``-O``) in +``_test_multiprocessing.py`` + +.. + +.. date: 2022-05-12-05-51-06 +.. gh-issue: 92670 +.. nonce: 7L43Z_ +.. section: Tests + +Skip ``test_shutil.TestCopy.test_copyfile_nonexistent_dir`` test on AIX as +the test uses a trailing slash to force the OS consider the path as a +directory, but on AIX the trailing slash has no effect and is considered as +a file. + +.. + +.. date: 2022-05-08-15-40-41 +.. gh-issue: 92514 +.. nonce: Xbf5JY +.. section: Tests + +Remove unused ``test.support.BasicTestRunner``. Patch by Jelle Zijlstra. + +.. + +.. bpo: 47016 +.. date: 2022-03-14-23-28-17 +.. nonce: K-t2QX +.. section: Tests + +Create a GitHub Actions workflow for verifying bundled pip and setuptools. +Patch by Illia Volochii and Adam Turner. + +.. + +.. date: 2022-09-20-12-43-44 +.. gh-issue: 96761 +.. nonce: IF29kR +.. section: Build + +Fix the build process of clang compiler for :program:`_bootstrap_python` if +LTO optimization is applied. Patch by Matthias Görgens and Dong-hee Na. + +.. + +.. date: 2022-09-17-11-19-24 +.. gh-issue: 96883 +.. nonce: p_gr62 +.. section: Build + +``wasm32-emscripten`` builds for browsers now include +:mod:`concurrent.futures` for :mod:`asyncio` and :mod:`unittest.mock`. + +.. + +.. date: 2022-09-12-18-34-51 +.. gh-issue: 85936 +.. nonce: tX4VCU +.. section: Build + +CPython now uses the ThinLTO option as the default policy if the Clang +compiler accepts the flag. Patch by Dong-hee Na. + +.. + +.. date: 2022-09-11-14-23-49 +.. gh-issue: 96729 +.. nonce: W4uBWL +.. section: Build + +Ensure that Windows releases built with ``Tools\msi\buildrelease.bat`` are +upgradable to and from official Python releases. + +.. + +.. date: 2022-08-26-11-50-03 +.. gh-issue: 96269 +.. nonce: x_J5h0 +.. section: Build + +Shared module targets now depend on new ``MODULE_DEPS`` variable, which +includes ``EXPORTSYMS``. 
This fixes a build order issue on unsupported AIX +platform. + +.. + +.. date: 2022-08-26-11-09-11 +.. gh-issue: 84461 +.. nonce: Nsdn_R +.. section: Build + +``wasm32-emscripten`` platform no longer builds :mod:`resource` module, +:func:`~os.getresuid`, :func:`~os.getresgid`, and their setters. The APIs +are stubs and not functional. + +.. + +.. date: 2022-08-15-10-56-07 +.. gh-issue: 95973 +.. nonce: Bsswsc +.. section: Build + +Add a new ``--with-dsymutil`` configure option to to link debug information +in macOS. Patch by Pablo Galindo. + +.. + +.. date: 2022-08-12-13-06-03 +.. gh-issue: 90536 +.. nonce: qMpF6p +.. section: Build + +Use the BOLT post-link optimizer to improve performance, particularly on +medium-to-large applications. + +.. + +.. date: 2022-08-04-15-29-35 +.. gh-issue: 93744 +.. nonce: svRuqm +.. section: Build + +Remove the ``configure --with-cxx-main`` build option: it didn't work for +many years. Remove the ``MAINCC`` variable from ``configure`` and +``Makefile``. Patch by Victor Stinner. + +.. + +.. date: 2022-07-26-18-13-34 +.. gh-issue: 94801 +.. nonce: 9fREfy +.. section: Build + +Fix a regression in ``configure`` script that caused some header checks to +ignore custom ``CPPFLAGS``. The regression was introduced in :gh:`94802`. + +.. + +.. date: 2022-07-25-09-48-43 +.. gh-issue: 95145 +.. nonce: ZNS3dj +.. section: Build + +wasm32-wasi builds no longer depend on WASIX's pthread stubs. Python now has +its own stubbed pthread API. + +.. + +.. date: 2022-07-25-08-59-35 +.. gh-issue: 95174 +.. nonce: g8woUW +.. section: Build + +Python now detects missing ``dup`` function in WASI and works around some +missing :mod:`errno`, :mod:`select`, and :mod:`socket` constants. + +.. + +.. date: 2022-07-23-21-39-09 +.. gh-issue: 95174 +.. nonce: 7cYMZR +.. section: Build + +Python now skips missing :mod:`socket` functions and methods on WASI. WASI +can only create sockets from existing fd / accept and has no netdb. + +.. + +.. date: 2022-07-21-09-17-01 +.. gh-issue: 95085 +.. nonce: E9x2S_ +.. section: Build + +Platforms ``wasm32-unknown-emscripten`` and ``wasm32-unknown-wasi`` have +been promoted to :pep:`11` tier 3 platform support. + +.. + +.. date: 2022-07-14-11-13-26 +.. gh-issue: 94847 +.. nonce: s3Kr5p +.. section: Build + +Fixed ``_decimal`` module build issue on GCC when compiling with LTO and +pydebug. Debug builds no longer force inlining of functions. + +.. + +.. date: 2022-07-14-02-45-44 +.. gh-issue: 94841 +.. nonce: lLRTdf +.. section: Build + +Fix the possible performance regression of :c:func:`PyObject_Free` compiled +with MSVC version 1932. + +.. + +.. date: 2022-07-13-10-13-10 +.. gh-issue: 94801 +.. nonce: 3xUB24 +.. section: Build + +``configure`` now uses custom flags like ``ZLIB_CFLAGS`` and ``ZLIB_LIBS`` +when searching for headers and libraries. + +.. + +.. date: 2022-07-12-13-39-18 +.. gh-issue: 94773 +.. nonce: koHKm5 +.. section: Build + +``deepfreeze.py`` now supports code object with frozensets that contain +incompatible, unsortable types. + +.. + +.. date: 2022-07-08-10-28-23 +.. gh-issue: 94682 +.. nonce: ZtGt_0 +.. section: Build + +Build and test with OpenSSL 1.1.1q + +.. + +.. date: 2022-06-30-17-18-23 +.. gh-issue: 90005 +.. nonce: EIOOla +.. section: Build + +Dependencies of :mod:`readline` and :mod:`curses` module are now detected in +``configure`` script with ``pkg-config``. Only ``ncurses`` / ``ncursesw`` +are detected automatically. The old ``curses`` library is not configured +automatically. 
Workaround for missing ``termcap`` or ``tinfo`` library has +been removed. + +.. + +.. date: 2022-06-30-17-00-54 +.. gh-issue: 90005 +.. nonce: iiq5qD +.. section: Build + +Fix building ``_ctypes`` extension without ``pkg-config``. + +.. + +.. date: 2022-06-30-09-57-39 +.. gh-issue: 90005 +.. nonce: 9-pQyR +.. section: Build + +``_dbm`` module dependencies are now detected by configure. + +.. + +.. date: 2022-06-29-08-58-31 +.. gh-issue: 94404 +.. nonce: 3MadM6 +.. section: Build + +``makesetup`` now works around an issue with sed on macOS and uses correct +CFLAGS for object files that end up in a shared extension. Module CFLAGS are +used before PY_STDMODULE_CFLAGS to avoid clashes with system headers. + +.. + +.. date: 2022-06-28-09-42-10 +.. gh-issue: 93939 +.. nonce: _VWxKW +.. section: Build + +C extension modules are now built by ``configure`` and ``make`` instead of +``distutils`` and ``setup.py``. + +.. + +.. date: 2022-06-27-11-57-15 +.. gh-issue: 93939 +.. nonce: rv7s8W +.. section: Build + +The ``2to3``, ``idle``, and ``pydoc`` scripts are now generated and +installed by ``Makefile`` instead of ``setup.py``. + +.. + +.. date: 2022-06-25-23-25-47 +.. gh-issue: 94280 +.. nonce: YhEyW_ +.. section: Build + +Updated pegen regeneration script on Windows to find and use Python 3.9 or +higher. Prior to this, pegen regeneration already required 3.9 or higher, +but the script may have used lower versions of Python. + +.. + +.. date: 2022-06-08-14-28-03 +.. gh-issue: 93584 +.. nonce: 0xfHOK +.. section: Build + +Address race condition in ``Makefile`` when installing a PGO build. All +``test`` and ``install`` targets now depend on ``all`` target. + +.. + +.. date: 2022-06-04-12-53-53 +.. gh-issue: 93491 +.. nonce: ehM211 +.. section: Build + +``configure`` now detects and reports :pep:`11` support tiers. + +.. + +.. date: 2022-05-31-18-04-58 +.. gh-issue: 69093 +.. nonce: 6lSa0C +.. section: Build + +Fix ``Modules/Setup.stdlib.in`` rule for ``_sqlite3`` extension. + +.. + +.. date: 2022-05-25-13-56-00 +.. gh-issue: 93207 +.. nonce: B9Rubf +.. section: Build + +``va_start()`` with two parameters, like ``va_start(args, format),`` is now +required to build Python. ``va_start()`` is no longer called with a single +parameter. Patch by Kumar Aditya. + +.. + +.. date: 2022-05-25-05-46-00 +.. gh-issue: 93202 +.. nonce: T37jtj +.. section: Build + +Python now always use the ``%zu`` and ``%zd`` printf formats to format a +``size_t`` or ``Py_ssize_t`` number. Building Python 3.12 requires a C11 +compiler, so these printf formats are now always supported. Patch by Victor +Stinner. + +.. + +.. date: 2022-05-12-10-19-15 +.. gh-issue: 90473 +.. nonce: -syvqK +.. section: Build + +Disable pymalloc and increase stack size on ``wasm32-wasi``. + +.. + +.. bpo: 34449 +.. date: 2018-08-21-11-10-18 +.. nonce: Z3qm3c +.. section: Build + +Drop invalid compiler switch ``-fPIC`` for HP aCC on HP-UX. Patch by Michael +Osipov. + +.. + +.. date: 2022-10-19-20-00-28 +.. gh-issue: 98360 +.. nonce: O2m6YG +.. section: Windows + +Fixes :mod:`multiprocessing` spawning child processes on Windows from a +virtual environment to ensure that child processes that also use +:mod:`multiprocessing` to spawn more children will recognize that they are +in a virtual environment. + +.. + +.. date: 2022-10-19-19-35-37 +.. gh-issue: 98414 +.. nonce: FbHZuS +.. section: Windows + +Fix :file:`py.exe` launcher handling of ``-V:/`` option when +default preferences have been set in environment variables or configuration +files. + +.. + +.. 
date: 2022-10-02-11-59-23 +.. gh-issue: 97728 +.. nonce: dIdlPE +.. section: Windows + +Fix possible crashes caused by the use of uninitialized variables when pass +invalid arguments in :func:`os.system` on Windows and in Windows-specific +modules (like ``winreg``). + +.. + +.. date: 2022-09-29-23-08-49 +.. gh-issue: 90989 +.. nonce: no89Q2 +.. section: Windows + +Made :ref:`launcher` install per-user by default (unless an all users +install already exists), and clarify some text in the installer. + +.. + +.. date: 2022-09-29-22-27-04 +.. gh-issue: 97649 +.. nonce: bI7OQU +.. section: Windows + +The ``Tools`` directory is no longer installed on Windows + +.. + +.. date: 2022-09-23-15-40-04 +.. gh-issue: 96965 +.. nonce: CsnEGs +.. section: Windows + +Update libffi to 3.4.3 + +.. + +.. date: 2022-09-07-00-11-33 +.. gh-issue: 96577 +.. nonce: kV4K_1 +.. section: Windows + +Fixes a potential buffer overrun in :mod:`msilib`. + +.. + +.. date: 2022-09-05-18-32-47 +.. gh-issue: 96559 +.. nonce: 561sUd +.. section: Windows + +Fixes the Windows launcher not using the compatible interpretation of +default tags found in configuration files when no tag was passed to the +command. + +.. + +.. date: 2022-08-30-12-01-51 +.. gh-issue: 94781 +.. nonce: OxO-Gr +.. section: Windows + +Fix :file:`pcbuild.proj` to clean previous instances of output files in +``Python\deepfreeze`` and ``Python\frozen_modules`` directories on Windows. +Patch by Charlie Zhao. + +.. + +.. date: 2022-08-26-00-11-18 +.. gh-issue: 89545 +.. nonce: zmJMY_ +.. section: Windows + +Updates :mod:`platform` code getting the Windows version to use native +Windows Management Instrumentation (WMI) queries to determine OS version, +type, and architecture. + +.. + +.. date: 2022-08-10-22-46-48 +.. gh-issue: 95733 +.. nonce: 2_urOp +.. section: Windows + +Make certain requirements of the Windows Store package optional to allow +installing on earlier updates of Windows. + +.. + +.. date: 2022-08-04-18-47-54 +.. gh-issue: 95656 +.. nonce: VJ1d13 +.. section: Windows + +Enable the :meth:`~sqlite3.Connection.enable_load_extension` :mod:`sqlite3` +API. + +.. + +.. date: 2022-08-04-01-12-27 +.. gh-issue: 95587 +.. nonce: Fvdv5q +.. section: Windows + +Fixes some issues where the Windows installer would incorrectly detect +certain features of an existing install when upgrading. + +.. + +.. date: 2022-08-03-00-49-46 +.. gh-issue: 94399 +.. nonce: KvxHc0 +.. section: Windows + +Restores the behaviour of :ref:`launcher` for ``/usr/bin/env`` shebang +lines, which will now search :envvar:`PATH` for an executable matching the +given command. If none is found, the usual search process is used. + +.. + +.. date: 2022-07-30-14-18-33 +.. gh-issue: 95445 +.. nonce: mjrTaq +.. section: Windows + +Fixes the unsuccessful removal of the HTML document directory when +uninstalling with Windows msi. + +.. + +.. date: 2022-07-28-20-21-38 +.. gh-issue: 95359 +.. nonce: ywMrgu +.. section: Windows + +Fix :ref:`launcher` handling of :file:`py.ini` commands (it was incorrectly +expecting a ``py_`` prefix on keys) and crashes when reading per-user +configuration file. + +.. + +.. date: 2022-07-26-20-33-12 +.. gh-issue: 95285 +.. nonce: w6fa22 +.. section: Windows + +Fix :ref:`launcher` handling of command lines where it is only passed a +short executable name. + +.. + +.. date: 2022-07-16-16-18-32 +.. gh-issue: 90844 +.. nonce: vwITT3 +.. section: Windows + +Allow virtual environments to correctly launch when they have spaces in the +path. + +.. + +.. 
date: 2022-07-12-20-45-43 +.. gh-issue: 94772 +.. nonce: uNMmdG +.. section: Windows + +Fix incorrect handling of shebang lines in py.exe launcher + +.. + +.. date: 2022-06-20-22-32-14 +.. gh-issue: 94018 +.. nonce: bycC3A +.. section: Windows + +:mod:`zipfile` will now remove trailing spaces from path components when +extracting files on Windows. + +.. + +.. date: 2022-06-15-01-03-52 +.. gh-issue: 93824 +.. nonce: mR4mxu +.. section: Windows + +Drag and drop of files onto Python files in Windows Explorer has been +enabled for Windows ARM64. + +.. + +.. date: 2022-05-28-19-36-13 +.. gh-issue: 43414 +.. nonce: NGMJ3g +.. section: Windows + +:func:`os.get_terminal_size` now attempts to read the size from any provided +handle, rather than only supporting file descriptors 0, 1 and 2. + +.. + +.. date: 2022-05-19-21-44-25 +.. gh-issue: 92817 +.. nonce: Jrf-Kv +.. section: Windows + +Ensures that :file:`py.exe` will prefer an active virtual environment over +default tags specified with environment variables or through a +:file:`py.ini` file. + +.. + +.. date: 2022-05-19-14-01-30 +.. gh-issue: 92984 +.. nonce: Dsxnlr +.. section: Windows + +Explicitly disable incremental linking for non-Debug builds + +.. + +.. date: 2022-05-16-11-45-06 +.. gh-issue: 92841 +.. nonce: NQx107 +.. section: Windows + +:mod:`asyncio` no longer throws ``RuntimeError: Event loop is closed`` on +interpreter exit after asynchronous socket activity. Patch by Oleg Iarygin. + +.. + +.. bpo: 46907 +.. date: 2022-05-05-06-27-59 +.. nonce: IW-uvT +.. section: Windows + +Update Windows installer to use SQLite 3.38.4. + +.. + +.. date: 2022-04-12-18-35-20 +.. gh-issue: 91061 +.. nonce: x40hSK +.. section: Windows + +Accept os.PathLike for the argument to winsound.PlaySound + +.. + +.. bpo: 42658 +.. date: 2022-03-20-15-47-35 +.. nonce: 16eXtb +.. section: Windows + +Support native Windows case-insensitive path comparisons by using +``LCMapStringEx`` instead of :func:`str.lower` in :func:`ntpath.normcase`. +Add ``LCMapStringEx`` to the :mod:`_winapi` module. + +.. + +.. bpo: 38704 +.. date: 2020-01-10-23-33-03 +.. nonce: 2Idtdn +.. section: Windows + +Prevent installation on unsupported Windows versions. + +.. + +.. date: 2022-10-05-15-26-58 +.. gh-issue: 97897 +.. nonce: Rf-C6u +.. section: macOS + +The macOS 13 SDK includes support for the ``mkfifoat`` and ``mknodat`` +system calls. Using the ``dir_fd`` option with either :func:`os.mkfifo` or +:func:`os.mknod` could result in a segfault if cpython is built with the +macOS 13 SDK but run on an earlier version of macOS. Prevent this by adding +runtime support for detection of these system calls ("weaklinking") as is +done for other newer syscalls on macOS. + +.. + +.. date: 2022-10-15-21-20-40 +.. gh-issue: 97527 +.. nonce: otAHJM +.. section: IDLE + +Fix a bug in the previous bugfix that caused IDLE to not start when run with +3.10.8, 3.12.0a1, and at least Microsoft Python 3.10.2288.0 installed +without the Lib/test package. 3.11.0 was never affected. + +.. + +.. date: 2022-08-04-20-07-51 +.. gh-issue: 65802 +.. nonce: xnThWe +.. section: IDLE + +Document handling of extensions in Save As dialogs. + +.. + +.. date: 2022-08-01-23-31-48 +.. gh-issue: 95191 +.. nonce: U7vryB +.. section: IDLE + +Include prompts when saving Shell (interactive input and output). + +.. + +.. date: 2022-07-31-22-15-14 +.. gh-issue: 95511 +.. nonce: WX6PmB +.. section: IDLE + +Fix the Shell context menu copy-with-prompts bug of copying an extra line +when one selects whole lines. + +.. + +.. 
date: 2022-07-30-15-10-39 +.. gh-issue: 95471 +.. nonce: z3scVG +.. section: IDLE + +In the Edit menu, move ``Select All`` and add a new separator. + +.. + +.. date: 2022-07-29-11-08-52 +.. gh-issue: 95411 +.. nonce: dazlqH +.. section: IDLE + +Enable using IDLE's module browser with .pyw files. + +.. + +.. date: 2022-07-28-18-56-57 +.. gh-issue: 89610 +.. nonce: hcosiM +.. section: IDLE + +Add .pyi as a recognized extension for IDLE on macOS. This allows opening +stub files by double clicking on them in the Finder. + +.. + +.. date: 2022-10-07-22-06-11 +.. gh-issue: 68686 +.. nonce: 6KNIQ4 +.. section: Tools/Demos + +Remove ptags and eptags scripts. + +.. + +.. date: 2022-09-30-18-35-11 +.. gh-issue: 97681 +.. nonce: -KO1Ba +.. section: Tools/Demos + +Remove the ``Tools/demo/`` directory which contained old demo scripts. A +copy can be found in the `old-demos project +`_. Patch by Victor Stinner. + +.. + +.. date: 2022-09-30-14-30-12 +.. gh-issue: 97669 +.. nonce: gvbgcg +.. section: Tools/Demos + +Remove outdated example scripts of the ``Tools/scripts/`` directory. A copy +can be found in the `old-demos project +`_. Patch by Victor Stinner. + +.. + +.. date: 2022-08-29-17-25-13 +.. gh-issue: 95853 +.. nonce: Ce17cT +.. section: Tools/Demos + +The ``wasm_build.py`` script now pre-builds Emscripten ports, checks for +broken EMSDK versions, and warns about pkg-config env vars. + +.. + +.. date: 2022-08-10-17-08-43 +.. gh-issue: 95853 +.. nonce: HCjC2m +.. section: Tools/Demos + +The new tool ``Tools/wasm/wasm_builder.py`` automates configure, compile, +and test steps for building CPython on WebAssembly platforms. + +.. + +.. date: 2022-08-05-23-25-59 +.. gh-issue: 95731 +.. nonce: N2KohU +.. section: Tools/Demos + +Fix handling of module docstrings in :file:`Tools/i18n/pygettext.py`. + +.. + +.. date: 2022-07-04-10-02-02 +.. gh-issue: 93939 +.. nonce: U6sW6H +.. section: Tools/Demos + +Add script ``Tools/scripts/check_modules.py`` to check and validate builtin +and shared extension modules. The script also handles ``Modules/Setup`` and +will eventually replace ``setup.py``. + +.. + +.. date: 2022-07-04-01-37-42 +.. gh-issue: 94538 +.. nonce: 1rgy1Y +.. section: Tools/Demos + +Fix Argument Clinic output to custom file destinations. Patch by Erlend E. +Aasland. + +.. + +.. date: 2022-06-29-22-47-11 +.. gh-issue: 94430 +.. nonce: hdov8L +.. section: Tools/Demos + +Allow parameters named ``module`` and ``self`` with custom C names in +Argument Clinic. Patch by Erlend E. Aasland + +.. + +.. date: 2022-06-19-14-56-33 +.. gh-issue: 86087 +.. nonce: R8MkRy +.. section: Tools/Demos + +The ``Tools/scripts/parseentities.py`` script used to parse HTML4 entities +has been removed. + +.. + +.. date: 2022-10-18-16-16-27 +.. gh-issue: 98393 +.. nonce: 55u4BF +.. section: C API + +The :c:func:`PyUnicode_FSDecoder` function no longer accepts bytes-like +paths, like :class:`bytearray` and :class:`memoryview` types: only the exact +:class:`bytes` type is accepted for bytes strings. Patch by Victor Stinner. + +.. + +.. date: 2022-10-05-10-43-32 +.. gh-issue: 91051 +.. nonce: ODDRsQ +.. section: C API + +Add :c:func:`PyType_Watch` and related APIs to allow callbacks on +:c:func:`PyType_Modified`. + +.. + +.. date: 2022-10-03-20-33-24 +.. gh-issue: 95756 +.. nonce: SSmXlG +.. section: C API + +Lazily create and cache ``co_`` attributes for better performance for code +getters. + +.. + +.. date: 2022-09-20-01-04-57 +.. gh-issue: 96512 +.. nonce: msZTjF +.. 
section: C API + +Configuration for the :ref:`integer string conversion length limitation +` now lives in the PyConfig C API struct. + +.. + +.. date: 2022-08-16-16-54-42 +.. gh-issue: 95589 +.. nonce: 6xE1ar +.. section: C API + +Extensions classes that set ``tp_dictoffset`` and ``tp_weaklistoffset`` lose +the support for multiple inheritance, but are now safe. Extension classes +should use :const:`Py_TPFLAGS_MANAGED_DICT` and +:const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. + +.. + +.. date: 2022-08-08-14-36-31 +.. gh-issue: 95781 +.. nonce: W_G8YW +.. section: C API + +An unrecognized format character in :c:func:`PyUnicode_FromFormat` and +:c:func:`PyUnicode_FromFormatV` now sets a :exc:`SystemError`. In previous +versions it caused all the rest of the format string to be copied as-is to +the result string, and any extra arguments discarded. + +.. + +.. date: 2022-08-03-14-39-08 +.. gh-issue: 92678 +.. nonce: ozFTEx +.. section: C API + +Restore the 3.10 behavior for multiple inheritance of C extension classes +that store their dictionary at the end of the struct. + +.. + +.. date: 2022-08-03-13-01-57 +.. gh-issue: 92678 +.. nonce: DLwONN +.. section: C API + +Support C extensions using managed dictionaries by setting the +``Py_TPFLAGS_MANAGED_DICT`` flag. + +.. + +.. date: 2022-08-01-16-21-39 +.. gh-issue: 93274 +.. nonce: QoDHEu +.. section: C API + +API for implementing vectorcall (:c:data:`Py_TPFLAGS_HAVE_VECTORCALL`, +:c:func:`PyVectorcall_NARGS` and :c:func:`PyVectorcall_Call`) was added to +the limited API and stable ABI. + +.. + +.. date: 2022-07-31-21-58-27 +.. gh-issue: 95504 +.. nonce: wy7B1F +.. section: C API + +Fix sign placement when specifying width or precision in +:c:func:`PyUnicode_FromFormat` and :c:func:`PyUnicode_FromFormatV`. Patch by +Philip Georgi. + +.. + +.. date: 2022-07-29-15-24-45 +.. gh-issue: 93012 +.. nonce: -DdGEy +.. section: C API + +The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class +when the class's :py:meth:`~object.__call__` method is reassigned. This +makes vectorcall safe to use with mutable types (i.e. heap types without the +:const:`immutable ` flag). Mutable types that do +not override :c:member:`~PyTypeObject.tp_call` now inherit the +:const:`Py_TPFLAGS_HAVE_VECTORCALL` flag. + +.. + +.. date: 2022-07-29-10-41-59 +.. gh-issue: 95388 +.. nonce: aiRSgr +.. section: C API + +Creating :c:data:`immutable types ` with mutable +bases is deprecated and is planned to be disabled in Python 3.14. + +.. + +.. date: 2022-07-25-15-54-27 +.. gh-issue: 92678 +.. nonce: ziZpxz +.. section: C API + +Adds unstable C-API functions ``_PyObject_VisitManagedDict`` and +``_PyObject_ClearManagedDict`` to allow C extensions to allow the VM to +manage their object's dictionaries. + +.. + +.. date: 2022-07-19-22-37-40 +.. gh-issue: 94936 +.. nonce: LGlmKv +.. section: C API + +Added :c:func:`PyCode_GetVarnames`, :c:func:`PyCode_GetCellvars` and +:c:func:`PyCode_GetFreevars` for accessing ``co_varnames``, ``co_cellvars`` +and ``co_freevars`` respectively via the C API. + +.. + +.. date: 2022-07-17-18-21-40 +.. gh-issue: 94930 +.. nonce: gPFGDL +.. section: C API + +Fix ``SystemError`` raised when :c:func:`PyArg_ParseTupleAndKeywords` is +used with ``#`` in ``(...)`` but without ``PY_SSIZE_T_CLEAN`` defined. + +.. + +.. date: 2022-07-12-17-39-32 +.. gh-issue: 94731 +.. nonce: 9CPJNU +.. section: C API + +Python again uses C-style casts for most casting operations when compiled +with C++. This may trigger compiler warnings, if they are enabled with e.g. 
+``-Wold-style-cast`` or ``-Wzero-as-null-pointer-constant`` options for +``g++``. + +.. + +.. date: 2022-06-17-13-41-38 +.. gh-issue: 93937 +.. nonce: uKVTEh +.. section: C API + +The following frame functions and type are now directly available with +``#include ``, it's no longer needed to add ``#include +``: + +* :c:func:`PyFrame_Check` +* :c:func:`PyFrame_GetBack` +* :c:func:`PyFrame_GetBuiltins` +* :c:func:`PyFrame_GetGenerator` +* :c:func:`PyFrame_GetGlobals` +* :c:func:`PyFrame_GetLasti` +* :c:func:`PyFrame_GetLocals` +* :c:type:`PyFrame_Type` + +Patch by Victor Stinner. + +.. + +.. date: 2022-06-13-21-37-31 +.. gh-issue: 91321 +.. nonce: DgJFvS +.. section: C API + +Fix the compatibility of the Python C API with C++ older than C++11. Patch +by Victor Stinner. + +.. + +.. date: 2022-06-10-23-41-48 +.. gh-issue: 91731 +.. nonce: fhYUQG +.. section: C API + +Avoid defining the ``static_assert`` when compiling with C++ 11, where this +is a keyword and redefining it can lead to undefined behavior. Patch by +Pablo Galindo + +.. + +.. date: 2022-06-10-16-50-27 +.. gh-issue: 89546 +.. nonce: mX1f10 +.. section: C API + +:c:func:`PyType_FromMetaclass` (and other ``PyType_From*`` functions) now +check that offsets and the base class's +:c:member:`~PyTypeObject.tp_basicsize` fit in the new class's +``tp_basicsize``. + +.. + +.. date: 2022-06-06-16-04-14 +.. gh-issue: 93503 +.. nonce: MHJTu8 +.. section: C API + +Add two new public functions to the public C-API, +:c:func:`PyEval_SetProfileAllThreads` and +:c:func:`PyEval_SetTraceAllThreads`, that allow to set tracing and profiling +functions in all running threads in addition to the calling one. Also, two +analogous functions to the :mod:`threading` module +(:func:`threading.setprofile_all_threads` and +:func:`threading.settrace_all_threads`) that allow to do the same from +Python. Patch by Pablo Galindo + +.. + +.. date: 2022-06-04-13-15-41 +.. gh-issue: 93442 +.. nonce: 4M4NDb +.. section: C API + +Add C++ overloads for _Py_CAST_impl() to handle 0/NULL. This will allow C++ +extensions that pass 0 or NULL to macros using _Py_CAST() to continue to +compile. + +.. + +.. date: 2022-06-03-14-54-41 +.. gh-issue: 93466 +.. nonce: DDtH0X +.. section: C API + +Slot IDs in PyType_Spec may not be repeated. The documentation was updated +to mention this. For some cases of repeated slots, PyType_FromSpec and +related functions will now raise an exception. + +.. + +.. date: 2022-05-23-15-22-18 +.. gh-issue: 92898 +.. nonce: Qjc9d3 +.. section: C API + +Fix C++ compiler warnings when casting function arguments to ``PyObject*``. +Patch by Serge Guelton. + +.. + +.. date: 2022-05-23-13-33-18 +.. gh-issue: 93103 +.. nonce: ooD3Eb +.. section: C API + +Deprecate global configuration variables, like +:c:var:`Py_IgnoreEnvironmentFlag`, in the documentation: the +:c:func:`Py_InitializeFromConfig` API should be instead. Patch by Victor +Stinner. + +.. + +.. date: 2022-05-23-12-31-04 +.. gh-issue: 77782 +.. nonce: ugC8dn +.. section: C API + +Deprecate global configuration variable like +:c:var:`Py_IgnoreEnvironmentFlag`: the :c:func:`Py_InitializeFromConfig` API +should be instead. Patch by Victor Stinner. + +.. + +.. date: 2022-05-19-18-05-51 +.. gh-issue: 92913 +.. nonce: Ass1Hv +.. section: C API + +Ensures changes to :c:member:`PyConfig.module_search_paths` are ignored +unless :c:member:`PyConfig.module_search_paths_set` is set + +.. + +.. date: 2022-05-13-18-17-48 +.. gh-issue: 92781 +.. nonce: TVDr3- +.. 
section: C API + +Avoid mixing declarations and code in the C API to fix the compiler warning: +"ISO C90 forbids mixed declarations and code" +[-Werror=declaration-after-statement]. Patch by Victor Stinner. + +.. + +.. date: 2022-05-11-02-33-10 +.. gh-issue: 92651 +.. nonce: FIXLf0 +.. section: C API + +Remove the ``token.h`` header file. There was never any public tokenizer C +API. The ``token.h`` header file was only designed to be used by Python +internals. Patch by Victor Stinner. + +.. + +.. date: 2022-05-10-12-35-42 +.. gh-issue: 92536 +.. nonce: cAoRCZ +.. section: C API + +Remove legacy Unicode APIs based on ``Py_UNICODE*``. + +.. + +.. date: 2022-05-09-23-16-38 +.. gh-issue: 85858 +.. nonce: VIcNDL +.. section: C API + +Remove the ``PyUnicode_InternImmortal()`` function and the +``SSTATE_INTERNED_IMMORTAL`` macro. Patch by Victor Stinner. + +.. + +.. date: 2022-05-03-19-35-37 +.. gh-issue: 92193 +.. nonce: 61VoFL +.. section: C API + +Add new function :c:func:`PyFunction_SetVectorcall` to the C API which sets +the vectorcall field of a given :c:type:`PyFunctionObject`. + +Warning: extensions using this API must preserve the behavior of the +unaltered function! + +.. + +.. date: 2022-04-13-16-10-55 +.. gh-issue: 59121 +.. nonce: -B7mKp +.. section: C API + +Fixed an assert that prevented ``PyRun_InteractiveOne`` from providing +tracebacks when parsing from the provided FD. + +.. + +.. bpo: 45383 +.. date: 2021-10-05-21-59-43 +.. nonce: TVClgf +.. section: C API + +The :c:func:`PyType_FromSpec` API will now find and use a metaclass based on +the provided bases. An error will be raised if there is a metaclass +conflict. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst new file mode 100644 index 00000000000000..4029856ee332a9 --- /dev/null +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -0,0 +1,1100 @@ +.. date: 2022-11-04-09-29-36 +.. gh-issue: 98433 +.. nonce: l76c5G +.. release date: 2022-11-14 +.. section: Security + +The IDNA codec decoder used on DNS hostnames by :mod:`socket` or +:mod:`asyncio` related name resolution functions no longer involves a +quadratic algorithm. This prevents a potential CPU denial of service if an +out-of-spec excessive length hostname involving bidirectional characters +were decoded. Some protocols such as :mod:`urllib` http ``3xx`` redirects +potentially allow for an attacker to supply such a name. + +Individual labels within an IDNA encoded DNS name will now raise an error +early during IDNA decoding if they are longer than 1024 unicode characters +given that each decoded DNS label must be 63 or fewer characters and the +entire decoded DNS name is limited to 255. Only an application presenting a +hostname or label consisting primarily of :rfc:`3454` section 3.1 "Nothing" +characters to be removed would run into of this new limit. See also +:rfc:`5894` section 6 and :rfc:`3491`. + +.. + +.. date: 2022-10-26-21-04-23 +.. gh-issue: 98739 +.. nonce: keBWcY +.. section: Security + +Update bundled libexpat to 2.5.0 + +.. + +.. date: 2022-11-11-14-48-17 +.. gh-issue: 81057 +.. nonce: ik4iOv +.. section: Core and Builtins + +The docs clearly say that ``PyImport_Inittab``, +:c:func:`PyImport_AppendInittab`, and :c:func:`PyImport_ExtendInittab` +should not be used after :c:func:`Py_Initialize` has been called. We now +enforce this for the two functions. Additionally, the runtime now uses an +internal copy of ``PyImport_Inittab``, to guard against modification. + +.. + +.. date: 2022-11-09-12-07-24 +.. gh-issue: 99298 +.. nonce: NeArAJ +.. 
section: Core and Builtins + +Fix an issue that could potentially cause incorrect error handling for some +bytecode instructions. + +.. + +.. date: 2022-11-08-17-47-10 +.. gh-issue: 99254 +.. nonce: RSvyFt +.. section: Core and Builtins + +The compiler now removes all unused constants from code objects (except the +first one, which may be a docstring). + +.. + +.. date: 2022-11-08-16-35-25 +.. gh-issue: 99205 +.. nonce: 2YOoFT +.. section: Core and Builtins + +Fix an issue that prevented :c:type:`PyThreadState` and +:c:type:`PyInterpreterState` memory from being freed properly. + +.. + +.. date: 2022-11-07-14-16-59 +.. gh-issue: 81057 +.. nonce: 3uKlLQ +.. section: Core and Builtins + +The 18 global C variables holding the state of the allocators have been +moved to ``_PyRuntimeState``. This is a strictly internal change with no +change in behavior. + +.. + +.. date: 2022-11-07-10-29-41 +.. gh-issue: 99181 +.. nonce: bfG4bI +.. section: Core and Builtins + +Fix failure in :keyword:`except* ` with unhashable exceptions. + +.. + +.. date: 2022-11-07-08-17-12 +.. gh-issue: 99204 +.. nonce: Mf4hMD +.. section: Core and Builtins + +Fix calculation of :data:`sys._base_executable` when inside a POSIX virtual +environment using copies of the python binary when the base installation +does not provide the executable name used by the venv. Calculation will fall +back to alternative names ("python", "python."). + +.. + +.. date: 2022-11-06-22-59-02 +.. gh-issue: 96055 +.. nonce: TmQuJn +.. section: Core and Builtins + +Update :mod:`faulthandler` to emit an error message with the proper +unexpected signal number. Patch by Dong-hee Na. + +.. + +.. date: 2022-11-06-13-25-01 +.. gh-issue: 99153 +.. nonce: uE3CVL +.. section: Core and Builtins + +Fix location of :exc:`SyntaxError` for a :keyword:`try` block with both +:keyword:`except` and :keyword:`except* `. + +.. + +.. date: 2022-11-06-00-47-11 +.. gh-issue: 98686 +.. nonce: DBDy6U +.. section: Core and Builtins + +Merge the adaptive opcode logic into each instruction's unquickened variant, +and merge the logic in ``EXTENDED_ARG_QUICK`` into :opcode:`EXTENDED_ARG`. +With these changes, the quickening that happens at code object creation is +now only responsible for initializing warmup counters and inserting +superinstructions. + +.. + +.. date: 2022-11-06-00-17-58 +.. gh-issue: 99103 +.. nonce: bFA9BX +.. section: Core and Builtins + +Fix the error reporting positions of specialized traceback anchors when the +source line contains Unicode characters. + +.. + +.. date: 2022-11-05-18-36-27 +.. gh-issue: 99139 +.. nonce: cI9vV1 +.. section: Core and Builtins + +Improve the error suggestion for :exc:`NameError` exceptions for instances. +Now if a :exc:`NameError` is raised in a method and the instance has an +attribute that's exactly equal to the name in the exception, the suggestion +will include ``self.`` instead of the closest match in the method +scope. Patch by Pablo Galindo + +.. + +.. date: 2022-11-03-13-11-17 +.. gh-issue: 98401 +.. nonce: CBS4nv +.. section: Core and Builtins + +Octal escapes with value larger than ``0o377`` (ex: ``"\477"``), deprecated +in Python 3.11, now produce a :exc:`SyntaxWarning`, instead of +:exc:`DeprecationWarning`. In a future Python version they will be +eventually a :exc:`SyntaxError`. Patch by Victor Stinner. + +.. + +.. date: 2022-11-02-17-02-06 +.. gh-issue: 98401 +.. nonce: y-dbVW +.. 
section: Core and Builtins + +A backslash-character pair that is not a valid escape sequence now generates +a :exc:`SyntaxWarning`, instead of :exc:`DeprecationWarning`. For example, +``re.compile("\d+\.\d+")`` now emits a :exc:`SyntaxWarning` (``"\d"`` is an +invalid escape sequence), use raw strings for regular expression: +``re.compile(r"\d+\.\d+")``. In a future Python version, :exc:`SyntaxError` +will eventually be raised, instead of :exc:`SyntaxWarning`. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-02-14-42-35 +.. gh-issue: 96793 +.. nonce: q0Oi74 +.. section: Core and Builtins + +Handle StopIteration and StopAsyncIteration raised in generator or +coroutines in the bytecode, rather than in wrapping C code. + +.. + +.. date: 2022-10-31-22-55-34 +.. gh-issue: 98931 +.. nonce: AoWZ-4 +.. section: Core and Builtins + +Improve the :exc:`SyntaxError` error message when the user types ``import x +from y`` instead of ``from y import x``. Patch by Pablo Galindo + +.. + +.. date: 2022-10-31-21-01-35 +.. gh-issue: 98852 +.. nonce: MYaRN6 +.. section: Core and Builtins + +Fix subscription of type aliases containing bare generic types or types like +:class:`~typing.TypeVar`: for example ``tuple[A, T][int]`` and +``tuple[TypeVar, T][int]``, where ``A`` is a generic type, and ``T`` is a +type variable. + +.. + +.. date: 2022-10-31-18-03-10 +.. gh-issue: 98925 +.. nonce: zpdjVd +.. section: Core and Builtins + +Lower the recursion depth for marshal on WASI to support (in-development) +wasmtime 2.0. + +.. + +.. date: 2022-10-28-14-52-55 +.. gh-issue: 98783 +.. nonce: iG0kMs +.. section: Core and Builtins + +Fix multiple crashes in debug mode when ``str`` subclasses are used instead +of ``str`` itself. + +.. + +.. date: 2022-10-28-13-59-51 +.. gh-issue: 98811 +.. nonce: XQypJa +.. section: Core and Builtins + +Use complete source locations to simplify detection of ``__future__`` +imports which are not at the beginning of the file. Also corrects the offset +in the exception raised in one case, which was off by one and impeded +highlighting. + +.. + +.. date: 2022-10-28-09-42-51 +.. gh-issue: 96793 +.. nonce: ucBfWO +.. section: Core and Builtins + +Add specialization of :opcode:`FOR_ITER` for generators. Saves multiple +layers of dispatch and checking to get from the :opcode:`FOR_ITER` +instruction in the caller to the :opcode:`RESUME` in the generator. + +.. + +.. date: 2022-10-27-16-42-16 +.. gh-issue: 98762 +.. nonce: Eb2kzg +.. section: Core and Builtins + +Fix source locations of :keyword:`match` sub-patterns. + +.. + +.. date: 2022-10-24-10-30-30 +.. gh-issue: 98586 +.. nonce: Tha5Iy +.. section: Core and Builtins + +Added the methods :c:func:`PyObject_Vectorcall` and +:c:func:`PyObject_VectorcallMethod` to the :ref:`Limited API ` along +with the auxiliary macro constant :const:`PY_VECTORCALL_ARGUMENTS_OFFSET`. + +The availability of these functions enables more efficient :PEP:`590` vector +calls from binary extension modules that avoid argument boxing/unboxing +overheads. + +.. + +.. date: 2022-10-21-11-28-53 +.. gh-issue: 99257 +.. nonce: nmcuf- +.. section: Core and Builtins + +Fix an issue where member descriptors (such as those for +:attr:`~object.__slots__`) could behave incorrectly or crash instead of +raising a :exc:`TypeError` when accessed via an instance of an invalid type. + +.. + +.. date: 2022-10-19-23-54-43 +.. gh-issue: 93143 +.. nonce: 1wCYub +.. 
section: Core and Builtins + +Rather than changing :attr:`~types.CodeType.co_code`, the interpreter will +now display a :exc:`RuntimeWarning` and assign :const:`None` to any fast +locals that are left unbound after jumps or :keyword:`del` statements +executed while tracing. + +.. + +.. date: 2022-10-19-15-59-08 +.. gh-issue: 96421 +.. nonce: e22y3r +.. section: Core and Builtins + +When calling into Python code from C code, through +:c:func:`PyEval_EvalFrameEx` or a related C-API function, a shim frame is +inserted into the call stack. This occurs in the +``_PyEval_EvalFrameDefault()`` function. The extra frame should be invisible +to all Python and most C extensions, but out-of-process profilers and +debuggers need to be aware of it. These shim frames can be detected by +checking ``frame->owner == FRAME_OWNED_BY_CSTACK``. + +Extensions implementing their own interpreters using PEP 523 need to be +aware of this shim frame and the changes to the semantics of +:opcode:`RETURN_VALUE`, :opcode:`YIELD_VALUE`, and +:opcode:`RETURN_GENERATOR`, which now clear the frame. + +.. + +.. date: 2022-10-19-01-01-08 +.. gh-issue: 98415 +.. nonce: ZS2eWh +.. section: Core and Builtins + +Fix detection of MAC addresses for :mod:`uuid` on certain OSs. Patch by +Chaim Sanders. + +.. + +.. date: 2022-10-16-13-26-46 +.. gh-issue: 98686 +.. nonce: D9Gu_Q +.. section: Core and Builtins + +Quicken all code objects, and specialize adaptive bytecode instructions more +aggressively. + +.. + +.. date: 2022-10-15-23-15-14 +.. gh-issue: 92119 +.. nonce: PMSwwG +.. section: Core and Builtins + +Print exception class name instead of its string representation when raising +errors from :mod:`ctypes` calls. + +.. + +.. date: 2022-10-15-22-25-20 +.. gh-issue: 91058 +.. nonce: Uo2kW- +.. section: Core and Builtins + +:exc:`ImportError` raised from failed ``from <module> import <name>`` now +includes suggestions for the value of ``<name>`` based on the available names +in ``<module>``. Patch by Pablo Galindo + +.. + +.. date: 2022-09-13-14-07-06 +.. gh-issue: 96793 +.. nonce: 7DLRSm +.. section: Core and Builtins + +The :opcode:`FOR_ITER` instruction now leaves the iterator on the stack on termination +of the loop. This is to assist specialization of loops for generators. + +.. + +.. date: 2022-09-09-16-32-58 +.. gh-issue: 90716 +.. nonce: z4yuYq +.. section: Core and Builtins + +Add _pylong.py module. It includes asymptotically faster algorithms that +can be used for operations on integers with many digits. It is used by +longobject.c to speed up some operations. + +.. + +.. date: 2022-07-30-14-10-27 +.. gh-issue: 95389 +.. nonce: nSGEkG +.. section: Core and Builtins + +Expose :data:`~socket.ETH_P_ALL` and some of the :ref:`ETHERTYPE_* constants +` in :mod:`socket`. Patch by Noam Cohen. + +.. + +.. date: 2022-06-10-16-37-44 +.. gh-issue: 93696 +.. nonce: 65BI2R +.. section: Core and Builtins + +Allow :mod:`pdb` to locate source for frozen modules in the standard +library. + +.. + +.. date: 2022-11-12-15-45-51 +.. gh-issue: 99418 +.. nonce: FxfAXS +.. section: Library + +Fix bug in :func:`urllib.parse.urlparse` that causes URL schemes that begin +with a digit, a plus sign, or a minus sign to be parsed incorrectly. + +.. + +.. date: 2022-11-11-18-23-41 +.. gh-issue: 94597 +.. nonce: m6vMDK +.. section: Library + +Deprecate :class:`asyncio.AbstractChildWatcher` to be removed in Python +3.14. Patch by Kumar Aditya. + +.. + +.. date: 2022-11-10-11-51-39 +.. gh-issue: 99305 +.. nonce: 6LzQc3 +.. section: Library + +Improve performance of :func:`secrets.token_hex`. + +.. 
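A small sketch of the :exc:`ImportError` suggestions described in the gh-91058 entry above; the exact wording of the hint is an implementation detail and may vary::

    # Misspell an attribute on purpose to trigger the new suggestion.
    try:
        from collections import chainmap  # should be ChainMap
    except ImportError as exc:
        # The message now ends with a hint such as "Did you mean: 'ChainMap'?"
        print(exc)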
+ +.. date: 2022-11-09-20-48-42 +.. gh-issue: 74044 +.. nonce: zBj26K +.. section: Library + +Fixed bug where :func:`inspect.signature` reported incorrect arguments for +decorated methods. + +.. + +.. date: 2022-11-09-12-16-35 +.. gh-issue: 99275 +.. nonce: klOqoL +.. section: Library + +Fix ``SystemError`` in :mod:`ctypes` when exception was not set during +``__init_subclass__``. + +.. + +.. date: 2022-11-09-08-40-52 +.. gh-issue: 99277 +.. nonce: J1P44O +.. section: Library + +Remove older version of ``_SSLProtocolTransport.get_write_buffer_limits`` in +:mod:`!asyncio.sslproto`. + +.. + +.. date: 2022-11-08-11-15-37 +.. gh-issue: 99248 +.. nonce: 1vt8xI +.. section: Library + +Fix negative numbers failing in ``verify()``. + +.. + +.. date: 2022-11-06-12-44-51 +.. gh-issue: 99155 +.. nonce: vLZOzi +.. section: Library + +Fix :class:`statistics.NormalDist` pickle with ``0`` and ``1`` protocols. + +.. + +.. date: 2022-11-05-23-16-15 +.. gh-issue: 93464 +.. nonce: ucd4vP +.. section: Library + +``enum.auto()`` is now correctly activated when combined with other +assignment values. E.g. ``ONE = auto(), 'some text'`` will now evaluate as +``(1, 'some text')``. + +.. + +.. date: 2022-11-05-17-16-40 +.. gh-issue: 99134 +.. nonce: Msgspf +.. section: Library + +Update the bundled copy of pip to version 22.3.1. + +.. + +.. date: 2022-11-03-15-28-07 +.. gh-issue: 92584 +.. nonce: m5ctkm +.. section: Library + +Remove the ``distutils`` package. It was deprecated in Python 3.10 by +:pep:`632` "Deprecate distutils module". For projects that still use +``distutils`` and cannot be updated to something else, the ``setuptools`` +project can be installed: it still provides ``distutils``. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-02-18-27-13 +.. gh-issue: 98999 +.. nonce: Ai2KDh +.. section: Library + +Now :mod:`_pyio` is consistent with :mod:`_io` in raising ``ValueError`` +when executing methods over closed buffers. + +.. + +.. date: 2022-11-02-05-54-02 +.. gh-issue: 83004 +.. nonce: 0v8iyw +.. section: Library + +Clean up refleak on failed module initialisation in :mod:`_zoneinfo`. + +.. + +.. date: 2022-11-02-05-53-25 +.. gh-issue: 83004 +.. nonce: qc_KHr +.. section: Library + +Clean up refleaks on failed module initialisation in :mod:`_pickle`. + +.. + +.. date: 2022-11-02-05-52-36 +.. gh-issue: 83004 +.. nonce: LBl79O +.. section: Library + +Clean up refleak on failed module initialisation in :mod:`_io`. + +.. + +.. date: 2022-10-31-12-34-03 +.. gh-issue: 98897 +.. nonce: rgNn4x +.. section: Library + +Fix memory leak in :func:`math.dist` when the two points don't have the same +dimension. Patch by Kumar Aditya. + +.. + +.. date: 2022-10-30-22-42-48 +.. gh-issue: 98878 +.. nonce: fgrykp +.. section: Library + +Use the frame-bound builtins when offering a name suggestion in +:mod:`traceback` to prevent crashing when ``__builtins__`` is not a dict. + +.. + +.. date: 2022-10-30-15-26-33 +.. gh-issue: 98139 +.. nonce: qtm-9T +.. section: Library + +In :mod:`importlib._bootstrap`, enhance namespace package repr to ``. + +.. + +.. date: 2022-10-29-09-42-20 +.. gh-issue: 90352 +.. nonce: t8QEPt +.. section: Library + +Fix ``_SelectorDatagramTransport`` to inherit from +:class:`~asyncio.DatagramTransport` in :mod:`asyncio`. Patch by Kumar +Aditya. + +.. + +.. date: 2022-10-29-03-40-18 +.. gh-issue: 98793 +.. nonce: WSPB4A +.. section: Library + +Fix argument typechecks in :func:`!_overlapped.WSAConnect` and +:func:`!_overlapped.Overlapped.WSASendTo` functions. + +.. + +.. date: 2022-10-28-23-44-17 +.. 
gh-issue: 98744 +.. nonce: sGHDWm +.. section: Library + +Prevent crashing in :mod:`traceback` when retrieving the byte-offset for +some source files that contain certain unicode characters. + +.. + +.. date: 2022-10-27-12-56-38 +.. gh-issue: 98740 +.. nonce: ZoqqGM +.. section: Library + +Fix internal error in the :mod:`re` module which in very rare circumstances +prevented compilation of a regular expression containing a :ref:`conditional +expression ` without the "else" branch. + +.. + +.. date: 2022-10-26-07-51-55 +.. gh-issue: 98703 +.. nonce: 0hW773 +.. section: Library + +Fix :meth:`asyncio.StreamWriter.drain` to call ``protocol.connection_lost`` +callback only once on Windows. + +.. + +.. date: 2022-10-25-20-17-34 +.. gh-issue: 98624 +.. nonce: YQUPFy +.. section: Library + +Add a mutex to unittest.mock.NonCallableMock to protect concurrent access to +mock attributes. + +.. + +.. date: 2022-10-25-07-00-31 +.. gh-issue: 98658 +.. nonce: nGABW9 +.. section: Library + +The :class:`array.array` class now supports subscripting, making it a +:term:`generic type`. + +.. + +.. date: 2022-10-15-10-43-45 +.. gh-issue: 98284 +.. nonce: SaVHTd +.. section: Library + +Improved :class:`TypeError` message for undefined abstract methods of a +:class:`abc.ABC` instance. The names of the missing methods are surrounded +by single-quotes to highlight them. + +.. + +.. date: 2022-10-10-07-07-31 +.. gh-issue: 96151 +.. nonce: K9fwoq +.. section: Library + +Allow ``BUILTINS`` to be a valid field name for frozen dataclasses. + +.. + +.. date: 2022-10-08-19-39-27 +.. gh-issue: 98086 +.. nonce: y---WC +.. section: Library + +Make sure ``patch.dict()`` can be applied on async functions. + +.. + +.. date: 2022-09-05-17-08-56 +.. gh-issue: 72719 +.. nonce: jUpzF3 +.. section: Library + +Remove modules :mod:`asyncore` and :mod:`asynchat`, which were deprecated by +:pep:`594`. + +.. + +.. date: 2022-08-23-03-13-18 +.. gh-issue: 96192 +.. nonce: TJywOF +.. section: Library + +Fix handling of ``bytes`` :term:`path-like objects ` in +:func:`os.ismount()`. + +.. + +.. date: 2022-06-23-15-36-49 +.. gh-issue: 94172 +.. nonce: DzQk0s +.. section: Library + +:mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the +*context* parameter instead. Patch by Victor Stinner + +.. + +.. date: 2022-06-23-15-31-49 +.. gh-issue: 94172 +.. nonce: AXE2IZ +.. section: Library + +Remove the *keyfile*, *certfile* and *check_hostname* parameters, deprecated +since Python 3.6, in modules: :mod:`ftplib`, :mod:`http.client`, +:mod:`imaplib`, :mod:`poplib` and :mod:`smtplib`. Use the *context* +parameter (*ssl_context* in :mod:`imaplib`) instead. Patch by Victor +Stinner. + +.. + +.. date: 2022-06-14-22-46-05 +.. gh-issue: 83638 +.. nonce: 73xfGK +.. section: Library + +Add the :attr:`~sqlite3.Connection.autocommit` attribute to +:class:`sqlite3.Connection` and the *autocommit* parameter to +:func:`sqlite3.connect` to control :pep:`249`-compliant :ref:`transaction +handling `. Patch by Erlend E. +Aasland. + +.. + +.. date: 2022-05-08-08-47-32 +.. gh-issue: 92452 +.. nonce: 3pNHe6 +.. section: Library + +Fixed a race condition that could cause :func:`sysconfig.get_config_var` to +incorrectly return :const:`None` in multi-threaded programs. + +.. + +.. date: 2022-05-03-11-32-29 +.. gh-issue: 91803 +.. nonce: pI4Juv +.. section: Library + +Fix an error when using a method of objects mocked with +:func:`unittest.mock.create_autospec` after it was sealed with +:func:`unittest.mock.seal` function. + +.. + +.. bpo: 38523 +.. 
date: 2020-10-23-22-20-52 +.. nonce: CrkxLh +.. section: Library + +:func:`shutil.copytree` now applies the *ignore_dangling_symlinks* argument +recursively. + +.. + +.. bpo: 40358 +.. date: 2020-04-30-02-15-08 +.. nonce: A4ygqe +.. section: Library + +Add the *walk_up* argument to :meth:`pathlib.PurePath.relative_to`. + +.. + +.. bpo: 36267 +.. date: 2019-09-03-15-45-19 +.. nonce: z42Ex7 +.. section: Library + +Fix IndexError in :class:`argparse.ArgumentParser` when a ``store_true`` +action is given an explicit argument. + +.. + +.. date: 2022-10-29-02-33-46 +.. gh-issue: 98832 +.. nonce: DudEIH +.. section: Documentation + +Changes wording of docstring for :func:`pathlib.Path.iterdir`. + +.. + +.. date: 2022-10-06-13-00-28 +.. gh-issue: 97966 +.. nonce: fz7kFg +.. section: Documentation + +Update uname docs to clarify the special nature of the platform attribute +and to indicate when it became late-bound. + +.. + +.. date: 2022-10-31-14-47-49 +.. gh-issue: 98903 +.. nonce: 7KinCV +.. section: Tests + +The Python test suite now fails with exit code 4 if no tests ran. It should +help detect typos in test names and test methods. + +.. + +.. date: 2022-10-26-15-19-20 +.. gh-issue: 98713 +.. nonce: Lnu32R +.. section: Tests + +Fix a bug in the :mod:`typing` tests where a test relying on +CPython-specific implementation details was not decorated with +``@cpython_only`` and was not skipped on other implementations. + +.. + +.. date: 2022-10-15-07-46-48 +.. gh-issue: 87390 +.. nonce: asR-Zo +.. section: Tests + +Add tests for star-unpacking with PEP 646, and some other miscellaneous PEP +646 tests. + +.. + +.. date: 2022-10-12-14-57-06 +.. gh-issue: 96853 +.. nonce: ANe-bw +.. section: Tests + +Added explicit coverage of ``Py_Initialize`` (and hence ``Py_InitializeEx``) +back to the embedding tests (all other embedding tests migrated to +``Py_InitializeFromConfig`` in Python 3.11). + +.. + +.. bpo: 34272 +.. date: 2018-07-29-15-59-51 +.. nonce: lVX2uR +.. section: Tests + +Some C API tests were moved into the new Lib/test/test_capi/ directory. + +.. + +.. date: 2022-11-04-02-58-10 +.. gh-issue: 99086 +.. nonce: DV_4Br +.. section: Build + +Fix ``-Wimplicit-int`` compiler warning in :program:`configure` check for +``PTHREAD_SCOPE_SYSTEM``. + +.. + +.. date: 2022-11-02-19-25-07 +.. gh-issue: 99016 +.. nonce: R05NkD +.. section: Build + +Fix build with ``PYTHON_FOR_REGEN=python3.8``. + +.. + +.. date: 2022-11-02-18-45-35 +.. gh-issue: 97731 +.. nonce: zKpTlj +.. section: Build + +Specify the full path to the source location for ``make docclean`` (needed +for cross-builds). + +.. + +.. date: 2022-11-02-10-56-40 +.. gh-issue: 98949 +.. nonce: 3SRD8C +.. section: Build + +Drop unused build dependency on ``readelf``. + +.. + +.. date: 2022-11-01-21-45-58 +.. gh-issue: 98989 +.. nonce: tMxbdB +.. section: Build + +Use ``python3.11``, if available, for regeneration and freezing. + +.. + +.. date: 2022-10-28-22-24-26 +.. gh-issue: 98831 +.. nonce: IXRCRX +.. section: Build + +Add new tooling, in ``Tools/cases_generator``, to generate the interpreter +switch statement from a list of opcode definitions. This only affects +adding, modifying or removing instruction definitions. The instruction +definitions now live in ``Python/bytecodes.c``, in the form of a `custom DSL +(under development) +`__. +The tooling reads this file and writes ``Python/generated_cases.c.h``, which +is then included by ``Python/ceval.c`` to provide most of the cases of the +main interpreter switch. + +.. + +.. date: 2022-10-28-18-53-40 +.. 
gh-issue: 98817 +.. nonce: oPqrtt +.. section: Build + +Remove PCbuild/lib.pyproj: it's not used for anything, is only a minor +convenience for Visual Studio users (who probably mostly don't even know +about it), and it takes a lot of maintenance effort to keep updated. + +.. + +.. date: 2022-10-27-19-47-31 +.. gh-issue: 98776 +.. nonce: lt_UOG +.. section: Build + +Fix ``make regen-test-levenshtein`` for out-of-tree builds. + +.. + +.. date: 2022-10-26-12-37-52 +.. gh-issue: 98707 +.. nonce: eVXGEx +.. section: Build + +Don't use vendored ``libmpdec`` headers if :option:`--with-system-libmpdec` +is passed to :program:`configure`. Don't use vendored ``libexpat`` headers +if :option:`--with-system-expat` is passed to :program:`!configure`. + +.. + +.. date: 2022-11-01-11-07-33 +.. gh-issue: 98689 +.. nonce: 0f6e_N +.. section: Windows + +Update Windows builds to zlib v1.2.13. v1.2.12 has CVE-2022-37434, but the +vulnerable ``inflateGetHeader`` API is not used by Python. + +.. + +.. date: 2022-11-01-00-37-13 +.. gh-issue: 98790 +.. nonce: fpaPAx +.. section: Windows + +Assumes that a missing ``DLLs`` directory means that standard extension +modules are in the executable's directory. + +.. + +.. date: 2022-10-27-20-30-16 +.. gh-issue: 98745 +.. nonce: v06p4r +.. section: Windows + +Update :file:`py.exe` launcher to install 3.11 by default and 3.12 on +request. + +.. + +.. date: 2022-10-26-17-43-09 +.. gh-issue: 98692 +.. nonce: bOopfZ +.. section: Windows + +Fix the :ref:`launcher` ignoring unrecognized shebang lines instead of +treating them as local paths. + +.. + +.. date: 2022-10-25-10-34-17 +.. gh-issue: 94328 +.. nonce: 19NhdU +.. section: Windows + +Update Windows installer to use SQLite 3.39.4. + +.. + +.. date: 2022-10-25-10-32-23 +.. gh-issue: 94328 +.. nonce: W3YNC_ +.. section: macOS + +Update macOS installer to SQLite 3.39.4. + +.. + +.. date: 2022-11-04-16-13-35 +.. gh-issue: 98724 +.. nonce: p0urWO +.. section: C API + +The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF` +macros now only evaluate their argument once. If the argument has side +effects, these side effects are no longer duplicated. Patch by Victor +Stinner. + +.. + +.. date: 2022-11-03-17-46-41 +.. gh-issue: 98978 +.. nonce: KJjBvv +.. section: C API + +Fix use-after-free in ``Py_SetPythonHome(NULL)``, +``Py_SetProgramName(NULL)`` and ``_Py_SetProgramFullPath(NULL)`` function +calls. Issue reported by Benedikt Reinartz. Patch by Victor Stinner. + +.. + +.. date: 2022-10-25-17-50-43 +.. gh-issue: 98410 +.. nonce: NSXYfm +.. section: C API + +Add ``getbufferproc`` and ``releasebufferproc`` to the stable API. + +.. + +.. date: 2022-10-24-12-09-17 +.. gh-issue: 98610 +.. nonce: PLX2Np +.. section: C API + +Some configurable capabilities of sub-interpreters have changed. They always +allow subprocesses (:mod:`subprocess`) now, whereas before subprocesses +could be optionally disallowed for a sub-interpreter. Instead +:func:`os.exec` can now be disallowed. Disallowing daemon threads is now +supported. Disallowing all threads is still allowed, but is never done by +default. Note that the optional restrictions are only available through +``_Py_NewInterpreterFromConfig()``, which isn't a public API. They do not +affect the main interpreter, nor :c:func:`Py_NewInterpreter`. + +.. + +.. date: 2022-10-24-11-26-55 +.. gh-issue: 98608 +.. nonce: _Q2lNV +.. section: C API + +A ``_PyInterpreterConfig`` has been added and ``_Py_NewInterpreter()`` has +been renamed to ``_Py_NewInterpreterFromConfig()``. 
The +"isolated_subinterpreters" argument is now a granular config that captures +the previous behavior. Note that this is all "private" API. + +.. + +.. date: 2022-10-16-15-00-25 +.. gh-issue: 96853 +.. nonce: V0wiXP +.. section: C API + +``Py_InitializeEx`` now correctly calls ``PyConfig_Clear`` after +initializing the interpreter (the omission didn't cause a memory leak only +because none of the dynamically allocated config fields are populated by the +wrapper function) + +.. + +.. date: 2022-08-05-15-26-12 +.. gh-issue: 91248 +.. nonce: ujirJJ +.. section: C API + +Add :c:func:`PyFrame_GetVar` and :c:func:`PyFrame_GetVarString` functions to +get a frame variable by its name. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/3.9.0a3.rst b/Misc/NEWS.d/3.9.0a3.rst index 77ccc7453c2157..54b61ca3b7785f 100644 --- a/Misc/NEWS.d/3.9.0a3.rst +++ b/Misc/NEWS.d/3.9.0a3.rst @@ -805,8 +805,7 @@ event loop only if called from the main thread. .. section: Documentation Add an entry for ``__module__`` in the "function" & "method" sections of the -`inspect docs types and members table -`_ +:mod:`inspect` docs' :ref:`inspect-types` table. .. diff --git a/Misc/NEWS.d/next/Build/2018-08-21-11-10-18.bpo-34449.Z3qm3c.rst b/Misc/NEWS.d/next/Build/2018-08-21-11-10-18.bpo-34449.Z3qm3c.rst deleted file mode 100644 index 53b75aee9cc272..00000000000000 --- a/Misc/NEWS.d/next/Build/2018-08-21-11-10-18.bpo-34449.Z3qm3c.rst +++ /dev/null @@ -1 +0,0 @@ -Drop invalid compiler switch ``-fPIC`` for HP aCC on HP-UX. Patch by Michael Osipov. diff --git a/Misc/NEWS.d/next/Build/2022-05-12-10-19-15.gh-issue-90473.-syvqK.rst b/Misc/NEWS.d/next/Build/2022-05-12-10-19-15.gh-issue-90473.-syvqK.rst deleted file mode 100644 index 29ce3de1f2134f..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-05-12-10-19-15.gh-issue-90473.-syvqK.rst +++ /dev/null @@ -1 +0,0 @@ -Disable pymalloc and increase stack size on ``wasm32-wasi``. diff --git a/Misc/NEWS.d/next/Build/2022-05-25-05-46-00.gh-issue-93202.T37jtj.rst b/Misc/NEWS.d/next/Build/2022-05-25-05-46-00.gh-issue-93202.T37jtj.rst deleted file mode 100644 index 6018e400c15d91..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-05-25-05-46-00.gh-issue-93202.T37jtj.rst +++ /dev/null @@ -1,4 +0,0 @@ -Python now always use the ``%zu`` and ``%zd`` printf formats to format a -``size_t`` or ``Py_ssize_t`` number. Building Python 3.12 requires a C11 -compiler, so these printf formats are now always supported. Patch by Victor -Stinner. diff --git a/Misc/NEWS.d/next/Build/2022-05-25-13-56-00.gh-issue-93207.B9Rubf.rst b/Misc/NEWS.d/next/Build/2022-05-25-13-56-00.gh-issue-93207.B9Rubf.rst deleted file mode 100644 index cd462bb232f043..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-05-25-13-56-00.gh-issue-93207.B9Rubf.rst +++ /dev/null @@ -1,3 +0,0 @@ -``va_start()`` with two parameters, like ``va_start(args, format),`` -is now required to build Python. ``va_start()`` is no longer called with a single parameter. -Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Build/2022-05-31-18-04-58.gh-issue-69093.6lSa0C.rst b/Misc/NEWS.d/next/Build/2022-05-31-18-04-58.gh-issue-69093.6lSa0C.rst deleted file mode 100644 index b061d6c2186c10..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-05-31-18-04-58.gh-issue-69093.6lSa0C.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``Modules/Setup.stdlib.in`` rule for ``_sqlite3`` extension. 
diff --git a/Misc/NEWS.d/next/Build/2022-06-04-12-53-53.gh-issue-93491.ehM211.rst b/Misc/NEWS.d/next/Build/2022-06-04-12-53-53.gh-issue-93491.ehM211.rst deleted file mode 100644 index b3560fac81d2a9..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-04-12-53-53.gh-issue-93491.ehM211.rst +++ /dev/null @@ -1 +0,0 @@ -``configure`` now detects and reports :pep:`11` support tiers. diff --git a/Misc/NEWS.d/next/Build/2022-06-08-14-28-03.gh-issue-93584.0xfHOK.rst b/Misc/NEWS.d/next/Build/2022-06-08-14-28-03.gh-issue-93584.0xfHOK.rst deleted file mode 100644 index 07ca5fad592b2d..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-08-14-28-03.gh-issue-93584.0xfHOK.rst +++ /dev/null @@ -1,2 +0,0 @@ -Address race condition in ``Makefile`` when installing a PGO build. All -``test`` and ``install`` targets now depend on ``all`` target. diff --git a/Misc/NEWS.d/next/Build/2022-06-25-23-25-47.gh-issue-94280.YhEyW_.rst b/Misc/NEWS.d/next/Build/2022-06-25-23-25-47.gh-issue-94280.YhEyW_.rst deleted file mode 100644 index 1199e842177d7d..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-25-23-25-47.gh-issue-94280.YhEyW_.rst +++ /dev/null @@ -1,3 +0,0 @@ -Updated pegen regeneration script on Windows to find and use Python 3.9 or -higher. Prior to this, pegen regeneration already required 3.9 or higher, -but the script may have used lower versions of Python. diff --git a/Misc/NEWS.d/next/Build/2022-06-27-11-57-15.gh-issue-93939.rv7s8W.rst b/Misc/NEWS.d/next/Build/2022-06-27-11-57-15.gh-issue-93939.rv7s8W.rst deleted file mode 100644 index f9ecf5022e2489..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-27-11-57-15.gh-issue-93939.rv7s8W.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``2to3``, ``idle``, and ``pydoc`` scripts are now generated and installed by -``Makefile`` instead of ``setup.py``. diff --git a/Misc/NEWS.d/next/Build/2022-06-28-09-42-10.gh-issue-93939._VWxKW.rst b/Misc/NEWS.d/next/Build/2022-06-28-09-42-10.gh-issue-93939._VWxKW.rst deleted file mode 100644 index ab873bc839f4e5..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-28-09-42-10.gh-issue-93939._VWxKW.rst +++ /dev/null @@ -1,2 +0,0 @@ -C extension modules are now built by ``configure`` and ``make`` -instead of ``distutils`` and ``setup.py``. diff --git a/Misc/NEWS.d/next/Build/2022-06-29-08-58-31.gh-issue-94404.3MadM6.rst b/Misc/NEWS.d/next/Build/2022-06-29-08-58-31.gh-issue-94404.3MadM6.rst deleted file mode 100644 index c7c47533c54ae2..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-29-08-58-31.gh-issue-94404.3MadM6.rst +++ /dev/null @@ -1,3 +0,0 @@ -``makesetup`` now works around an issue with sed on macOS and uses correct -CFLAGS for object files that end up in a shared extension. Module CFLAGS -are used before PY_STDMODULE_CFLAGS to avoid clashes with system headers. diff --git a/Misc/NEWS.d/next/Build/2022-06-30-09-57-39.gh-issue-90005.9-pQyR.rst b/Misc/NEWS.d/next/Build/2022-06-30-09-57-39.gh-issue-90005.9-pQyR.rst deleted file mode 100644 index 9b14f767847dad..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-30-09-57-39.gh-issue-90005.9-pQyR.rst +++ /dev/null @@ -1 +0,0 @@ -``_dbm`` module dependencies are now detected by configure. diff --git a/Misc/NEWS.d/next/Build/2022-06-30-17-00-54.gh-issue-90005.iiq5qD.rst b/Misc/NEWS.d/next/Build/2022-06-30-17-00-54.gh-issue-90005.iiq5qD.rst deleted file mode 100644 index 62f40b8f95f6ce..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-30-17-00-54.gh-issue-90005.iiq5qD.rst +++ /dev/null @@ -1 +0,0 @@ -Fix building ``_ctypes`` extension without ``pkg-config``. 
diff --git a/Misc/NEWS.d/next/Build/2022-06-30-17-18-23.gh-issue-90005.EIOOla.rst b/Misc/NEWS.d/next/Build/2022-06-30-17-18-23.gh-issue-90005.EIOOla.rst deleted file mode 100644 index 90a2dd486c1956..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-06-30-17-18-23.gh-issue-90005.EIOOla.rst +++ /dev/null @@ -1,5 +0,0 @@ -Dependencies of :mod:`readline` and :mod:`curses` module are now detected in -``configure`` script with ``pkg-config``. Only ``ncurses`` / ``ncursesw`` -are detected automatically. The old ``curses`` library is not configured -automatically. Workaround for missing ``termcap`` or ``tinfo`` library -has been removed. diff --git a/Misc/NEWS.d/next/Build/2022-07-08-10-28-23.gh-issue-94682.ZtGt_0.rst b/Misc/NEWS.d/next/Build/2022-07-08-10-28-23.gh-issue-94682.ZtGt_0.rst deleted file mode 100644 index 60717a15bcc271..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-08-10-28-23.gh-issue-94682.ZtGt_0.rst +++ /dev/null @@ -1 +0,0 @@ -Build and test with OpenSSL 1.1.1q diff --git a/Misc/NEWS.d/next/Build/2022-07-12-13-39-18.gh-issue-94773.koHKm5.rst b/Misc/NEWS.d/next/Build/2022-07-12-13-39-18.gh-issue-94773.koHKm5.rst deleted file mode 100644 index ed7e40c91103d6..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-12-13-39-18.gh-issue-94773.koHKm5.rst +++ /dev/null @@ -1,2 +0,0 @@ -``deepfreeze.py`` now supports code object with frozensets that contain -incompatible, unsortable types. diff --git a/Misc/NEWS.d/next/Build/2022-07-13-10-13-10.gh-issue-94801.3xUB24.rst b/Misc/NEWS.d/next/Build/2022-07-13-10-13-10.gh-issue-94801.3xUB24.rst deleted file mode 100644 index 5caf84f96da131..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-13-10-13-10.gh-issue-94801.3xUB24.rst +++ /dev/null @@ -1,2 +0,0 @@ -``configure`` now uses custom flags like ``ZLIB_CFLAGS`` and ``ZLIB_LIBS`` -when searching for headers and libraries. diff --git a/Misc/NEWS.d/next/Build/2022-07-14-02-45-44.gh-issue-94841.lLRTdf.rst b/Misc/NEWS.d/next/Build/2022-07-14-02-45-44.gh-issue-94841.lLRTdf.rst deleted file mode 100644 index f7ad4f88a51db0..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-14-02-45-44.gh-issue-94841.lLRTdf.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the possible performance regression of :c:func:`PyObject_Free` compiled with MSVC version 1932. diff --git a/Misc/NEWS.d/next/Build/2022-07-14-11-13-26.gh-issue-94847.s3Kr5p.rst b/Misc/NEWS.d/next/Build/2022-07-14-11-13-26.gh-issue-94847.s3Kr5p.rst deleted file mode 100644 index a6d1e7277da4c0..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-14-11-13-26.gh-issue-94847.s3Kr5p.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed ``_decimal`` module build issue on GCC when compiling with LTO and -pydebug. Debug builds no longer force inlining of functions. diff --git a/Misc/NEWS.d/next/Build/2022-07-21-09-17-01.gh-issue-95085.E9x2S_.rst b/Misc/NEWS.d/next/Build/2022-07-21-09-17-01.gh-issue-95085.E9x2S_.rst deleted file mode 100644 index 02dbd2b8b3118f..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-21-09-17-01.gh-issue-95085.E9x2S_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Platforms ``wasm32-unknown-emscripten`` and ``wasm32-unknown-wasi`` have -been promoted to :pep:`11` tier 3 platform support. 
diff --git a/Misc/NEWS.d/next/Build/2022-07-23-21-39-09.gh-issue-95174.7cYMZR.rst b/Misc/NEWS.d/next/Build/2022-07-23-21-39-09.gh-issue-95174.7cYMZR.rst deleted file mode 100644 index 72ce183ac8329d..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-23-21-39-09.gh-issue-95174.7cYMZR.rst +++ /dev/null @@ -1 +0,0 @@ -Python now skips missing :mod:`socket` functions and methods on WASI. WASI can only create sockets from existing fd / accept and has no netdb. diff --git a/Misc/NEWS.d/next/Build/2022-07-25-08-59-35.gh-issue-95174.g8woUW.rst b/Misc/NEWS.d/next/Build/2022-07-25-08-59-35.gh-issue-95174.g8woUW.rst deleted file mode 100644 index 05f29955072c54..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-25-08-59-35.gh-issue-95174.g8woUW.rst +++ /dev/null @@ -1,2 +0,0 @@ -Python now detects missing ``dup`` function in WASI and works around some -missing :mod:`errno`, :mod:`select`, and :mod:`socket` constants. diff --git a/Misc/NEWS.d/next/Build/2022-07-25-09-48-43.gh-issue-95145.ZNS3dj.rst b/Misc/NEWS.d/next/Build/2022-07-25-09-48-43.gh-issue-95145.ZNS3dj.rst deleted file mode 100644 index c751b5e3adc474..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-25-09-48-43.gh-issue-95145.ZNS3dj.rst +++ /dev/null @@ -1,2 +0,0 @@ -wasm32-wasi builds no longer depend on WASIX's pthread stubs. Python now has -its own stubbed pthread API. diff --git a/Misc/NEWS.d/next/Build/2022-07-26-18-13-34.gh-issue-94801.9fREfy.rst b/Misc/NEWS.d/next/Build/2022-07-26-18-13-34.gh-issue-94801.9fREfy.rst deleted file mode 100644 index a58be30e81b057..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-07-26-18-13-34.gh-issue-94801.9fREfy.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a regression in ``configure`` script that caused some header checks to -ignore custom ``CPPFLAGS``. The regression was introduced in :gh:`94802`. diff --git a/Misc/NEWS.d/next/Build/2022-08-04-15-29-35.gh-issue-93744.svRuqm.rst b/Misc/NEWS.d/next/Build/2022-08-04-15-29-35.gh-issue-93744.svRuqm.rst deleted file mode 100644 index fa9ade25718fcb..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-08-04-15-29-35.gh-issue-93744.svRuqm.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove the ``configure --with-cxx-main`` build option: it didn't work for -many years. Remove the ``MAINCC`` variable from ``configure`` and -``Makefile``. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Build/2022-08-12-13-06-03.gh-issue-90536.qMpF6p.rst b/Misc/NEWS.d/next/Build/2022-08-12-13-06-03.gh-issue-90536.qMpF6p.rst deleted file mode 100644 index 4605e03915ee63..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-08-12-13-06-03.gh-issue-90536.qMpF6p.rst +++ /dev/null @@ -1,2 +0,0 @@ -Use the BOLT post-link optimizer to improve performance, particularly on -medium-to-large applications. diff --git a/Misc/NEWS.d/next/Build/2022-08-15-10-56-07.gh-issue-95973.Bsswsc.rst b/Misc/NEWS.d/next/Build/2022-08-15-10-56-07.gh-issue-95973.Bsswsc.rst deleted file mode 100644 index d03bc5205ead8e..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-08-15-10-56-07.gh-issue-95973.Bsswsc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a new ``--with-dsymutil`` configure option to to link debug information -in macOS. Patch by Pablo Galindo. 
diff --git a/Misc/NEWS.d/next/Build/2022-08-26-11-09-11.gh-issue-84461.Nsdn_R.rst b/Misc/NEWS.d/next/Build/2022-08-26-11-09-11.gh-issue-84461.Nsdn_R.rst deleted file mode 100644 index 134e9645159a69..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-08-26-11-09-11.gh-issue-84461.Nsdn_R.rst +++ /dev/null @@ -1,3 +0,0 @@ -``wasm32-emscripten`` platform no longer builds :mod:`resource` module, -:func:`~os.getresuid`, :func:`~os.getresgid`, and their setters. The APIs -are stubs and not functional. diff --git a/Misc/NEWS.d/next/Build/2022-08-26-11-50-03.gh-issue-96269.x_J5h0.rst b/Misc/NEWS.d/next/Build/2022-08-26-11-50-03.gh-issue-96269.x_J5h0.rst deleted file mode 100644 index adb605fede5dae..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-08-26-11-50-03.gh-issue-96269.x_J5h0.rst +++ /dev/null @@ -1,3 +0,0 @@ -Shared module targets now depend on new ``MODULE_DEPS`` variable, which -includes ``EXPORTSYMS``. This fixes a build order issue on unsupported AIX -platform. diff --git a/Misc/NEWS.d/next/Build/2022-09-11-14-23-49.gh-issue-96729.W4uBWL.rst b/Misc/NEWS.d/next/Build/2022-09-11-14-23-49.gh-issue-96729.W4uBWL.rst deleted file mode 100644 index b67cd200e2d346..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-09-11-14-23-49.gh-issue-96729.W4uBWL.rst +++ /dev/null @@ -1,2 +0,0 @@ -Ensure that Windows releases built with ``Tools\msi\buildrelease.bat`` are -upgradable to and from official Python releases. diff --git a/Misc/NEWS.d/next/Build/2022-09-12-18-34-51.gh-issue-85936.tX4VCU.rst b/Misc/NEWS.d/next/Build/2022-09-12-18-34-51.gh-issue-85936.tX4VCU.rst deleted file mode 100644 index 302b863a3b1e34..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-09-12-18-34-51.gh-issue-85936.tX4VCU.rst +++ /dev/null @@ -1,2 +0,0 @@ -CPython now uses the ThinLTO option as the default policy if the Clang -compiler accepts the flag. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Build/2022-09-17-11-19-24.gh-issue-96883.p_gr62.rst b/Misc/NEWS.d/next/Build/2022-09-17-11-19-24.gh-issue-96883.p_gr62.rst deleted file mode 100644 index 2258ce8ab9d58e..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-09-17-11-19-24.gh-issue-96883.p_gr62.rst +++ /dev/null @@ -1,2 +0,0 @@ -``wasm32-emscripten`` builds for browsers now include -:mod:`concurrent.futures` for :mod:`asyncio` and :mod:`unittest.mock`. diff --git a/Misc/NEWS.d/next/Build/2022-09-20-12-43-44.gh-issue-96761.IF29kR.rst b/Misc/NEWS.d/next/Build/2022-09-20-12-43-44.gh-issue-96761.IF29kR.rst deleted file mode 100644 index 18f75ac489105e..00000000000000 --- a/Misc/NEWS.d/next/Build/2022-09-20-12-43-44.gh-issue-96761.IF29kR.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the build process of clang compiler for :program:`_bootstrap_python` if -LTO optimization is applied. Patch by Matthias Görgens and Dong-hee Na. diff --git a/Misc/NEWS.d/next/Build/2022-10-16-12-49-24.gh-issue-88226.BsnQ4k.rst b/Misc/NEWS.d/next/Build/2022-10-16-12-49-24.gh-issue-88226.BsnQ4k.rst new file mode 100644 index 00000000000000..5f32091739a282 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-10-16-12-49-24.gh-issue-88226.BsnQ4k.rst @@ -0,0 +1,3 @@ +Always define ``TARGET_*`` labels in ``Python/ceval.c``, even if +``USE_COMPUTED_GOTOS`` is disabled. This allows breakpoints to be +set at those labels in (for instance) ``gdb``. 
diff --git a/Misc/NEWS.d/next/Build/2022-11-03-08-10-49.gh-issue-98872.gdsR8X.rst b/Misc/NEWS.d/next/Build/2022-11-03-08-10-49.gh-issue-98872.gdsR8X.rst new file mode 100644 index 00000000000000..ad4dc496ee0e6b --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-11-03-08-10-49.gh-issue-98872.gdsR8X.rst @@ -0,0 +1 @@ +Fix a possible fd leak in ``Programs/_freeze_module.c`` introduced in Python 3.11. diff --git a/Misc/NEWS.d/next/Build/2022-11-09-14-42-48.gh-issue-99289.X7wFE1.rst b/Misc/NEWS.d/next/Build/2022-11-09-14-42-48.gh-issue-99289.X7wFE1.rst new file mode 100644 index 00000000000000..86ce4c79d17cff --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-11-09-14-42-48.gh-issue-99289.X7wFE1.rst @@ -0,0 +1,4 @@ +Add a ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall` +options (default: ``-j0``) in ``make install``. Also merged the ``compileall`` +commands into a single command building .pyc files for the all optimization levels +(0, 1, 2) at once. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Build/2022-11-15-08-40-22.gh-issue-99337.5LoQDE.rst b/Misc/NEWS.d/next/Build/2022-11-15-08-40-22.gh-issue-99337.5LoQDE.rst new file mode 100644 index 00000000000000..f7396abde13b88 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-11-15-08-40-22.gh-issue-99337.5LoQDE.rst @@ -0,0 +1 @@ +Fix a compilation issue with GCC 12 on macOS. diff --git a/Misc/NEWS.d/next/Build/2022-11-24-02-58-10.gh-issue-99086.DV_4Br.rst b/Misc/NEWS.d/next/Build/2022-11-24-02-58-10.gh-issue-99086.DV_4Br.rst new file mode 100644 index 00000000000000..2dace165ca1ada --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-11-24-02-58-10.gh-issue-99086.DV_4Br.rst @@ -0,0 +1 @@ +Fix ``-Wimplicit-int``, ``-Wstrict-prototypes``, and ``-Wimplicit-function-declaration`` compiler warnings in :program:`configure` checks. diff --git a/Misc/NEWS.d/next/C API/2021-10-05-21-59-43.bpo-45383.TVClgf.rst b/Misc/NEWS.d/next/C API/2021-10-05-21-59-43.bpo-45383.TVClgf.rst deleted file mode 100644 index ca1b7a43d29481..00000000000000 --- a/Misc/NEWS.d/next/C API/2021-10-05-21-59-43.bpo-45383.TVClgf.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :c:func:`PyType_FromSpec` API will now find and use a metaclass -based on the provided bases. -An error will be raised if there is a metaclass conflict. diff --git a/Misc/NEWS.d/next/C API/2022-04-13-16-10-55.gh-issue-59121.-B7mKp.rst b/Misc/NEWS.d/next/C API/2022-04-13-16-10-55.gh-issue-59121.-B7mKp.rst deleted file mode 100644 index d52866ea08f713..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-04-13-16-10-55.gh-issue-59121.-B7mKp.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an assert that prevented ``PyRun_InteractiveOne`` from providing tracebacks when parsing from the provided FD. diff --git a/Misc/NEWS.d/next/C API/2022-05-03-19-35-37.gh-issue-92193.61VoFL.rst b/Misc/NEWS.d/next/C API/2022-05-03-19-35-37.gh-issue-92193.61VoFL.rst deleted file mode 100644 index e0755bb0199491..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-03-19-35-37.gh-issue-92193.61VoFL.rst +++ /dev/null @@ -1,5 +0,0 @@ -Add new function :c:func:`PyFunction_SetVectorcall` to the C API -which sets the vectorcall field of a given :c:type:`PyFunctionObject`. - -Warning: extensions using this API must preserve the behavior -of the unaltered function! 
diff --git a/Misc/NEWS.d/next/C API/2022-05-09-23-16-38.gh-issue-85858.VIcNDL.rst b/Misc/NEWS.d/next/C API/2022-05-09-23-16-38.gh-issue-85858.VIcNDL.rst deleted file mode 100644 index c175d1efee388a..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-09-23-16-38.gh-issue-85858.VIcNDL.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove the ``PyUnicode_InternImmortal()`` function and the -``SSTATE_INTERNED_IMMORTAL`` macro. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-05-10-12-35-42.gh-issue-92536.cAoRCZ.rst b/Misc/NEWS.d/next/C API/2022-05-10-12-35-42.gh-issue-92536.cAoRCZ.rst deleted file mode 100644 index a0b1bc69e281a2..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-10-12-35-42.gh-issue-92536.cAoRCZ.rst +++ /dev/null @@ -1 +0,0 @@ -Remove legacy Unicode APIs based on ``Py_UNICODE*``. diff --git a/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst deleted file mode 100644 index 60a8818e46b7ad..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove the ``token.h`` header file. There was never any public tokenizer C -API. The ``token.h`` header file was only designed to be used by Python -internals. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-05-13-18-17-48.gh-issue-92781.TVDr3-.rst b/Misc/NEWS.d/next/C API/2022-05-13-18-17-48.gh-issue-92781.TVDr3-.rst deleted file mode 100644 index 6442ba619450f0..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-13-18-17-48.gh-issue-92781.TVDr3-.rst +++ /dev/null @@ -1,3 +0,0 @@ -Avoid mixing declarations and code in the C API to fix the compiler warning: -"ISO C90 forbids mixed declarations and code" -[-Werror=declaration-after-statement]. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-05-19-18-05-51.gh-issue-92913.Ass1Hv.rst b/Misc/NEWS.d/next/C API/2022-05-19-18-05-51.gh-issue-92913.Ass1Hv.rst deleted file mode 100644 index c448c64029d825..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-19-18-05-51.gh-issue-92913.Ass1Hv.rst +++ /dev/null @@ -1,2 +0,0 @@ -Ensures changes to :c:member:`PyConfig.module_search_paths` are ignored -unless :c:member:`PyConfig.module_search_paths_set` is set diff --git a/Misc/NEWS.d/next/C API/2022-05-23-12-31-04.gh-issue-77782.ugC8dn.rst b/Misc/NEWS.d/next/C API/2022-05-23-12-31-04.gh-issue-77782.ugC8dn.rst deleted file mode 100644 index d212f6b977ef93..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-23-12-31-04.gh-issue-77782.ugC8dn.rst +++ /dev/null @@ -1,3 +0,0 @@ -Deprecate global configuration variable like -:c:var:`Py_IgnoreEnvironmentFlag`: the :c:func:`Py_InitializeFromConfig` API -should be instead. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-05-23-13-33-18.gh-issue-93103.ooD3Eb.rst b/Misc/NEWS.d/next/C API/2022-05-23-13-33-18.gh-issue-93103.ooD3Eb.rst deleted file mode 100644 index 404f0089cd0b79..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-23-13-33-18.gh-issue-93103.ooD3Eb.rst +++ /dev/null @@ -1,4 +0,0 @@ -Deprecate global configuration variables, like -:c:var:`Py_IgnoreEnvironmentFlag`, in the documentation: the -:c:func:`Py_InitializeFromConfig` API should be instead. Patch by Victor -Stinner. 
diff --git a/Misc/NEWS.d/next/C API/2022-05-23-15-22-18.gh-issue-92898.Qjc9d3.rst b/Misc/NEWS.d/next/C API/2022-05-23-15-22-18.gh-issue-92898.Qjc9d3.rst deleted file mode 100644 index 01eca1db1f18c9..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-05-23-15-22-18.gh-issue-92898.Qjc9d3.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix C++ compiler warnings when casting function arguments to ``PyObject*``. -Patch by Serge Guelton. diff --git a/Misc/NEWS.d/next/C API/2022-06-03-14-54-41.gh-issue-93466.DDtH0X.rst b/Misc/NEWS.d/next/C API/2022-06-03-14-54-41.gh-issue-93466.DDtH0X.rst deleted file mode 100644 index 0bb65ea2b38786..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-03-14-54-41.gh-issue-93466.DDtH0X.rst +++ /dev/null @@ -1,3 +0,0 @@ -Slot IDs in PyType_Spec may not be repeated. The documentation was updated -to mention this. For some cases of repeated slots, PyType_FromSpec and -related functions will now raise an exception. diff --git a/Misc/NEWS.d/next/C API/2022-06-04-13-15-41.gh-issue-93442.4M4NDb.rst b/Misc/NEWS.d/next/C API/2022-06-04-13-15-41.gh-issue-93442.4M4NDb.rst deleted file mode 100644 index f48ed37c814454..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-04-13-15-41.gh-issue-93442.4M4NDb.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add C++ overloads for _Py_CAST_impl() to handle 0/NULL. This will allow C++ -extensions that pass 0 or NULL to macros using _Py_CAST() to continue to -compile. diff --git a/Misc/NEWS.d/next/C API/2022-06-06-16-04-14.gh-issue-93503.MHJTu8.rst b/Misc/NEWS.d/next/C API/2022-06-06-16-04-14.gh-issue-93503.MHJTu8.rst deleted file mode 100644 index 3dd292cd778023..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-06-16-04-14.gh-issue-93503.MHJTu8.rst +++ /dev/null @@ -1,7 +0,0 @@ -Add two new public functions to the public C-API, -:c:func:`PyEval_SetProfileAllThreads` and :c:func:`PyEval_SetTraceAllThreads`, -that allow to set tracing and profiling functions in all running threads in -addition to the calling one. Also, two analogous functions to the -:mod:`threading` module (:func:`threading.setprofile_all_threads` and -:func:`threading.settrace_all_threads`) that allow to do the same from Python. -Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/C API/2022-06-10-16-50-27.gh-issue-89546.mX1f10.rst b/Misc/NEWS.d/next/C API/2022-06-10-16-50-27.gh-issue-89546.mX1f10.rst deleted file mode 100644 index 8e6b6d95c7b4ab..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-10-16-50-27.gh-issue-89546.mX1f10.rst +++ /dev/null @@ -1,4 +0,0 @@ -:c:func:`PyType_FromMetaclass` (and other ``PyType_From*`` functions) now -check that offsets and the base class's -:c:member:`~PyTypeObject.tp_basicsize` fit in the new class's -``tp_basicsize``. diff --git a/Misc/NEWS.d/next/C API/2022-06-10-23-41-48.gh-issue-91731.fhYUQG.rst b/Misc/NEWS.d/next/C API/2022-06-10-23-41-48.gh-issue-91731.fhYUQG.rst deleted file mode 100644 index 185671ca4fcfb1..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-10-23-41-48.gh-issue-91731.fhYUQG.rst +++ /dev/null @@ -1,3 +0,0 @@ -Avoid defining the ``static_assert`` when compiling with C++ 11, where this -is a keyword and redefining it can lead to undefined behavior. 
Patch by -Pablo Galindo diff --git a/Misc/NEWS.d/next/C API/2022-06-13-21-37-31.gh-issue-91321.DgJFvS.rst b/Misc/NEWS.d/next/C API/2022-06-13-21-37-31.gh-issue-91321.DgJFvS.rst deleted file mode 100644 index 57c39bc8d83c87..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-13-21-37-31.gh-issue-91321.DgJFvS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the compatibility of the Python C API with C++ older than C++11. Patch by -Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-06-17-13-41-38.gh-issue-93937.uKVTEh.rst b/Misc/NEWS.d/next/C API/2022-06-17-13-41-38.gh-issue-93937.uKVTEh.rst deleted file mode 100644 index c0a0745aa0dd25..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-06-17-13-41-38.gh-issue-93937.uKVTEh.rst +++ /dev/null @@ -1,14 +0,0 @@ -The following frame functions and type are now directly available with -``#include ``, it's no longer needed to add ``#include -``: - -* :c:func:`PyFrame_Check` -* :c:func:`PyFrame_GetBack` -* :c:func:`PyFrame_GetBuiltins` -* :c:func:`PyFrame_GetGenerator` -* :c:func:`PyFrame_GetGlobals` -* :c:func:`PyFrame_GetLasti` -* :c:func:`PyFrame_GetLocals` -* :c:type:`PyFrame_Type` - -Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2022-07-12-17-39-32.gh-issue-94731.9CPJNU.rst b/Misc/NEWS.d/next/C API/2022-07-12-17-39-32.gh-issue-94731.9CPJNU.rst deleted file mode 100644 index b7816d28215e7b..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-12-17-39-32.gh-issue-94731.9CPJNU.rst +++ /dev/null @@ -1,3 +0,0 @@ -Python again uses C-style casts for most casting operations when compiled -with C++. This may trigger compiler warnings, if they are enabled with e.g. -``-Wold-style-cast `` or ``-Wzero-as-null-pointer-constant`` options for ``g++``. diff --git a/Misc/NEWS.d/next/C API/2022-07-17-18-21-40.gh-issue-94930.gPFGDL.rst b/Misc/NEWS.d/next/C API/2022-07-17-18-21-40.gh-issue-94930.gPFGDL.rst deleted file mode 100644 index 5b79d757b1e4aa..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-17-18-21-40.gh-issue-94930.gPFGDL.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``SystemError`` raised when :c:func:`PyArg_ParseTupleAndKeywords` is -used with ``#`` in ``(...)`` but without ``PY_SSIZE_T_CLEAN`` defined. diff --git a/Misc/NEWS.d/next/C API/2022-07-19-22-37-40.gh-issue-94936.LGlmKv.rst b/Misc/NEWS.d/next/C API/2022-07-19-22-37-40.gh-issue-94936.LGlmKv.rst deleted file mode 100644 index abef9bb376c895..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-19-22-37-40.gh-issue-94936.LGlmKv.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added :c:func:`PyCode_GetVarnames`, :c:func:`PyCode_GetCellvars` and -:c:func:`PyCode_GetFreevars` for accessing ``co_varnames``, ``co_cellvars`` -and ``co_freevars`` respectively via the C API. diff --git a/Misc/NEWS.d/next/C API/2022-07-25-15-54-27.gh-issue-92678.ziZpxz.rst b/Misc/NEWS.d/next/C API/2022-07-25-15-54-27.gh-issue-92678.ziZpxz.rst deleted file mode 100644 index 52473c9dfab946..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-25-15-54-27.gh-issue-92678.ziZpxz.rst +++ /dev/null @@ -1,3 +0,0 @@ -Adds unstable C-API functions ``_PyObject_VisitManagedDict`` and -``_PyObject_ClearManagedDict`` to allow C extensions to allow the VM to -manage their object's dictionaries. 
diff --git a/Misc/NEWS.d/next/C API/2022-07-29-10-41-59.gh-issue-95388.aiRSgr.rst b/Misc/NEWS.d/next/C API/2022-07-29-10-41-59.gh-issue-95388.aiRSgr.rst deleted file mode 100644 index c389d13db6afc2..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-29-10-41-59.gh-issue-95388.aiRSgr.rst +++ /dev/null @@ -1,2 +0,0 @@ -Creating :c:data:`immutable types ` with mutable -bases is deprecated and is planned to be disabled in Python 3.14. diff --git a/Misc/NEWS.d/next/C API/2022-07-29-15-24-45.gh-issue-93012.-DdGEy.rst b/Misc/NEWS.d/next/C API/2022-07-29-15-24-45.gh-issue-93012.-DdGEy.rst deleted file mode 100644 index 199c2d1fc197dd..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-29-15-24-45.gh-issue-93012.-DdGEy.rst +++ /dev/null @@ -1,6 +0,0 @@ -The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class -when the class's :py:meth:`~object.__call__` method is reassigned. This -makes vectorcall safe to use with mutable types (i.e. heap types without the -:const:`immutable ` flag). Mutable types that do -not override :c:member:`~PyTypeObject.tp_call` now inherit the -:const:`Py_TPFLAGS_HAVE_VECTORCALL` flag. diff --git a/Misc/NEWS.d/next/C API/2022-07-31-21-58-27.gh-issue-95504.wy7B1F.rst b/Misc/NEWS.d/next/C API/2022-07-31-21-58-27.gh-issue-95504.wy7B1F.rst deleted file mode 100644 index 955bdd4c7494f1..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-07-31-21-58-27.gh-issue-95504.wy7B1F.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix sign placement when specifying width or precision in -:c:func:`PyUnicode_FromFormat` and :c:func:`PyUnicode_FromFormatV`. -Patch by Philip Georgi. diff --git a/Misc/NEWS.d/next/C API/2022-08-01-16-21-39.gh-issue-93274.QoDHEu.rst b/Misc/NEWS.d/next/C API/2022-08-01-16-21-39.gh-issue-93274.QoDHEu.rst deleted file mode 100644 index da6cce4a7b283a..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-08-01-16-21-39.gh-issue-93274.QoDHEu.rst +++ /dev/null @@ -1,3 +0,0 @@ -API for implementing vectorcall (:c:data:`Py_TPFLAGS_HAVE_VECTORCALL`, -:c:func:`PyVectorcall_NARGS` and :c:func:`PyVectorcall_Call`) was added to -the limited API and stable ABI. diff --git a/Misc/NEWS.d/next/C API/2022-08-03-13-01-57.gh-issue-92678.DLwONN.rst b/Misc/NEWS.d/next/C API/2022-08-03-13-01-57.gh-issue-92678.DLwONN.rst deleted file mode 100644 index a3a209f5e41338..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-08-03-13-01-57.gh-issue-92678.DLwONN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Support C extensions using managed dictionaries by setting the -``Py_TPFLAGS_MANAGED_DICT`` flag. diff --git a/Misc/NEWS.d/next/C API/2022-08-03-14-39-08.gh-issue-92678.ozFTEx.rst b/Misc/NEWS.d/next/C API/2022-08-03-14-39-08.gh-issue-92678.ozFTEx.rst deleted file mode 100644 index 6bf3d4b1abbf5a..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-08-03-14-39-08.gh-issue-92678.ozFTEx.rst +++ /dev/null @@ -1,2 +0,0 @@ -Restore the 3.10 behavior for multiple inheritance of C extension classes -that store their dictionary at the end of the struct. diff --git a/Misc/NEWS.d/next/C API/2022-08-08-14-36-31.gh-issue-95781.W_G8YW.rst b/Misc/NEWS.d/next/C API/2022-08-08-14-36-31.gh-issue-95781.W_G8YW.rst deleted file mode 100644 index eb2fd7e9da3d81..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-08-08-14-36-31.gh-issue-95781.W_G8YW.rst +++ /dev/null @@ -1,4 +0,0 @@ -An unrecognized format character in :c:func:`PyUnicode_FromFormat` and -:c:func:`PyUnicode_FromFormatV` now sets a :exc:`SystemError`. 
-In previous versions it caused all the rest of the format string to be -copied as-is to the result string, and any extra arguments discarded. diff --git a/Misc/NEWS.d/next/C API/2022-08-16-16-54-42.gh-issue-95589.6xE1ar.rst b/Misc/NEWS.d/next/C API/2022-08-16-16-54-42.gh-issue-95589.6xE1ar.rst deleted file mode 100644 index 696e3c80db62cf..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-08-16-16-54-42.gh-issue-95589.6xE1ar.rst +++ /dev/null @@ -1,4 +0,0 @@ -Extensions classes that set ``tp_dictoffset`` and ``tp_weaklistoffset`` -lose the support for multiple inheritance, but are now safe. Extension -classes should use :const:`Py_TPFLAGS_MANAGED_DICT` and -:const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. diff --git a/Misc/NEWS.d/next/C API/2022-09-20-01-04-57.gh-issue-96512.msZTjF.rst b/Misc/NEWS.d/next/C API/2022-09-20-01-04-57.gh-issue-96512.msZTjF.rst deleted file mode 100644 index 787bee3ee23b73..00000000000000 --- a/Misc/NEWS.d/next/C API/2022-09-20-01-04-57.gh-issue-96512.msZTjF.rst +++ /dev/null @@ -1,2 +0,0 @@ -Configuration for the :ref:`integer string conversion length limitation -` now lives in the PyConfig C API struct. diff --git a/Misc/NEWS.d/next/C API/2022-11-02-16-51-24.gh-issue-47146.dsYDtI.rst b/Misc/NEWS.d/next/C API/2022-11-02-16-51-24.gh-issue-47146.dsYDtI.rst new file mode 100644 index 00000000000000..0f419427925dc9 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2022-11-02-16-51-24.gh-issue-47146.dsYDtI.rst @@ -0,0 +1,5 @@ +The ``structmember.h`` header is deprecated. Its non-deprecated contents are +now available just by including ``Python.h``, with a ``Py_`` prefix added if +it was missing. (Deprecated contents are :c:macro:`T_OBJECT`, +:c:macro:`T_NONE`, and no-op flags.) Patch by Petr Viktorin, based on +earlier work by Alexander Belopolsky and Matthias Braun. diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-09-04-19-09-49.bpo-38031.Yq4L72.rst b/Misc/NEWS.d/next/Core and Builtins/2019-09-04-19-09-49.bpo-38031.Yq4L72.rst new file mode 100644 index 00000000000000..b5964375962f66 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-09-04-19-09-49.bpo-38031.Yq4L72.rst @@ -0,0 +1,2 @@ +Fix a possible assertion failure in :class:`io.FileIO` when the opener +returns an invalid file descriptor. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-01-02-14-53-59.bpo-46142.WayjgT.rst b/Misc/NEWS.d/next/Core and Builtins/2022-01-02-14-53-59.bpo-46142.WayjgT.rst deleted file mode 100644 index 3c62c54289946c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-01-02-14-53-59.bpo-46142.WayjgT.rst +++ /dev/null @@ -1,3 +0,0 @@ -Make ``--help`` output shorter by moving some info to the new -``--help-env`` and ``--help-xoptions`` command-line options. -Also add ``--help-all`` option to print complete usage. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-03-22-13-12-27.bpo-47091.tJcy-P.rst b/Misc/NEWS.d/next/Core and Builtins/2022-03-22-13-12-27.bpo-47091.tJcy-P.rst deleted file mode 100644 index 72a39b5ebc7109..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-03-22-13-12-27.bpo-47091.tJcy-P.rst +++ /dev/null @@ -1 +0,0 @@ -Improve performance of repetition of :class:`list` and :class:`tuple` by using ``memcpy`` to copy data and performing the reference increments in one step. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-04-15-22-12-53.gh-issue-91578.rDOtyK.rst b/Misc/NEWS.d/next/Core and Builtins/2022-04-15-22-12-53.gh-issue-91578.rDOtyK.rst deleted file mode 100644 index 4dc738ab9051a4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-04-15-22-12-53.gh-issue-91578.rDOtyK.rst +++ /dev/null @@ -1 +0,0 @@ -Updates the error message for abstract class. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-04-16-15-37-55.gh-issue-91399.trLbK6.rst b/Misc/NEWS.d/next/Core and Builtins/2022-04-16-15-37-55.gh-issue-91399.trLbK6.rst deleted file mode 100644 index d1e6c098829253..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-04-16-15-37-55.gh-issue-91399.trLbK6.rst +++ /dev/null @@ -1 +0,0 @@ -Removed duplicate '{0, 0, 0, 0, 0, 0}' entry in 'Objects/unicodetype_db.h'. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-04-24-02-22-10.gh-issue-91432.YPJAK6.rst b/Misc/NEWS.d/next/Core and Builtins/2022-04-24-02-22-10.gh-issue-91432.YPJAK6.rst deleted file mode 100644 index 081dc0f5c334c5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-04-24-02-22-10.gh-issue-91432.YPJAK6.rst +++ /dev/null @@ -1 +0,0 @@ -Specialized the :opcode:`FOR_ITER` opcode using the PEP 659 machinery diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-03-20-12-18.gh-issue-92261.aigLnb.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-03-20-12-18.gh-issue-92261.aigLnb.rst deleted file mode 100644 index df0228e273d8e3..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-03-20-12-18.gh-issue-92261.aigLnb.rst +++ /dev/null @@ -1 +0,0 @@ -Fix hang when trying to iterate over a ``typing.Union``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-08-19-43-31.gh-issue-88750.1BjJg-.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-08-19-43-31.gh-issue-88750.1BjJg-.rst deleted file mode 100644 index bc8d41397c00c1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-08-19-43-31.gh-issue-88750.1BjJg-.rst +++ /dev/null @@ -1,2 +0,0 @@ -The deprecated debug build only ``PYTHONTHREADDEBUG`` environment variable -no longer does anything. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-10-11-34-35.gh-issue-92619.u0V0lY.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-10-11-34-35.gh-issue-92619.u0V0lY.rst deleted file mode 100644 index dfc9c0d1327fe3..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-10-11-34-35.gh-issue-92619.u0V0lY.rst +++ /dev/null @@ -1 +0,0 @@ -Make the compiler duplicate an exit block only if none of its instructions have a lineno (previously only the first instruction in the block was checked, leading to unnecessarily duplicated blocks). diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-11-09-16-54.gh-issue-91102.lenv9h.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-11-09-16-54.gh-issue-91102.lenv9h.rst deleted file mode 100644 index b2bdf9d5322f37..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-11-09-16-54.gh-issue-91102.lenv9h.rst +++ /dev/null @@ -1 +0,0 @@ -:meth:`_warnings.warn_explicit` is ported to Argument Clinic. 
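The ``gh-issue-91102`` entry above ports :meth:`_warnings.warn_explicit` to Argument Clinic; the private ``_warnings`` module backs the public :func:`warnings.warn_explicit`. A minimal, illustrative call (the file name and line number are made up)::

    import warnings

    # warn_explicit bypasses the stack inspection done by warnings.warn and
    # lets the caller supply the location information directly.
    warnings.warn_explicit(
        "this resource looks stale",   # message
        UserWarning,                   # category
        "example.py",                  # filename (hypothetical)
        17,                            # line number (hypothetical)
    )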
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-12-13-23-19.gh-issue-92236.sDRzUe.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-12-13-23-19.gh-issue-92236.sDRzUe.rst deleted file mode 100644 index fe482d505c67b4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-12-13-23-19.gh-issue-92236.sDRzUe.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove spurious "LINE" event when starting a generator or coroutine, visible -tracing functions implemented in C. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-13-00-57-18.gh-issue-92658.YdhFE2.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-13-00-57-18.gh-issue-92658.YdhFE2.rst deleted file mode 100644 index 887b3d61596609..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-13-00-57-18.gh-issue-92658.YdhFE2.rst +++ /dev/null @@ -1 +0,0 @@ -Add support for connecting and binding to Hyper-V sockets on Windows Hyper-V hosts and guests. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-13-12-36-10.gh-issue-92777.Odo4vP.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-13-12-36-10.gh-issue-92777.Odo4vP.rst deleted file mode 100644 index 19416590a8e9ac..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-13-12-36-10.gh-issue-92777.Odo4vP.rst +++ /dev/null @@ -1 +0,0 @@ -Specialize ``LOAD_METHOD`` for objects with lazy dictionaries. Patch by Ken Jin. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-14-13-22-11.gh-issue-92804.rAqpI2.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-14-13-22-11.gh-issue-92804.rAqpI2.rst deleted file mode 100644 index 7a5fd3f6568eaf..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-14-13-22-11.gh-issue-92804.rAqpI2.rst +++ /dev/null @@ -1 +0,0 @@ -Fix memory leak in ``memoryview`` iterator as it was not finalized at exit. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-15-15-25-05.gh-issue-90473.MoPHYW.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-15-15-25-05.gh-issue-90473.MoPHYW.rst deleted file mode 100644 index 1f9f45a511fbae..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-15-15-25-05.gh-issue-90473.MoPHYW.rst +++ /dev/null @@ -1 +0,0 @@ -Decrease default recursion limit on WASI to address limited call stack size. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-17-20-41-43.gh-issue-92858.eIXJTn.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-17-20-41-43.gh-issue-92858.eIXJTn.rst deleted file mode 100644 index fa91d941b1759a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-17-20-41-43.gh-issue-92858.eIXJTn.rst +++ /dev/null @@ -1 +0,0 @@ -Improve error message for some suites with syntax error before ':' diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-08-32-33.gh-issue-92914.tJUeTD.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-18-08-32-33.gh-issue-92914.tJUeTD.rst deleted file mode 100644 index 1242a15c029dc1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-08-32-33.gh-issue-92914.tJUeTD.rst +++ /dev/null @@ -1 +0,0 @@ -Always round the allocated size for lists up to the nearest even number. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-12-55-35.gh-issue-90690.TKuoTa.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-18-12-55-35.gh-issue-90690.TKuoTa.rst deleted file mode 100644 index 6c5aa91dfb9d3d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-12-55-35.gh-issue-90690.TKuoTa.rst +++ /dev/null @@ -1,2 +0,0 @@ -The PRECALL instruction has been removed. 
It offered only a small advantage -for specialization and is not needed in the vast majority of cases. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-18-34-45.gh-issue-92930.kpYPOb.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-18-18-34-45.gh-issue-92930.kpYPOb.rst deleted file mode 100644 index cd5d7b3214ea43..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-18-18-34-45.gh-issue-92930.kpYPOb.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a crash in ``_pickle.c`` from mutating collections during ``__reduce__`` or ``persistent_id``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-19-13-25-50.gh-issue-92955.kmNV33.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-19-13-25-50.gh-issue-92955.kmNV33.rst deleted file mode 100644 index 09f03e520c436a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-19-13-25-50.gh-issue-92955.kmNV33.rst +++ /dev/null @@ -1 +0,0 @@ -Fix memory leak in code object's lines and positions iterators as they were not finalized at exit. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-19-15-29-53.gh-issue-89914.8bAffH.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-19-15-29-53.gh-issue-89914.8bAffH.rst deleted file mode 100644 index d2156f8bbf3d20..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-19-15-29-53.gh-issue-89914.8bAffH.rst +++ /dev/null @@ -1,2 +0,0 @@ -The operand of the ``YIELD_VALUE`` instruction is set to the stack depth. -This is done to help frame handling on ``yield`` and may assist debuggers. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-20-09-25-34.gh-issue-93021.k3Aji2.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-20-09-25-34.gh-issue-93021.k3Aji2.rst deleted file mode 100644 index 8fdd8dfb4229a7..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-20-09-25-34.gh-issue-93021.k3Aji2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the :attr:`__text_signature__` for :meth:`__get__` methods implemented -in C. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-20-13-32-24.gh-issue-93012.e9B-pv.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-20-13-32-24.gh-issue-93012.e9B-pv.rst deleted file mode 100644 index 8de0f000647dc8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-20-13-32-24.gh-issue-93012.e9B-pv.rst +++ /dev/null @@ -1,8 +0,0 @@ -Added the new function :c:func:`PyType_FromMetaclass`, which generalizes the -existing :c:func:`PyType_FromModuleAndSpec` using an additional metaclass -argument. This is useful for language binding tools, where it can be used to -intercept type-related operations like subclassing or static attribute access -by specifying a metaclass with custom slots. - -Importantly, :c:func:`PyType_FromMetaclass` is available in the Limited API, -which provides a path towards migrating more binding tools onto the Stable ABI. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-21-23-21-37.gh-issue-93065.5I18WC.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-21-23-21-37.gh-issue-93065.5I18WC.rst deleted file mode 100644 index ea801653f75025..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-21-23-21-37.gh-issue-93065.5I18WC.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fix contextvars HAMT implementation to handle iteration over deep trees. - -The bug was discovered and fixed by Eli Libman. See -`MagicStack/immutables#84 `_ -for more details. 
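The ``gh-issue-93065`` entry just above fixes iteration over deep trees in the contextvars HAMT. A rough Python sketch of the affected pattern; the variable count here is an arbitrary illustration, not taken from the issue::

    import contextvars

    # Set enough context variables that the underlying HAMT grows several
    # levels deep, then iterate over a snapshot of the context.
    variables = [contextvars.ContextVar(f"var{i}") for i in range(20_000)]
    for i, var in enumerate(variables):
        var.set(i)

    snapshot = contextvars.copy_context()
    assert sum(1 for _ in snapshot) >= 20_000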
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-22-02-37-50.gh-issue-93061.r70Imp.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-22-02-37-50.gh-issue-93061.r70Imp.rst deleted file mode 100644 index d41e59028ad570..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-22-02-37-50.gh-issue-93061.r70Imp.rst +++ /dev/null @@ -1 +0,0 @@ -Backward jumps after ``async for`` loops are no longer given dubious line numbers. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-23-18-36-07.gh-issue-93143.X1Yqxm.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-23-18-36-07.gh-issue-93143.X1Yqxm.rst deleted file mode 100644 index 03994bcfdb56f8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-23-18-36-07.gh-issue-93143.X1Yqxm.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid ``NULL`` checks for uninitialized local variables by determining at compile time which variables must be initialized. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-24-14-35-48.gh-issue-93040.9X6Ofu.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-24-14-35-48.gh-issue-93040.9X6Ofu.rst deleted file mode 100644 index b2e527446ea7a0..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-24-14-35-48.gh-issue-93040.9X6Ofu.rst +++ /dev/null @@ -1 +0,0 @@ -Wraps unused parameters in ``Objects/obmalloc.c`` with ``Py_UNUSED``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-04-07-22.gh-issue-91924.-UyO4q.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-25-04-07-22.gh-issue-91924.-UyO4q.rst deleted file mode 100644 index 44866a03cf48b1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-04-07-22.gh-issue-91924.-UyO4q.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``__lltrace__`` debug feature if the stdout encoding is not UTF-8. Patch -by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-12-30-12.gh-issue-84694.5sjy2w.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-25-12-30-12.gh-issue-84694.5sjy2w.rst deleted file mode 100644 index c062d28b294868..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-12-30-12.gh-issue-84694.5sjy2w.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``--experimental-isolated-subinterpreters`` configure option and -``EXPERIMENTAL_ISOLATED_SUBINTERPRETERS`` macro have been removed. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-21-56-25.gh-issue-93223.gTOGVZ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-25-21-56-25.gh-issue-93223.gTOGVZ.rst deleted file mode 100644 index cac30e11215b4c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-25-21-56-25.gh-issue-93223.gTOGVZ.rst +++ /dev/null @@ -1 +0,0 @@ -When a bytecode instruction jumps to an unconditional jump instruction, the first instruction can often be optimized to target the unconditional jump's target directly. For tracing reasons, this would previously only occur if both instructions have the same line number. This also now occurs if the unconditional jump is artificial, i.e., if it has no associated line number. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-10-22-46.gh-issue-93345.gi1A4L.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-30-10-22-46.gh-issue-93345.gi1A4L.rst deleted file mode 100644 index 4cdb37cfe46981..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-10-22-46.gh-issue-93345.gi1A4L.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in substitution of a ``TypeVar`` in nested generic alias after -``TypeVarTuple``. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-14-50-03.gh-issue-93283.XDO2ZQ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-30-14-50-03.gh-issue-93283.XDO2ZQ.rst deleted file mode 100644 index 550e86988b25a7..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-14-50-03.gh-issue-93283.XDO2ZQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve error message for invalid syntax of conversion character in f-string -expressions. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-35-42.gh-issue-93354.RZk8gs.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-35-42.gh-issue-93354.RZk8gs.rst deleted file mode 100644 index dcfe6a9b6ba3a5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-35-42.gh-issue-93354.RZk8gs.rst +++ /dev/null @@ -1,3 +0,0 @@ -Use exponential backoff for specialization counters in the interpreter. Can -reduce the number of failed specializations significantly and avoid slowdown -for those parts of a program that are not suitable for specialization. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-51-11.gh-issue-93356.l5wnzW.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-51-11.gh-issue-93356.l5wnzW.rst deleted file mode 100644 index 9bb58468197e2d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-15-51-11.gh-issue-93356.l5wnzW.rst +++ /dev/null @@ -1 +0,0 @@ -Code for exception handlers is emitted at the end of the code unit's bytecode. This avoids one jump when no exception is raised. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-19-00-38.gh-issue-93359.zXV3A0.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-30-19-00-38.gh-issue-93359.zXV3A0.rst deleted file mode 100644 index 36e5e52390d7b4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-30-19-00-38.gh-issue-93359.zXV3A0.rst +++ /dev/null @@ -1,2 +0,0 @@ -Ensure that custom :mod:`ast` nodes without explicit end positions can be -compiled. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-05-31-16-36-30.gh-issue-93382.Jf6gAj.rst b/Misc/NEWS.d/next/Core and Builtins/2022-05-31-16-36-30.gh-issue-93382.Jf6gAj.rst deleted file mode 100644 index 1fe821edf5a148..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-05-31-16-36-30.gh-issue-93382.Jf6gAj.rst +++ /dev/null @@ -1,2 +0,0 @@ -Cache the result of :c:func:`PyCode_GetCode` function to restore the O(1) -lookup of the :attr:`~types.CodeType.co_code` attribute. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-01-17-47-40.gh-issue-93418.24dJuc.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-01-17-47-40.gh-issue-93418.24dJuc.rst deleted file mode 100644 index 74ad06bfeee7c4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-01-17-47-40.gh-issue-93418.24dJuc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed an assert where an f-string has an equal sign '=' following an -expression, but there's no trailing brace. For example, f"{i=". diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-02-08-28-55.gh-issue-93429.DZTWHx.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-02-08-28-55.gh-issue-93429.DZTWHx.rst deleted file mode 100644 index 02efedaa9645f0..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-02-08-28-55.gh-issue-93429.DZTWHx.rst +++ /dev/null @@ -1 +0,0 @@ -``LOAD_METHOD`` instruction has been removed. It was merged back into ``LOAD_ATTR``. 
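The ``gh-issue-93429`` entry just above folds ``LOAD_METHOD`` back into ``LOAD_ATTR``. One quick way to see the effect is to disassemble a method call; the exact listing naturally depends on the interpreter version::

    import dis

    # With the change, the attribute load for the call is emitted as
    # LOAD_ATTR (carrying a "method call" flag in its oparg) instead of a
    # separate LOAD_METHOD instruction.
    dis.dis("items.append(42)")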
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-02-23-00-08.gh-issue-93444.m63DIs.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-02-23-00-08.gh-issue-93444.m63DIs.rst deleted file mode 100644 index 23cc1bd3e0b4d2..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-02-23-00-08.gh-issue-93444.m63DIs.rst +++ /dev/null @@ -1 +0,0 @@ -Removed redundant fields from the compiler's basicblock struct: ``b_nofallthrough``, ``b_exit``, ``b_return``. They can be easily calculated from the opcode of the last instruction of the block. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-06-14-28-24.gh-issue-93533.lnC0CC.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-06-14-28-24.gh-issue-93533.lnC0CC.rst deleted file mode 100644 index 2d8ac7068af673..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-06-14-28-24.gh-issue-93533.lnC0CC.rst +++ /dev/null @@ -1 +0,0 @@ -Reduce the size of the inline cache for ``LOAD_METHOD`` by 2 bytes. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-09-09-08-29.gh-issue-93621.-_Pn1d.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-09-09-08-29.gh-issue-93621.-_Pn1d.rst deleted file mode 100644 index 388540982fd1ff..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-09-09-08-29.gh-issue-93621.-_Pn1d.rst +++ /dev/null @@ -1 +0,0 @@ -Change order of bytecode instructions emitted for :keyword:`with` and :keyword:`async with` to reduce the number of entries in the exception table. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-09-19-19-02.gh-issue-93461.5DqP1e.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-09-19-19-02.gh-issue-93461.5DqP1e.rst deleted file mode 100644 index f6ed14887eae1b..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-09-19-19-02.gh-issue-93461.5DqP1e.rst +++ /dev/null @@ -1,6 +0,0 @@ -:func:`importlib.invalidate_caches` now drops entries from -:data:`sys.path_importer_cache` with a relative path as name. This solves a -caching issue when a process changes its current working directory. - -``FileFinder`` no longer inserts a dot in the path, e.g. -``/egg/./spam`` is now ``/egg/spam``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-10-31-18.gh-issue-93662.-7RSC1.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-10-10-31-18.gh-issue-93662.-7RSC1.rst deleted file mode 100644 index e444a00cf7ecc0..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-10-31-18.gh-issue-93662.-7RSC1.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make sure that the end column offsets are correct in multi-line method -calls. Previously, the end column could precede the column offset. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-12-03-17.gh-issue-93671.idkQqG.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-10-12-03-17.gh-issue-93671.idkQqG.rst deleted file mode 100644 index a7757153359517..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-12-03-17.gh-issue-93671.idkQqG.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix some exponential backtrace case happening with deeply nested sequence -patterns in match statements. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-57-35.gh-issue-93678.1WBnHt.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-57-35.gh-issue-93678.1WBnHt.rst deleted file mode 100644 index 24a0d1042d81ae..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-10-16-57-35.gh-issue-93678.1WBnHt.rst +++ /dev/null @@ -1 +0,0 @@ -Refactor the compiler to reduce boilerplate and repetition. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-12-19-31-56.gh-issue-89828.bq02M7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-12-19-31-56.gh-issue-89828.bq02M7.rst deleted file mode 100644 index 14ca99e1458ce3..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-12-19-31-56.gh-issue-89828.bq02M7.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`types.GenericAlias` no longer relays the ``__class__`` attribute. -For example, ``isinstance(list[int], type)`` no longer returns ``True``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-13-10-48-09.gh-issue-93516.yJSait.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-13-10-48-09.gh-issue-93516.yJSait.rst deleted file mode 100644 index 5c22c7a67b6e51..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-13-10-48-09.gh-issue-93516.yJSait.rst +++ /dev/null @@ -1,2 +0,0 @@ -Lazily create a table mapping bytecode offsets to line numbers to speed up -calculation of line numbers when tracing. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-13-13-55-34.gh-issue-93516.HILrDl.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-13-13-55-34.gh-issue-93516.HILrDl.rst deleted file mode 100644 index a324c2dbcbe8a6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-13-13-55-34.gh-issue-93516.HILrDl.rst +++ /dev/null @@ -1,2 +0,0 @@ -Store offset of first traceable instruction in code object to avoid having -to recompute it for each instruction when tracing. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-15-11-16-13.gh-issue-93841.06zqX3.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-15-11-16-13.gh-issue-93841.06zqX3.rst deleted file mode 100644 index 179d3808e3b4d5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-15-11-16-13.gh-issue-93841.06zqX3.rst +++ /dev/null @@ -1,3 +0,0 @@ -When built with ``-enable-pystats``, ``sys._stats_on()``, -``sys._stats_off()``, ``sys._stats_clear()`` and ``sys._stats_dump()`` -functions have been added to enable gathering stats for parts of programs. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-15-16-45-53.gh-issue-93678.1I_ZT3.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-15-16-45-53.gh-issue-93678.1I_ZT3.rst deleted file mode 100644 index 3c99fd2ecf6633..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-15-16-45-53.gh-issue-93678.1I_ZT3.rst +++ /dev/null @@ -1 +0,0 @@ -Refactor compiler optimisation code so that it no longer needs the ``struct assembler`` and ``struct compiler`` passed around. Instead, each function takes the CFG and other data that it actually needs. This will make it possible to test this code directly. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-16-16-53-22.gh-issue-93911.RDwIiK.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-16-16-53-22.gh-issue-93911.RDwIiK.rst deleted file mode 100644 index 9efa994c1916dd..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-16-16-53-22.gh-issue-93911.RDwIiK.rst +++ /dev/null @@ -1 +0,0 @@ -Specialize ``LOAD_ATTR`` for ``property()`` attributes. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-17-16-30-24.gh-issue-93955.LmiAe9.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-17-16-30-24.gh-issue-93955.LmiAe9.rst deleted file mode 100644 index 3b2f0e8c32d745..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-17-16-30-24.gh-issue-93955.LmiAe9.rst +++ /dev/null @@ -1 +0,0 @@ -Improve performance of attribute lookups on objects with custom ``__getattribute__`` and ``__getattr__``. Patch by Ken Jin. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-18-17-00-33.gh-issue-93911.y286of.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-18-17-00-33.gh-issue-93911.y286of.rst deleted file mode 100644 index 7fc8f6a2d49293..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-18-17-00-33.gh-issue-93911.y286of.rst +++ /dev/null @@ -1 +0,0 @@ -Specialize ``LOAD_ATTR`` for objects with custom ``__getattribute__``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-20-13-48-57.gh-issue-94021.o78q3G.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-20-13-48-57.gh-issue-94021.o78q3G.rst deleted file mode 100644 index 0724c517b2ba06..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-20-13-48-57.gh-issue-94021.o78q3G.rst +++ /dev/null @@ -1 +0,0 @@ -Fix unreachable code warning in ``Python/specialize.c``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-23-12-10-39.gh-issue-94163.SqAfQq.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-23-12-10-39.gh-issue-94163.SqAfQq.rst deleted file mode 100644 index 3f82943cbfbe07..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-23-12-10-39.gh-issue-94163.SqAfQq.rst +++ /dev/null @@ -1,5 +0,0 @@ -Add :opcode:`BINARY_SLICE` and :opcode:`STORE_SLICE` instructions for more efficient handling -and better specialization of slicing operations, where the slice is explicit -in the source code. - - diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-24-14-06-20.gh-issue-93883.8jVQQ4.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-24-14-06-20.gh-issue-93883.8jVQQ4.rst deleted file mode 100644 index 53345577036a5a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-24-14-06-20.gh-issue-93883.8jVQQ4.rst +++ /dev/null @@ -1 +0,0 @@ -Revise the display strategy of traceback enhanced error locations. The indicators are only shown when the location doesn't span the whole line. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-25-10-19-43.gh-issue-87995.aMDHnp.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-25-10-19-43.gh-issue-87995.aMDHnp.rst deleted file mode 100644 index 4154ebce234958..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-25-10-19-43.gh-issue-87995.aMDHnp.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`types.MappingProxyType` instances are now hashable if the underlying -mapping is hashable. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-26-14-37-03.gh-issue-94192.ab7tn7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-26-14-37-03.gh-issue-94192.ab7tn7.rst deleted file mode 100644 index ebd8b04e45091e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-26-14-37-03.gh-issue-94192.ab7tn7.rst +++ /dev/null @@ -1 +0,0 @@ -Fix error for dictionary literals with invalid expression as value. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-10-08-06.gh-issue-94262.m-HWUZ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-28-10-08-06.gh-issue-94262.m-HWUZ.rst deleted file mode 100644 index 7ba39bbcfe21da..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-10-08-06.gh-issue-94262.m-HWUZ.rst +++ /dev/null @@ -1,3 +0,0 @@ -Don't create frame objects for incomplete frames. Prevents the creation of -generators and closures from being observable to Python and C extensions, -restoring the behavior of 3.10 and earlier. 
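Among the entries above, ``gh-issue-87995`` makes :class:`types.MappingProxyType` hashable whenever the wrapped mapping is hashable. A small sketch; ``FrozenMap`` is a made-up helper, since plain dicts remain unhashable::

    import types
    from collections.abc import Mapping

    class FrozenMap(Mapping):
        """A toy hashable mapping used only for illustration."""
        def __init__(self, *args, **kwargs):
            self._data = dict(*args, **kwargs)
        def __getitem__(self, key):
            return self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)
        def __hash__(self):
            return hash(frozenset(self._data.items()))

    print(hash(types.MappingProxyType(FrozenMap(answer=42))))  # now works
    try:
        hash(types.MappingProxyType({"answer": 42}))
    except TypeError:
        print("a proxy over a plain dict is still unhashable")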
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-12-41-17.gh-issue-88116.A7fEl_.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-28-12-41-17.gh-issue-88116.A7fEl_.rst deleted file mode 100644 index a8347cff09f39f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-12-41-17.gh-issue-88116.A7fEl_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue when reading line numbers from code objects if the encoded line -numbers are close to ``INT_MIN``. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-14-20-36.gh-issue-94360.DiEnen.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-28-14-20-36.gh-issue-94360.DiEnen.rst deleted file mode 100644 index 0a74ba38b0ac43..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-28-14-20-36.gh-issue-94360.DiEnen.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a tokenizer crash when reading encoded files with syntax errors from -``stdin`` with non utf-8 encoded text. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-29-15-45-04.gh-issue-94329.olUQyk.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-29-15-45-04.gh-issue-94329.olUQyk.rst deleted file mode 100644 index afd31b6e4dc44f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-29-15-45-04.gh-issue-94329.olUQyk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Compile and run code with unpacking of extremely large sequences (1000s of elements). -Such code failed to compile. It now compiles and runs correctly. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-29-22-18-36.gh-issue-91719.3APYYI.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-29-22-18-36.gh-issue-91719.3APYYI.rst deleted file mode 100644 index 0d085e88778372..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-29-22-18-36.gh-issue-91719.3APYYI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Reload ``opcode`` when raising ``unknown opcode error`` in the interpreter main loop, -for C compilers to generate dispatching code independently. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-06-30-15-07-26.gh-issue-94438.btzHSk.rst b/Misc/NEWS.d/next/Core and Builtins/2022-06-30-15-07-26.gh-issue-94438.btzHSk.rst deleted file mode 100644 index b08dd8f2ad160f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-06-30-15-07-26.gh-issue-94438.btzHSk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Account for instructions that can push NULL to the stack when setting line -number in a frame. Prevents some (unlikely) crashes. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-01-20-00-19.gh-issue-94485.mo5st7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-01-20-00-19.gh-issue-94485.mo5st7.rst deleted file mode 100644 index 14d90b7e764a77..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-01-20-00-19.gh-issue-94485.mo5st7.rst +++ /dev/null @@ -1,2 +0,0 @@ -Line number of a module's ``RESUME`` instruction is set to 0 as specified in -:pep:`626`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-06-14-02-26.gh-issue-92228.44Cbly.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-06-14-02-26.gh-issue-92228.44Cbly.rst deleted file mode 100644 index 458ad897cefcb6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-06-14-02-26.gh-issue-92228.44Cbly.rst +++ /dev/null @@ -1 +0,0 @@ -Disable the compiler's inline-small-exit-blocks optimization for exit blocks that are associated with source code lines. This fixes a bug where the debugger cannot tell where an exception handler ends and the following code block begins. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-07-21-13-25.gh-issue-94215._Sv9Ms.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-07-21-13-25.gh-issue-94215._Sv9Ms.rst deleted file mode 100644 index 07af472fbdcab4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-07-21-13-25.gh-issue-94215._Sv9Ms.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix an issue where exceptions raised by line-tracing events would cause -frames to be left in an invalid state, possibly resulting in a hard crash of -the interpreter. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-08-11-44-45.gh-issue-93252.i2358c.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-08-11-44-45.gh-issue-93252.i2358c.rst deleted file mode 100644 index 1cc2d8560e53e5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-08-11-44-45.gh-issue-93252.i2358c.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue that caused internal frames to outlive failed Python function -calls, possibly resulting in memory leaks or hard interpreter crashes. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-08-16-44-11.gh-issue-94694.VkL2CM.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-08-16-44-11.gh-issue-94694.VkL2CM.rst deleted file mode 100644 index 6434788140f420..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-08-16-44-11.gh-issue-94694.VkL2CM.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix an issue that could cause code with multi-line method lookups to have -misleading or incorrect column offset information. In some cases (when -compiling a hand-built AST) this could have resulted in a hard crash of the -interpreter. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-14-10-07-53.gh-issue-90699.x3aG9m.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-14-10-07-53.gh-issue-90699.x3aG9m.rst deleted file mode 100644 index 795f4df987eb90..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-14-10-07-53.gh-issue-90699.x3aG9m.rst +++ /dev/null @@ -1 +0,0 @@ -Fix reference counting bug in :meth:`bool.__repr__`. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-16-15-04.gh-issue-91153.HiBmtt.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-15-16-15-04.gh-issue-91153.HiBmtt.rst deleted file mode 100644 index 2caa0170f75bca..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-16-15-04.gh-issue-91153.HiBmtt.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue where a :class:`bytearray` item assignment could crash if it's -resized by the new value's :meth:`__index__` method. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-16-08.gh-issue-94822.zRRzBN.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-16-08.gh-issue-94822.zRRzBN.rst deleted file mode 100644 index 5b24918e497706..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-16-08.gh-issue-94822.zRRzBN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue where lookups of metaclass descriptors may be ignored when an -identically-named attribute also exists on the class itself. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-47-44.gh-issue-94893.YiJYcW.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-47-44.gh-issue-94893.YiJYcW.rst deleted file mode 100644 index 6384ef92c6543a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-15-22-47-44.gh-issue-94893.YiJYcW.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue where frame object manipulations could corrupt inline bytecode -caches. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-16-08-14-17.gh-issue-94869.eRwMsX.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-16-08-14-17.gh-issue-94869.eRwMsX.rst deleted file mode 100644 index 2ccf91b0cd99af..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-16-08-14-17.gh-issue-94869.eRwMsX.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the column offsets for some expressions in multi-line f-strings -:mod:`ast` nodes. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-17-15-54-29.gh-issue-91256.z7i7Q5.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-17-15-54-29.gh-issue-91256.z7i7Q5.rst deleted file mode 100644 index 802a614fd48d72..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-17-15-54-29.gh-issue-91256.z7i7Q5.rst +++ /dev/null @@ -1 +0,0 @@ -Ensures the program name is known for help text during interpreter startup. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-04-48-34.gh-issue-94947.df9gUw.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-18-04-48-34.gh-issue-94947.df9gUw.rst deleted file mode 100644 index 360ea67048fe22..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-04-48-34.gh-issue-94947.df9gUw.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`ast.parse` will no longer parse assignment expressions when passed ``feature_version`` less than ``(3, 8)``. Patch by Shantanu Jain. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-05-10-29.gh-issue-94949.OsZ7_s.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-18-05-10-29.gh-issue-94949.OsZ7_s.rst deleted file mode 100644 index bc452d434da0f6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-05-10-29.gh-issue-94949.OsZ7_s.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`ast.parse` will no longer parse parenthesized context managers when passed ``feature_version`` less than ``(3, 9)``. Patch by Shantanu Jain. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-14-19-21.gh-issue-94739.NQJQi7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-18-14-19-21.gh-issue-94739.NQJQi7.rst deleted file mode 100644 index 7476892c423b1a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-18-14-19-21.gh-issue-94739.NQJQi7.rst +++ /dev/null @@ -1 +0,0 @@ -Allow jumping within, out of, and across exception handlers in the debugger. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-04-34-56.gh-issue-94996.dV564A.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-19-04-34-56.gh-issue-94996.dV564A.rst deleted file mode 100644 index 90c9ada079e077..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-04-34-56.gh-issue-94996.dV564A.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`ast.parse` will no longer parse function definitions with positional-only params when passed ``feature_version`` less than ``(3, 8)``. Patch by Shantanu Jain. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-09-41-55.gh-issue-94938.xYBlM7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-19-09-41-55.gh-issue-94938.xYBlM7.rst deleted file mode 100644 index cc4feae685f23c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-09-41-55.gh-issue-94938.xYBlM7.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix error detection in some builtin functions when keyword argument name is -an instance of a str subclass with overloaded ``__eq__`` and ``__hash__``. -Previously it could cause SystemError or other undesired behavior. 
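Several entries above (``gh-issue-94947``, ``gh-issue-94949`` and ``gh-issue-94996``) make :func:`ast.parse` honour ``feature_version`` for syntax introduced in later releases. A brief sketch of the behaviour for assignment expressions::

    import ast

    # The walrus operator is a 3.8 feature, so a 3.7 feature_version now
    # rejects it instead of parsing it anyway.
    ast.parse("(x := 1)", feature_version=(3, 8))      # accepted
    try:
        ast.parse("(x := 1)", feature_version=(3, 7))
    except SyntaxError as exc:
        print("rejected:", exc)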
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-16-30-59.gh-issue-94036._6Utkm.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-19-16-30-59.gh-issue-94036._6Utkm.rst deleted file mode 100644 index b0f03677362631..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-19-16-30-59.gh-issue-94036._6Utkm.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix incorrect source location info for some multi-line attribute accesses -and method calls. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-20-13-46-01.gh-issue-91409.dhL8Zo.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-20-13-46-01.gh-issue-91409.dhL8Zo.rst deleted file mode 100644 index 2bc0d8224c6a00..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-20-13-46-01.gh-issue-91409.dhL8Zo.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix incorrect source location info caused by certain optimizations in the -bytecode compiler. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-21-17-54-52.gh-issue-95113.NnSLpT.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-21-17-54-52.gh-issue-95113.NnSLpT.rst deleted file mode 100644 index c2ff6c90ac8c11..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-21-17-54-52.gh-issue-95113.NnSLpT.rst +++ /dev/null @@ -1,4 +0,0 @@ -Replace all ``EXTENDED_ARG_QUICK`` instructions with basic -:opcode:`EXTENDED_ARG` instructions in unquickened code. Consumers of -non-adaptive bytecode should be able to handle extended arguments the same -way they were handled in CPython 3.10 and older. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-21-19-19-20.gh-issue-95060.4xdT1f.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-21-19-19-20.gh-issue-95060.4xdT1f.rst deleted file mode 100644 index 160999e82bf081..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-21-19-19-20.gh-issue-95060.4xdT1f.rst +++ /dev/null @@ -1,2 +0,0 @@ -Undocumented ``PyCode_Addr2Location`` function now properly returns when -``addrq`` argument is less than zero. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-22-12-53-34.gh-issue-94438.hNqACc.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-22-12-53-34.gh-issue-94438.hNqACc.rst deleted file mode 100644 index 2a7249a833c2ed..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-22-12-53-34.gh-issue-94438.hNqACc.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix an issue that caused extended opcode arguments and some conditional pops -to be ignored when calculating valid jump targets for assignments to the -``f_lineno`` attribute of frame objects. In some cases, this could cause -inconsistent internal state, resulting in a hard crash of the interpreter. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-23-19-16-25.gh-issue-93351.0Jyvu-.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-23-19-16-25.gh-issue-93351.0Jyvu-.rst deleted file mode 100644 index 97cf8055ac54b1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-23-19-16-25.gh-issue-93351.0Jyvu-.rst +++ /dev/null @@ -1,3 +0,0 @@ -:class:`ast.AST` node positions are now validated when provided to -:func:`compile` and other related functions. If invalid positions are -detected, a :exc:`ValueError` will be raised. 
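The ``gh-issue-93351`` entry just above makes :func:`compile` validate the position attributes of :class:`ast.AST` nodes. A hedged sketch of the kind of corrupted tree that is now reported with :exc:`ValueError` (the exact message may vary)::

    import ast

    tree = ast.parse("x = 1")
    # Corrupt the positions so the end line precedes the start line.
    node = tree.body[0]
    node.lineno, node.end_lineno = 2, 1
    try:
        compile(tree, "<example>", "exec")
    except ValueError as exc:
        print("invalid positions rejected:", exc)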
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-24-00-27-47.gh-issue-95185.ghYTZx.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-24-00-27-47.gh-issue-95185.ghYTZx.rst deleted file mode 100644 index de156bab2f51f1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-24-00-27-47.gh-issue-95185.ghYTZx.rst +++ /dev/null @@ -1,3 +0,0 @@ -Prevented crashes in the AST constructor when compiling some absurdly long -expressions like ``"+0"*1000000``. :exc:`RecursionError` is now raised -instead. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-26-09-31-12.gh-issue-93678.W8vvgT.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-26-09-31-12.gh-issue-93678.W8vvgT.rst deleted file mode 100644 index 6ff816a172cc27..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-26-09-31-12.gh-issue-93678.W8vvgT.rst +++ /dev/null @@ -1 +0,0 @@ -Add cfg_builder struct and refactor the relevant code so that a cfg can be constructed without an instance of the compiler struct. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-26-12-59-03.gh-issue-95245.GHWczn.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-26-12-59-03.gh-issue-95245.GHWczn.rst deleted file mode 100644 index d6dccc8fcad0b1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-26-12-59-03.gh-issue-95245.GHWczn.rst +++ /dev/null @@ -1,2 +0,0 @@ -Merge managed dict and values pointer into a single tagged pointer to save -one word in the pre-header. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-05-07.gh-issue-95324.28Q5u7.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-05-07.gh-issue-95324.28Q5u7.rst deleted file mode 100644 index 250385270e9487..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-05-07.gh-issue-95324.28Q5u7.rst +++ /dev/null @@ -1,2 +0,0 @@ -Emit a warning in debug mode if an object does not call -:c:func:`PyObject_GC_UnTrack` before deallocation. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-21-57.gh-issue-90081.HVAS5x.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-21-57.gh-issue-90081.HVAS5x.rst deleted file mode 100644 index a3be34c175af6e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-27-14-21-57.gh-issue-90081.HVAS5x.rst +++ /dev/null @@ -1,2 +0,0 @@ -Run Python code in tracer/profiler function at full speed. Fixes slowdown in -earlier versions of 3.11. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-28-08-33-31.gh-issue-95355.yN4XVk.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-28-08-33-31.gh-issue-95355.yN4XVk.rst deleted file mode 100644 index 6a289991e0d60f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-28-08-33-31.gh-issue-95355.yN4XVk.rst +++ /dev/null @@ -1 +0,0 @@ -``_PyPegen_Parser_New`` now properly detects token memory allocation errors. Patch by Honglin Zhu. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-28-19-07-06.gh-issue-87092.73IPS1.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-28-19-07-06.gh-issue-87092.73IPS1.rst deleted file mode 100644 index d4cc4bb9afc2ef..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-28-19-07-06.gh-issue-87092.73IPS1.rst +++ /dev/null @@ -1 +0,0 @@ -Create a 'jump target label' abstraction in the compiler so that the compiler's codegen stage does not work directly with basic blocks. This prepares the code for changes to the underlying CFG generation mechanism. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-31-03-22-58.gh-issue-91146.Y2Hziy.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-31-03-22-58.gh-issue-91146.Y2Hziy.rst deleted file mode 100644 index 9172ca298e8095..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-31-03-22-58.gh-issue-91146.Y2Hziy.rst +++ /dev/null @@ -1,2 +0,0 @@ -Reduce allocation size of :class:`list` from :meth:`str.split` -and :meth:`str.rsplit`. Patch by Dong-hee Na and Inada Naoki. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-31-13-23-12.gh-issue-95150.67FXVo.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-31-13-23-12.gh-issue-95150.67FXVo.rst deleted file mode 100644 index c3db4714188b3f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-31-13-23-12.gh-issue-95150.67FXVo.rst +++ /dev/null @@ -1,3 +0,0 @@ -Update code object hashing and equality to consider all debugging and -exception handling tables. This fixes an issue where certain non-identical -code objects could be "deduplicated" during compilation. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-04-18-46-54.gh-issue-95605.FbpCoG.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-04-18-46-54.gh-issue-95605.FbpCoG.rst deleted file mode 100644 index 49441c6b3118b4..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-04-18-46-54.gh-issue-95605.FbpCoG.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix misleading contents of error message when converting an all-whitespace -string to :class:`float`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-11-09-19-55.gh-issue-95876.YpQfoV.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-11-09-19-55.gh-issue-95876.YpQfoV.rst deleted file mode 100644 index 96b69015a5861a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-11-09-19-55.gh-issue-95876.YpQfoV.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix format string in ``_PyPegen_raise_error_known_location`` that can lead -to memory corruption on some 64bit systems. The function was building a -tuple with ``i`` (int) instead of ``n`` (Py_ssize_t) for Py_ssize_t -arguments. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-12-13-04-25.gh-issue-95922.YNCtyX.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-12-13-04-25.gh-issue-95922.YNCtyX.rst deleted file mode 100644 index 277d35deab3919..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-12-13-04-25.gh-issue-95922.YNCtyX.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed bug where the compiler's ``eliminate_empty_basic_blocks`` function -ignores the last block of the code unit. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-12-18-13-49.gh-issue-91210.AWMSLj.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-12-18-13-49.gh-issue-91210.AWMSLj.rst deleted file mode 100644 index f17d6ce5763ffd..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-12-18-13-49.gh-issue-91210.AWMSLj.rst +++ /dev/null @@ -1 +0,0 @@ -Improve error message when a parameter without a default value follows one with a default value, and show the same message, even when the non-default/default sequence is preceded by positional-only parameters. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-14-10-04-44.gh-issue-95977.gCTZb9.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-14-10-04-44.gh-issue-95977.gCTZb9.rst deleted file mode 100644 index b265c770233a33..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-14-10-04-44.gh-issue-95977.gCTZb9.rst +++ /dev/null @@ -1 +0,0 @@ -Optimized calling :meth:`~object.__get__` with vectorcall. 
Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-11-58-05.gh-issue-90997.bWwV8Q.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-15-11-58-05.gh-issue-90997.bWwV8Q.rst deleted file mode 100644 index 8db714e59e1592..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-11-58-05.gh-issue-90997.bWwV8Q.rst +++ /dev/null @@ -1,3 +0,0 @@ -Compile virtual :keyword:`try`/:keyword:`except` blocks to handle exceptions -raised during :meth:`~generator.close` or :meth:`~generator.throw` calls -through a suspended frame. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-12-41-14.gh-issue-95245.N4gOUV.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-15-12-41-14.gh-issue-95245.N4gOUV.rst deleted file mode 100644 index 4449ddd8ded8a0..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-12-41-14.gh-issue-95245.N4gOUV.rst +++ /dev/null @@ -1,3 +0,0 @@ -Reduces the size of a "simple" Python object from 8 to 6 words by moving the -weakreflist pointer into the pre-header directly before the object's -dict/values pointer. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-20-52-41.gh-issue-93678.X7GuIJ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-15-20-52-41.gh-issue-93678.X7GuIJ.rst deleted file mode 100644 index 9e2b90ba07a454..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-20-52-41.gh-issue-93678.X7GuIJ.rst +++ /dev/null @@ -1 +0,0 @@ -Added test a harness for direct unit tests of the compiler's optimization stage. The ``_testinternalcapi.optimize_cfg()`` function runs the optimiser on a sequence of instructions. The ``CfgOptimizationTestCase`` class in ``test.support`` has utilities for invoking the optimizer and checking the output. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-21-08-11.gh-issue-96005.6eoc8k.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-15-21-08-11.gh-issue-96005.6eoc8k.rst deleted file mode 100644 index 06e414bca0f9c0..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-15-21-08-11.gh-issue-96005.6eoc8k.rst +++ /dev/null @@ -1,4 +0,0 @@ -On WASI :data:`~errno.ENOTCAPABLE` is now mapped to :exc:`PermissionError`. -The :mod:`errno` modules exposes the new error number. ``getpath.py`` now -ignores :exc:`PermissionError` when it cannot open landmark files -``pybuilddir.txt`` and ``pyenv.cfg``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-18-13-47-59.gh-issue-96046.5Hqbka.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-18-13-47-59.gh-issue-96046.5Hqbka.rst deleted file mode 100644 index b8cb52d4b4ece9..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-18-13-47-59.gh-issue-96046.5Hqbka.rst +++ /dev/null @@ -1,4 +0,0 @@ -:c:func:`PyType_Ready` now initializes ``ht_cached_keys`` and performs -additional checks to ensure that type objects are properly configured. This -avoids crashes in 3rd party packages that don't use regular API to create -new types. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-19-06-51-17.gh-issue-96071.mVgPAo.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-19-06-51-17.gh-issue-96071.mVgPAo.rst deleted file mode 100644 index 37653ffac12418..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-19-06-51-17.gh-issue-96071.mVgPAo.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a deadlock in :c:func:`PyGILState_Ensure` when allocating new thread state. Patch by Kumar Aditya. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-20-18-36-40.gh-issue-96143.nh3GFM.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-20-18-36-40.gh-issue-96143.nh3GFM.rst deleted file mode 100644 index 30f44fd453a547..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-20-18-36-40.gh-issue-96143.nh3GFM.rst +++ /dev/null @@ -1,7 +0,0 @@ -Add a new ``-X perf`` Python command line option as well as -:func:`sys.activate_stack_trampoline` and :func:`sys.deactivate_stack_trampoline` -function in the :mod:`sys` module that allows to set/unset the interpreter in a -way that the Linux ``perf`` profiler can detect Python calls. The new -:func:`sys.is_stack_trampoline_active` function allows to query the state of the -perf trampoline. Design by Pablo Galindo. Patch by Pablo Galindo and Christian Heimes -with contributions from Gregory P. Smith [Google] and Mark Shannon. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-22-21-33-28.gh-issue-96187.W_6SRG.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-22-21-33-28.gh-issue-96187.W_6SRG.rst deleted file mode 100644 index fd194faa68545d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-22-21-33-28.gh-issue-96187.W_6SRG.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a bug that caused ``_PyCode_GetExtra`` to return garbage for negative -indexes. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-24-14-30-26.gh-issue-96237.msif5f.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-24-14-30-26.gh-issue-96237.msif5f.rst deleted file mode 100644 index cb8a1c0eb7c26f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-24-14-30-26.gh-issue-96237.msif5f.rst +++ /dev/null @@ -1,5 +0,0 @@ -The internal field ``_PyInterpreterFrame.f_func`` is renamed to -``_PyInterpreterFrame.f_funcobj`` and may be any object. The ``f_globals`` -and ``f_builtin`` fields may hold junk values. - -It is safest to treat the ``_PyInterpreterFrame`` struct as opaque. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-25-10-19-34.gh-issue-96268.AbYrLB.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-25-10-19-34.gh-issue-96268.AbYrLB.rst deleted file mode 100644 index 987d85ff3bab8e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-25-10-19-34.gh-issue-96268.AbYrLB.rst +++ /dev/null @@ -1,2 +0,0 @@ -Loading a file with invalid UTF-8 will now report the broken character at -the correct location. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-26-18-46-32.gh-issue-93554.QEaCcK.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-26-18-46-32.gh-issue-93554.QEaCcK.rst deleted file mode 100644 index dff12aef721b53..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-26-18-46-32.gh-issue-93554.QEaCcK.rst +++ /dev/null @@ -1,16 +0,0 @@ -Change the jump opcodes so that all conditional jumps are forward jumps. -Backward jumps are converted by the assembler into a conditional forward -jump whose target is the fallthrough block (and with a reversed condition), -followed by an unconditional backward jump. For example: - -``POP_JUMP_IF_TRUE BACKWARD_TARGET`` becomes ``POP_JUMP_IF_FALSE NEXT_BLOCK; -JUMP BACKWARD_TARGET``. - -All the directed conditional jump opcodes were removed: -``POP_JUMP_FORWARD_IF_TRUE``, ``POP_JUMP_BACKWARD_IF_TRUE``, -``POP_JUMP_FORWARD_IF_FALSE``, ``POP_JUMP_BACKWARD_IF_FALSE``, -``POP_JUMP_FORWARD_IF_NONE``, ``POP_JUMP_BACKWARD_IF_NONE``, -``POP_JUMP_FORWARD_IF_NOT_NONE``, ``POP_JUMP_BACKWARD_IF_NOT_NONE``. 
- -The corresponding opcodes without direction are no longer pseudo-instructions, -and they implement the forward conditional jumps. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-28-10-51-19.gh-issue-96352.jTLD2d.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-28-10-51-19.gh-issue-96352.jTLD2d.rst deleted file mode 100644 index 25ab9678715a92..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-28-10-51-19.gh-issue-96352.jTLD2d.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :exc:`AttributeError` missing ``name`` and ``obj`` attributes in -:meth:`object.__getattribute__`. Patch by Philip Georgi. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-29-00-37-21.gh-issue-96364.c-IVyb.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-29-00-37-21.gh-issue-96364.c-IVyb.rst deleted file mode 100644 index 8872f9a5a498d5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-29-00-37-21.gh-issue-96364.c-IVyb.rst +++ /dev/null @@ -1 +0,0 @@ -Fix text signatures of ``list.__getitem__`` and ``dict.__getitem__``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-29-13-06-58.gh-issue-95196.eGRR4b.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-29-13-06-58.gh-issue-95196.eGRR4b.rst deleted file mode 100644 index 37534fa1752550..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-29-13-06-58.gh-issue-95196.eGRR4b.rst +++ /dev/null @@ -1 +0,0 @@ -Disable incorrect pickling of the C implemented classmethod descriptors. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-31-18-46-13.gh-issue-96348.xzCoTP.rst b/Misc/NEWS.d/next/Core and Builtins/2022-08-31-18-46-13.gh-issue-96348.xzCoTP.rst deleted file mode 100644 index 5d3bd17b578669..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-31-18-46-13.gh-issue-96348.xzCoTP.rst +++ /dev/null @@ -1,2 +0,0 @@ -Emit a DeprecationWarning when :meth:`~generator.throw`, :meth:`~coroutine.throw` or :meth:`~agen.athrow` -are called with more than one argument. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-02-16-47-52.gh-issue-93911.vF-GWe.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-02-16-47-52.gh-issue-93911.vF-GWe.rst deleted file mode 100644 index b8dc0435377b5f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-02-16-47-52.gh-issue-93911.vF-GWe.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue that could prevent :opcode:`LOAD_ATTR` from specializing -properly when accessing properties. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-09-56-32.gh-issue-91079.H4-DdU.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-05-09-56-32.gh-issue-91079.H4-DdU.rst deleted file mode 100644 index 64606ac74a49bd..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-09-56-32.gh-issue-91079.H4-DdU.rst +++ /dev/null @@ -1,3 +0,0 @@ -Separate Python recursion checking from C recursion checking which reduces -the chance of C stack overflow and allows the recursion limit to be -increased safely. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-15-07-25.gh-issue-96582.HEsL5s.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-05-15-07-25.gh-issue-96582.HEsL5s.rst deleted file mode 100644 index 162f7baadf49d1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-15-07-25.gh-issue-96582.HEsL5s.rst +++ /dev/null @@ -1 +0,0 @@ -Fix possible ``NULL`` pointer dereference in ``_PyThread_CurrentFrames``. Patch by Kumar Aditya. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-16-43-44.gh-issue-96569.9lmTCC.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-05-16-43-44.gh-issue-96569.9lmTCC.rst deleted file mode 100644 index 4734d3d6ded12a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-16-43-44.gh-issue-96569.9lmTCC.rst +++ /dev/null @@ -1 +0,0 @@ -Remove two cases of undefined behavoir, by adding NULL checks. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-19-20-44.gh-issue-96587.bVxhX2.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-05-19-20-44.gh-issue-96587.bVxhX2.rst deleted file mode 100644 index 37e9dcbb11f030..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-05-19-20-44.gh-issue-96587.bVxhX2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Correctly raise ``SyntaxError`` on exception groups (:pep:`654`) on python -versions prior to 3.11 diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-11-19-03.gh-issue-90230.YOtzs5.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-06-11-19-03.gh-issue-90230.YOtzs5.rst deleted file mode 100644 index aac48e7d792f8d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-11-19-03.gh-issue-90230.YOtzs5.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix compiler warnings and test failures when building with -``--enable-pystats``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-14-26-36.gh-issue-96612.P4ZbeY.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-06-14-26-36.gh-issue-96612.P4ZbeY.rst deleted file mode 100644 index 52e92703c9c483..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-14-26-36.gh-issue-96612.P4ZbeY.rst +++ /dev/null @@ -1 +0,0 @@ -Make sure that incomplete frames do not show up in tracemalloc traces. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-22-13.gh-issue-96611.14wIX8.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-22-13.gh-issue-96611.14wIX8.rst deleted file mode 100644 index 08bd409bc9f997..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-22-13.gh-issue-96611.14wIX8.rst +++ /dev/null @@ -1,2 +0,0 @@ -When loading a file with invalid UTF-8 inside a multi-line string, a correct -SyntaxError is emitted. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-54-49.gh-issue-96572.8DRsaW.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-54-49.gh-issue-96572.8DRsaW.rst deleted file mode 100644 index 44cceb46c28c3e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-06-16-54-49.gh-issue-96572.8DRsaW.rst +++ /dev/null @@ -1 +0,0 @@ -Fix use after free in trace refs build mode. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-07-12-02-11.gh-issue-96636.YvN-K6.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-07-12-02-11.gh-issue-96636.YvN-K6.rst deleted file mode 100644 index e0fbd8761aa335..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-07-12-02-11.gh-issue-96636.YvN-K6.rst +++ /dev/null @@ -1,3 +0,0 @@ -Ensure that tracing, ``sys.setrace()``, is turned on immediately. In -pre-release versions of 3.11, some tracing events might have been lost when -turning on tracing in a ``__del__`` method or interrupt. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-07-13-38-37.gh-issue-96641.wky0Fc.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-07-13-38-37.gh-issue-96641.wky0Fc.rst deleted file mode 100644 index 51faca8716fbca..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-07-13-38-37.gh-issue-96641.wky0Fc.rst +++ /dev/null @@ -1 +0,0 @@ -Do not expose ``KeyWrapper`` in :mod:`_functools`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-09-13-13-27.gh-issue-96678.vMxi9F.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-09-13-13-27.gh-issue-96678.vMxi9F.rst deleted file mode 100644 index 575b52be2940dc..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-09-13-13-27.gh-issue-96678.vMxi9F.rst +++ /dev/null @@ -1 +0,0 @@ -Fix case of undefined behavior in ceval.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-11-00-37-50.gh-issue-90751.VE8-zf.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-11-00-37-50.gh-issue-90751.VE8-zf.rst deleted file mode 100644 index 0908f1cc066fd8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-11-00-37-50.gh-issue-90751.VE8-zf.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`memoryview` now supports half-floats. -Patch by Dong-hee Na and Antoine Pitrou. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-11-12-43-43.gh-issue-96751.anRT6a.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-11-12-43-43.gh-issue-96751.anRT6a.rst deleted file mode 100644 index fb5b73e1ac7727..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-11-12-43-43.gh-issue-96751.anRT6a.rst +++ /dev/null @@ -1 +0,0 @@ -Remove dead code from ``CALL_FUNCTION_EX`` opcode. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-12-15-15-04.gh-issue-90997.sZO8c9.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-12-15-15-04.gh-issue-90997.sZO8c9.rst deleted file mode 100644 index 4a43e2babcdef8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-12-15-15-04.gh-issue-90997.sZO8c9.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve the performance of reading and writing inline bytecode caches on -some platforms. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-12-16-58-22.gh-issue-96754.0GRme5.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-12-16-58-22.gh-issue-96754.0GRme5.rst deleted file mode 100644 index beac84ee822a84..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-12-16-58-22.gh-issue-96754.0GRme5.rst +++ /dev/null @@ -1,3 +0,0 @@ -Make sure that all frame objects created are created from valid interpreter -frames. Prevents the possibility of invalid frames in backtraces and signal -handlers. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-12-06-46.gh-issue-96678.NqGFyb.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-13-12-06-46.gh-issue-96678.NqGFyb.rst deleted file mode 100644 index bdd33c8d2ca914..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-12-06-46.gh-issue-96678.NqGFyb.rst +++ /dev/null @@ -1 +0,0 @@ -Fix undefined behaviour in C code of null pointer arithmetic. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-21-45-07.gh-issue-95778.Oll4_5.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-13-21-45-07.gh-issue-95778.Oll4_5.rst deleted file mode 100644 index f202afc1f2595e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-13-21-45-07.gh-issue-95778.Oll4_5.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``PyLong_FromString`` function was refactored to make it more maintainable -and extensible. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-12-36-13.gh-issue-96864.PLU3i8.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-16-12-36-13.gh-issue-96864.PLU3i8.rst deleted file mode 100644 index c0d41ae7d21ed9..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-12-36-13.gh-issue-96864.PLU3i8.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a possible assertion failure, fatal error, or :exc:`SystemError` if a -line tracing event raises an exception while opcode tracing is enabled. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-16-54-35.gh-issue-96387.GRzewg.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-16-16-54-35.gh-issue-96387.GRzewg.rst deleted file mode 100644 index 611ab94bc636a6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-16-54-35.gh-issue-96387.GRzewg.rst +++ /dev/null @@ -1,5 +0,0 @@ -At Python exit, sometimes a thread holding the GIL can wait forever for a -thread (usually a daemon thread) which requested to drop the GIL, whereas -the thread already exited. To fix the race condition, the thread which -requested the GIL drop now resets its request before exiting. Issue -discovered and analyzed by Mingliang ZHAO. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-19-02-40.gh-issue-95778.cJmnst.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-16-19-02-40.gh-issue-95778.cJmnst.rst deleted file mode 100644 index ebf63778a605d7..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-16-19-02-40.gh-issue-95778.cJmnst.rst +++ /dev/null @@ -1,3 +0,0 @@ -When :exc:`ValueError` is raised if an integer is larger than the limit, -mention the :func:`sys.set_int_max_str_digits` function in the error message. -Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-17-17-08-01.gh-issue-90994.f0H2Yd.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-17-17-08-01.gh-issue-90994.f0H2Yd.rst new file mode 100644 index 00000000000000..5edc1ea65d8b88 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-09-17-17-08-01.gh-issue-90994.f0H2Yd.rst @@ -0,0 +1,4 @@ +Improve error messages when there's a syntax error with call arguments. The following three cases are covered: +- No value is assigned to a named argument, eg ``foo(a=)``. +- A value is assigned to a star argument, eg ``foo(*args=[0])``. +- A value is assigned to a double-star keyword argument, eg ``foo(**kwarg={'a': 0})``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-18-08-47-40.gh-issue-96821.Co2iOq.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-18-08-47-40.gh-issue-96821.Co2iOq.rst deleted file mode 100644 index 4fd0532e827d18..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-18-08-47-40.gh-issue-96821.Co2iOq.rst +++ /dev/null @@ -1 +0,0 @@ -Fix undefined behaviour in ``_testcapimodule.c``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-20-11-06-45.gh-issue-95921.dkcRQn.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-20-11-06-45.gh-issue-95921.dkcRQn.rst deleted file mode 100644 index 0c8b704c9510a6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-20-11-06-45.gh-issue-95921.dkcRQn.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix overly-broad source position information for chained comparisons used as -branching conditions. 
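A minimal sketch of the improved diagnostics described in the gh-issue-90994 entry above; the exact message wording is not quoted there, so only the fact that each snippet raises :exc:`SyntaxError` is assumed::

    for src in ("foo(a=)", "foo(*args=[0])", "foo(**kwarg={'a': 0})"):
        try:
            # Each snippet is invalid call-argument syntax and should raise
            # SyntaxError with one of the new, more specific messages.
            compile(src, "<example>", "eval")
        except SyntaxError as exc:
            print(f"{src!r}: {exc.msg}")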
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-21-14-38-31.gh-issue-96848.WuoLzU.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-21-14-38-31.gh-issue-96848.WuoLzU.rst deleted file mode 100644 index a9b04ce87d4d0d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-21-14-38-31.gh-issue-96848.WuoLzU.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix command line parsing: reject :option:`-X int_max_str_digits <-X>` option -with no value (invalid) when the :envvar:`PYTHONINTMAXSTRDIGITS` environment -variable is set to a valid limit. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-21-16-06-37.gh-issue-96975.BmE0XY.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-21-16-06-37.gh-issue-96975.BmE0XY.rst deleted file mode 100644 index e6fcb84d7bff23..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-21-16-06-37.gh-issue-96975.BmE0XY.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash occurring when :c:func:`PyEval_GetFrame` is called while the -topmost Python frame is in a partially-initialized state. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-27-11-59-13.gh-issue-96670.XrBBit.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-27-11-59-13.gh-issue-96670.XrBBit.rst deleted file mode 100644 index ba8e2bbaf62ba1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-27-11-59-13.gh-issue-96670.XrBBit.rst +++ /dev/null @@ -1,2 +0,0 @@ -The parser now raises :exc:`SyntaxError` when parsing source code containing -null bytes. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-29-15-19-29.gh-issue-94526.wq5m6T.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-29-15-19-29.gh-issue-94526.wq5m6T.rst deleted file mode 100644 index 59e389a64ee07d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-29-15-19-29.gh-issue-94526.wq5m6T.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix the Python path configuration used to initialized :data:`sys.path` at -Python startup. Paths are no longer encoded to UTF-8/strict to avoid encoding -errors if it contains surrogate characters (bytes paths are decoded with the -surrogateescape error handler). Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-09-30-13-26-58.gh-issue-97670.n61vMR.rst b/Misc/NEWS.d/next/Core and Builtins/2022-09-30-13-26-58.gh-issue-97670.n61vMR.rst deleted file mode 100644 index 50b47871a5fdef..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-09-30-13-26-58.gh-issue-97670.n61vMR.rst +++ /dev/null @@ -1,6 +0,0 @@ -Remove the :func:`sys.getdxp` function and the ``Tools/scripts/analyze_dxp.py`` -script. DXP stands for "dynamic execution pairs". They were related to -``DYNAMIC_EXECUTION_PROFILE`` and ``DXPAIRS`` macros which have been removed in -Python 3.11. Python can now be built with :option:`./configure --enable-pystats -<--enable-pystats>` to gather statistics on Python opcodes. Patch by Victor -Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-01-08-55-09.gh-issue-97591.pw6kkH.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-01-08-55-09.gh-issue-97591.pw6kkH.rst deleted file mode 100644 index d3a5867db7fce2..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-01-08-55-09.gh-issue-97591.pw6kkH.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a missing incref/decref pair in `Exception.__setstate__()`. -Patch by Ofey Chan. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-03-13-35-48.gh-issue-97752.0xTjJY.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-03-13-35-48.gh-issue-97752.0xTjJY.rst deleted file mode 100644 index c656350703489e..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-03-13-35-48.gh-issue-97752.0xTjJY.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix possible data corruption or crashes when accessing the ``f_back`` member -of newly-created generator or coroutine frames. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-04-02-00-10.gh-issue-97779.f3N1hI.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-04-02-00-10.gh-issue-97779.f3N1hI.rst deleted file mode 100644 index 6115218088651b..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-04-02-00-10.gh-issue-97779.f3N1hI.rst +++ /dev/null @@ -1 +0,0 @@ -Ensure that all Python frame objects are backed by "complete" frames. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-04-17-02-18.gh-issue-97850.E3QTRA.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-04-17-02-18.gh-issue-97850.E3QTRA.rst deleted file mode 100644 index f880d9663842ff..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-04-17-02-18.gh-issue-97850.E3QTRA.rst +++ /dev/null @@ -1 +0,0 @@ -Long deprecated, ``module_repr()`` should now be completely eradicated. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-00-37-27.gh-issue-65961.z0Ys0y.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-00-37-27.gh-issue-65961.z0Ys0y.rst deleted file mode 100644 index 0c034263c1a89a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-00-37-27.gh-issue-65961.z0Ys0y.rst +++ /dev/null @@ -1,5 +0,0 @@ -When ``__package__`` is different than ``__spec__.parent``, raise a -``DeprecationWarning`` instead of ``ImportWarning``. - -Also remove ``importlib.util.set_package()`` which was scheduled for -removal. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-11-44-52.gh-issue-91053.f5Bo3p.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-11-44-52.gh-issue-91053.f5Bo3p.rst new file mode 100644 index 00000000000000..59bb12caef740d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-11-44-52.gh-issue-91053.f5Bo3p.rst @@ -0,0 +1,4 @@ +Optimizing interpreters and JIT compilers may need to invalidate internal +metadata when functions are modified. This change adds the ability to +provide a callback that will be invoked each time a function is created, +modified, or destroyed. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-10-16-53-40.gh-issue-99298.HqRJES.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-10-16-53-40.gh-issue-99298.HqRJES.rst new file mode 100644 index 00000000000000..79a5b3ba98cf66 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-10-16-53-40.gh-issue-99298.HqRJES.rst @@ -0,0 +1,3 @@ +Remove the remaining error paths for attribute specializations, and refuse +to specialize attribute accesses on types that haven't had +:c:func:`PyType_Ready` called on them yet. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-10-17-09-16.gh-issue-98686.bmAKwr.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-10-17-09-16.gh-issue-98686.bmAKwr.rst new file mode 100644 index 00000000000000..b4a40d1684299f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-10-17-09-16.gh-issue-98686.bmAKwr.rst @@ -0,0 +1,2 @@ +Remove the ``BINARY_OP_GENERIC`` and ``COMPARE_OP_GENERIC`` +"specializations". 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-11-14-04-01.gh-issue-99377.-CJvWn.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-11-14-04-01.gh-issue-99377.-CJvWn.rst new file mode 100644 index 00000000000000..631b9ca23044a7 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-11-14-04-01.gh-issue-99377.-CJvWn.rst @@ -0,0 +1 @@ +Add audit events for thread creation and clear operations. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-12-01-39-57.gh-issue-99370._cu32j.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-12-01-39-57.gh-issue-99370._cu32j.rst new file mode 100644 index 00000000000000..142f91ccd92e50 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-12-01-39-57.gh-issue-99370._cu32j.rst @@ -0,0 +1,2 @@ +Fix zip path for venv created from a non-installed python on POSIX +platforms. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-16-21-35-30.gh-issue-99547.p_c_bp.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-16-21-35-30.gh-issue-99547.p_c_bp.rst new file mode 100644 index 00000000000000..7e3c52924213ec --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-16-21-35-30.gh-issue-99547.p_c_bp.rst @@ -0,0 +1 @@ +Add a function to os.path to check if a path is a junction: isjunction. Add similar functionality to pathlib.Path as is_junction. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-18-11-24-25.gh-issue-99553.F64h-n.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-18-11-24-25.gh-issue-99553.F64h-n.rst new file mode 100644 index 00000000000000..8d9f55d6d9d079 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-18-11-24-25.gh-issue-99553.F64h-n.rst @@ -0,0 +1,2 @@ +Fix bug where an :exc:`ExceptionGroup` subclass can wrap a +:exc:`BaseException`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-19-22-27-52.gh-issue-99581.yKYPbf.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-19-22-27-52.gh-issue-99581.yKYPbf.rst new file mode 100644 index 00000000000000..8071fd130dd685 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-19-22-27-52.gh-issue-99581.yKYPbf.rst @@ -0,0 +1,3 @@ +Fixed a bug that was causing a buffer overflow if the tokenizer copies a +line missing the newline character from a file that is as long as the +available tokenizer buffer. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-21-11-27-14.gh-issue-99578.DcKoBJ.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-21-11-27-14.gh-issue-99578.DcKoBJ.rst new file mode 100644 index 00000000000000..9321cef77eed77 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-21-11-27-14.gh-issue-99578.DcKoBJ.rst @@ -0,0 +1,3 @@ +Fix a reference bug in :func:`_imp.create_builtin()` after the creation of the +first sub-interpreter for modules ``builtins`` and ``sys``. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-23-18-16-18.gh-issue-99708.7MuaiR.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-23-18-16-18.gh-issue-99708.7MuaiR.rst new file mode 100644 index 00000000000000..47f385c2eefee7 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-23-18-16-18.gh-issue-99708.7MuaiR.rst @@ -0,0 +1 @@ +Fix bug where compiler crashes on an if expression with an empty body block.
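A minimal usage sketch for the junction helpers described in the gh-issue-99547 entry above; the path is hypothetical, and on platforms without junction support both calls are expected to simply return ``False``::

    import os.path
    from pathlib import Path

    target = r"C:\Users\Public\ExampleJunction"  # hypothetical path
    print(os.path.isjunction(target))   # False unless it really is a junction
    print(Path(target).is_junction())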
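A small sketch of the rule guarded by the gh-issue-99553 fix above, assuming that plain :exc:`ExceptionGroup` subclasses may only wrap :exc:`Exception` instances and that :exc:`BaseExceptionGroup` is required for other :exc:`BaseException` types::

    class MyGroup(ExceptionGroup):
        pass

    try:
        # Wrapping a BaseException such as KeyboardInterrupt should now
        # raise TypeError; BaseExceptionGroup is the type meant for that.
        MyGroup("oops", [KeyboardInterrupt()])
    except TypeError as exc:
        print(exc)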
diff --git a/Misc/NEWS.d/next/Documentation/2017-12-10-19-13-39.bpo-13553.gQbZs4.rst b/Misc/NEWS.d/next/Documentation/2017-12-10-19-13-39.bpo-13553.gQbZs4.rst deleted file mode 100644 index 23d3c1555e3707..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2017-12-10-19-13-39.bpo-13553.gQbZs4.rst +++ /dev/null @@ -1 +0,0 @@ -Document tkinter.Tk args. diff --git a/Misc/NEWS.d/next/Documentation/2019-09-12-08-28-17.bpo-38056.6ktYkc.rst b/Misc/NEWS.d/next/Documentation/2019-09-12-08-28-17.bpo-38056.6ktYkc.rst deleted file mode 100644 index 2e6b70fd84b6d9..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2019-09-12-08-28-17.bpo-38056.6ktYkc.rst +++ /dev/null @@ -1 +0,0 @@ -Overhaul the :ref:`error-handlers` documentation in :mod:`codecs`. diff --git a/Misc/NEWS.d/next/Documentation/2021-04-01-08-09-34.bpo-43689.mqCfLe.rst b/Misc/NEWS.d/next/Documentation/2021-04-01-08-09-34.bpo-43689.mqCfLe.rst deleted file mode 100644 index 5cc13d7068c95f..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2021-04-01-08-09-34.bpo-43689.mqCfLe.rst +++ /dev/null @@ -1 +0,0 @@ -The ``Differ`` documentation now also mentions other whitespace characters, which make it harder to understand the diff output. diff --git a/Misc/NEWS.d/next/Documentation/2022-01-13-16-03-15.bpo-40838.k3NVCf.rst b/Misc/NEWS.d/next/Documentation/2022-01-13-16-03-15.bpo-40838.k3NVCf.rst deleted file mode 100644 index 0f071ab64dbecc..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-01-13-16-03-15.bpo-40838.k3NVCf.rst +++ /dev/null @@ -1,2 +0,0 @@ -Document that :func:`inspect.getdoc`, :func:`inspect.getmodule`, and -:func:`inspect.getsourcefile` might return ``None``. diff --git a/Misc/NEWS.d/next/Documentation/2022-03-30-17-56-01.bpo-47161.gesHfS.rst b/Misc/NEWS.d/next/Documentation/2022-03-30-17-56-01.bpo-47161.gesHfS.rst deleted file mode 100644 index 6b552daa7c13af..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-03-30-17-56-01.bpo-47161.gesHfS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Document that :class:`pathlib.PurePath` does not collapse -initial double slashes because they denote UNC paths. diff --git a/Misc/NEWS.d/next/Documentation/2022-05-18-23-58-26.gh-issue-92240.bHvYiz.rst b/Misc/NEWS.d/next/Documentation/2022-05-18-23-58-26.gh-issue-92240.bHvYiz.rst deleted file mode 100644 index 53b2a66c9779c2..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-05-18-23-58-26.gh-issue-92240.bHvYiz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added release dates for -"What's New in Python 3.X" for 3.0, 3.1, 3.2, 3.8 and 3.10 diff --git a/Misc/NEWS.d/next/Documentation/2022-05-20-18-42-10.gh-issue-93031.c2RdJe.rst b/Misc/NEWS.d/next/Documentation/2022-05-20-18-42-10.gh-issue-93031.c2RdJe.rst deleted file mode 100644 index c46b45d2433cb8..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-05-20-18-42-10.gh-issue-93031.c2RdJe.rst +++ /dev/null @@ -1 +0,0 @@ -Update tutorial introduction output to use 3.10+ SyntaxError invalid range. diff --git a/Misc/NEWS.d/next/Documentation/2022-05-26-11-33-23.gh-issue-86438.kEGGmK.rst b/Misc/NEWS.d/next/Documentation/2022-05-26-11-33-23.gh-issue-86438.kEGGmK.rst deleted file mode 100644 index 75abfdd63b8b2e..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-05-26-11-33-23.gh-issue-86438.kEGGmK.rst +++ /dev/null @@ -1,3 +0,0 @@ -Clarify that :option:`-W` and :envvar:`PYTHONWARNINGS` are matched literally -and case-insensitively, rather than as regular expressions, in -:mod:`warnings`. 
diff --git a/Misc/NEWS.d/next/Documentation/2022-05-26-14-51-25.gh-issue-88831.5Cccr5.rst b/Misc/NEWS.d/next/Documentation/2022-05-26-14-51-25.gh-issue-88831.5Cccr5.rst deleted file mode 100644 index 983bea981a9b20..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-05-26-14-51-25.gh-issue-88831.5Cccr5.rst +++ /dev/null @@ -1 +0,0 @@ -Augmented documentation of asyncio.create_task(). Clarified the need to keep strong references to tasks and added a code snippet detailing how to to this. diff --git a/Misc/NEWS.d/next/Documentation/2022-05-29-21-22-54.gh-issue-86986.lFXw8j.rst b/Misc/NEWS.d/next/Documentation/2022-05-29-21-22-54.gh-issue-86986.lFXw8j.rst deleted file mode 100644 index 1db028c30f67a4..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-05-29-21-22-54.gh-issue-86986.lFXw8j.rst +++ /dev/null @@ -1 +0,0 @@ -The minimum Sphinx version required to build the documentation is now 3.2. diff --git a/Misc/NEWS.d/next/Documentation/2022-06-15-12-12-49.gh-issue-87260.epyI7D.rst b/Misc/NEWS.d/next/Documentation/2022-06-15-12-12-49.gh-issue-87260.epyI7D.rst deleted file mode 100644 index 4c6cee86ca115f..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-06-15-12-12-49.gh-issue-87260.epyI7D.rst +++ /dev/null @@ -1 +0,0 @@ -Align :mod:`sqlite3` argument specs with the actual implementation. diff --git a/Misc/NEWS.d/next/Documentation/2022-06-16-10-10-59.gh-issue-61162.1ypkG8.rst b/Misc/NEWS.d/next/Documentation/2022-06-16-10-10-59.gh-issue-61162.1ypkG8.rst deleted file mode 100644 index c8b3a222232189..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-06-16-10-10-59.gh-issue-61162.1ypkG8.rst +++ /dev/null @@ -1 +0,0 @@ -Clarify :mod:`sqlite3` behavior when :ref:`sqlite3-connection-context-manager`. diff --git a/Misc/NEWS.d/next/Documentation/2022-06-19-18-18-22.gh-issue-86128.39DDTD.rst b/Misc/NEWS.d/next/Documentation/2022-06-19-18-18-22.gh-issue-86128.39DDTD.rst deleted file mode 100644 index bab006856deea7..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-06-19-18-18-22.gh-issue-86128.39DDTD.rst +++ /dev/null @@ -1 +0,0 @@ -Document a limitation in ThreadPoolExecutor where its exit handler is executed before any handlers in atexit. diff --git a/Misc/NEWS.d/next/Documentation/2022-07-07-08-42-05.gh-issue-94321.pmCIPb.rst b/Misc/NEWS.d/next/Documentation/2022-07-07-08-42-05.gh-issue-94321.pmCIPb.rst deleted file mode 100644 index c1a8dcd8535383..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-07-07-08-42-05.gh-issue-94321.pmCIPb.rst +++ /dev/null @@ -1,2 +0,0 @@ -Document the :pep:`246` style protocol type -:class:`sqlite3.PrepareProtocol`. diff --git a/Misc/NEWS.d/next/Documentation/2022-07-29-09-04-02.gh-issue-95415.LKTyw6.rst b/Misc/NEWS.d/next/Documentation/2022-07-29-09-04-02.gh-issue-95415.LKTyw6.rst deleted file mode 100644 index ece36bc4d1cedb..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-07-29-09-04-02.gh-issue-95415.LKTyw6.rst +++ /dev/null @@ -1,2 +0,0 @@ -Use consistent syntax for platform availability. The directive now supports -a content body and emits a warning when it encounters an unknown platform. 
diff --git a/Misc/NEWS.d/next/Documentation/2022-07-29-23-02-19.gh-issue-95451.-tgB93.rst b/Misc/NEWS.d/next/Documentation/2022-07-29-23-02-19.gh-issue-95451.-tgB93.rst deleted file mode 100644 index 3a7b8a122b7a1c..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-07-29-23-02-19.gh-issue-95451.-tgB93.rst +++ /dev/null @@ -1,3 +0,0 @@ -Update library documentation with -:ref:`availability information ` -on WebAssembly platforms ``wasm32-emscripten`` and ``wasm32-wasi``. diff --git a/Misc/NEWS.d/next/Documentation/2022-07-30-00-23-11.gh-issue-95454.we7AFm.rst b/Misc/NEWS.d/next/Documentation/2022-07-30-00-23-11.gh-issue-95454.we7AFm.rst deleted file mode 100644 index 6440c23fd5000a..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-07-30-00-23-11.gh-issue-95454.we7AFm.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replaced incorrectly written true/false values -in documentiation. Patch by Robert O'Shea diff --git a/Misc/NEWS.d/next/Documentation/2022-08-03-13-35-08.gh-issue-91207.eJ4pPf.rst b/Misc/NEWS.d/next/Documentation/2022-08-03-13-35-08.gh-issue-91207.eJ4pPf.rst deleted file mode 100644 index 8c7391f7edf78b..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-08-03-13-35-08.gh-issue-91207.eJ4pPf.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix stylesheet not working in Windows CHM htmlhelp docs -and add warning that they are deprecated. -Contributed by C.A.M. Gerlach. diff --git a/Misc/NEWS.d/next/Documentation/2022-08-12-01-12-52.gh-issue-95588.PA0FI7.rst b/Misc/NEWS.d/next/Documentation/2022-08-12-01-12-52.gh-issue-95588.PA0FI7.rst deleted file mode 100644 index c070bbc19517fc..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-08-12-01-12-52.gh-issue-95588.PA0FI7.rst +++ /dev/null @@ -1,6 +0,0 @@ -Clarified the conflicting advice given in the :mod:`ast` documentation about -:func:`ast.literal_eval` being "safe" for use on untrusted input while at -the same time warning that it can crash the process. The latter statement is -true and is deemed unfixable without a large amount of work unsuitable for a -bugfix. So we keep the warning and no longer claim that ``literal_eval`` is -safe. diff --git a/Misc/NEWS.d/next/Documentation/2022-08-13-20-34-51.gh-issue-95957.W9ZZAx.rst b/Misc/NEWS.d/next/Documentation/2022-08-13-20-34-51.gh-issue-95957.W9ZZAx.rst deleted file mode 100644 index c617bd42abd95a..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-08-13-20-34-51.gh-issue-95957.W9ZZAx.rst +++ /dev/null @@ -1,2 +0,0 @@ -What's New 3.11 now has instructions for how to provide compiler and -linker flags for Tcl/Tk and OpenSSL on RHEL 7 and CentOS 7. diff --git a/Misc/NEWS.d/next/Documentation/2022-08-19-17-07-45.gh-issue-96098.nDp43u.rst b/Misc/NEWS.d/next/Documentation/2022-08-19-17-07-45.gh-issue-96098.nDp43u.rst deleted file mode 100644 index 5ead20bbb5357b..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-08-19-17-07-45.gh-issue-96098.nDp43u.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve discoverability of the higher level concurrent.futures module by -providing clearer links from the lower level threading and multiprocessing -modules. diff --git a/Misc/NEWS.d/next/Documentation/2022-09-01-17-03-04.gh-issue-96432.1EJ1-4.rst b/Misc/NEWS.d/next/Documentation/2022-09-01-17-03-04.gh-issue-96432.1EJ1-4.rst deleted file mode 100644 index a5858f432932ca..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-09-01-17-03-04.gh-issue-96432.1EJ1-4.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fraction literals now support whitespace around the forward slash, -`Fraction('2 / 3')`. 
diff --git a/Misc/NEWS.d/next/Documentation/2022-10-02-10-58-52.gh-issue-97741.39l023.rst b/Misc/NEWS.d/next/Documentation/2022-10-02-10-58-52.gh-issue-97741.39l023.rst deleted file mode 100644 index 8da9c92f6fd879..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2022-10-02-10-58-52.gh-issue-97741.39l023.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``!`` in c domain ref target syntax via a ``conf.py`` patch, so it works -as intended to disable ref target resolution. diff --git a/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst b/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst new file mode 100644 index 00000000000000..54e421d19d9da3 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst @@ -0,0 +1 @@ +Document that calling variadic functions with ctypes requires special care on macOS/arm64 (and possibly other platforms). diff --git a/Misc/NEWS.d/next/IDLE/2022-07-28-18-56-57.gh-issue-89610.hcosiM.rst b/Misc/NEWS.d/next/IDLE/2022-07-28-18-56-57.gh-issue-89610.hcosiM.rst deleted file mode 100644 index 0d283711e3e867..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-07-28-18-56-57.gh-issue-89610.hcosiM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add .pyi as a recognized extension for IDLE on macOS. This allows opening -stub files by double clicking on them in the Finder. diff --git a/Misc/NEWS.d/next/IDLE/2022-07-29-11-08-52.gh-issue-95411.dazlqH.rst b/Misc/NEWS.d/next/IDLE/2022-07-29-11-08-52.gh-issue-95411.dazlqH.rst deleted file mode 100644 index 94ca8b2c2ea959..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-07-29-11-08-52.gh-issue-95411.dazlqH.rst +++ /dev/null @@ -1 +0,0 @@ -Enable using IDLE's module browser with .pyw files. diff --git a/Misc/NEWS.d/next/IDLE/2022-07-30-15-10-39.gh-issue-95471.z3scVG.rst b/Misc/NEWS.d/next/IDLE/2022-07-30-15-10-39.gh-issue-95471.z3scVG.rst deleted file mode 100644 index 73a9d8058965cb..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-07-30-15-10-39.gh-issue-95471.z3scVG.rst +++ /dev/null @@ -1 +0,0 @@ -In the Edit menu, move ``Select All`` and add a new separator. diff --git a/Misc/NEWS.d/next/IDLE/2022-07-31-22-15-14.gh-issue-95511.WX6PmB.rst b/Misc/NEWS.d/next/IDLE/2022-07-31-22-15-14.gh-issue-95511.WX6PmB.rst deleted file mode 100644 index 803fa5f2a2ab0d..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-07-31-22-15-14.gh-issue-95511.WX6PmB.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the Shell context menu copy-with-prompts bug of copying an extra line -when one selects whole lines. diff --git a/Misc/NEWS.d/next/IDLE/2022-08-01-23-31-48.gh-issue-95191.U7vryB.rst b/Misc/NEWS.d/next/IDLE/2022-08-01-23-31-48.gh-issue-95191.U7vryB.rst deleted file mode 100644 index 94d3dbbd529f03..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-08-01-23-31-48.gh-issue-95191.U7vryB.rst +++ /dev/null @@ -1 +0,0 @@ -Include prompts when saving Shell (interactive input and output). diff --git a/Misc/NEWS.d/next/IDLE/2022-08-04-20-07-51.gh-issue-65802.xnThWe.rst b/Misc/NEWS.d/next/IDLE/2022-08-04-20-07-51.gh-issue-65802.xnThWe.rst deleted file mode 100644 index a62a784b6e6926..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2022-08-04-20-07-51.gh-issue-65802.xnThWe.rst +++ /dev/null @@ -1 +0,0 @@ -Document handling of extensions in Save As dialogs. 
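A hedged sketch related to the gh-issue-92892 entry above; the entry only states that variadic calls need special care on macOS/arm64, so declaring ``argtypes`` for the fixed parameters below is an assumption rather than a quote from the new documentation::

    import ctypes
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("c"))
    # Declare the fixed (non-variadic) parameters explicitly; on arm64 the
    # fixed and variadic arguments are passed differently, so the defaults
    # ctypes applies to undeclared arguments may not be enough (assumption).
    libc.printf.argtypes = [ctypes.c_char_p]
    libc.printf(b"value: %d\n", 42)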
diff --git a/Misc/NEWS.d/next/Library/2017-07-31-13-35-28.bpo-26253.8v_sCs.rst b/Misc/NEWS.d/next/Library/2017-07-31-13-35-28.bpo-26253.8v_sCs.rst deleted file mode 100644 index fa0dc95b7d62b3..00000000000000 --- a/Misc/NEWS.d/next/Library/2017-07-31-13-35-28.bpo-26253.8v_sCs.rst +++ /dev/null @@ -1,2 +0,0 @@ -Allow adjustable compression level for tarfile streams in -:func:`tarfile.open`. diff --git a/Misc/NEWS.d/next/Library/2018-09-23-07-47-29.bpo-32990.2FVVTU.rst b/Misc/NEWS.d/next/Library/2018-09-23-07-47-29.bpo-32990.2FVVTU.rst deleted file mode 100644 index 7d7f7e3faaec2b..00000000000000 --- a/Misc/NEWS.d/next/Library/2018-09-23-07-47-29.bpo-32990.2FVVTU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Support reading wave files with the ``WAVE_FORMAT_EXTENSIBLE`` format in the -:mod:`wave` module. diff --git a/Misc/NEWS.d/next/Library/2018-09-28-22-18-03.bpo-34828.5Zyi_S.rst b/Misc/NEWS.d/next/Library/2018-09-28-22-18-03.bpo-34828.5Zyi_S.rst deleted file mode 100644 index b0e10a158b5b19..00000000000000 --- a/Misc/NEWS.d/next/Library/2018-09-28-22-18-03.bpo-34828.5Zyi_S.rst +++ /dev/null @@ -1 +0,0 @@ -:meth:`sqlite3.Connection.iterdump` now handles databases that use ``AUTOINCREMENT`` in one or more tables. diff --git a/Misc/NEWS.d/next/Library/2019-09-25-00-37-51.bpo-38267.X9Jb5V.rst b/Misc/NEWS.d/next/Library/2019-09-25-00-37-51.bpo-38267.X9Jb5V.rst deleted file mode 100644 index b842fdcb73e99c..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-09-25-00-37-51.bpo-38267.X9Jb5V.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add *timeout* parameter to :meth:`asyncio.loop.shutdown_default_executor`. -The default value is ``None``, which means the executor will be given an unlimited amount of time. -When called from :class:`asyncio.Runner` or :func:`asyncio.run`, the default timeout is 5 minutes. diff --git a/Misc/NEWS.d/next/Library/2020-01-09-01-57-12.bpo-39264.GsBL9-.rst b/Misc/NEWS.d/next/Library/2020-01-09-01-57-12.bpo-39264.GsBL9-.rst deleted file mode 100644 index 5f9ffdffce5c26..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-01-09-01-57-12.bpo-39264.GsBL9-.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed :meth:`collections.UserDict.get` to not call -:meth:`__missing__` when a value is not found. This matches the behavior of -:class:`dict`. Patch by Bar Harel. diff --git a/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst b/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst new file mode 100644 index 00000000000000..ae2fdd9b84a00e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst @@ -0,0 +1,2 @@ +Rename the *fmt* parameter of the pure Python implementation of +:meth:`datetime.date.strftime` to *format*. diff --git a/Misc/NEWS.d/next/Library/2020-09-28-04-56-04.bpo-14243.YECnxv.rst b/Misc/NEWS.d/next/Library/2020-09-28-04-56-04.bpo-14243.YECnxv.rst deleted file mode 100644 index 267535452ef146..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-09-28-04-56-04.bpo-14243.YECnxv.rst +++ /dev/null @@ -1 +0,0 @@ -The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter *delete_on_close* diff --git a/Misc/NEWS.d/next/Library/2020-10-15-18-37-12.bpo-42047.XDdoSF.rst b/Misc/NEWS.d/next/Library/2020-10-15-18-37-12.bpo-42047.XDdoSF.rst deleted file mode 100644 index 4c23763cf8d89b..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-10-15-18-37-12.bpo-42047.XDdoSF.rst +++ /dev/null @@ -1 +0,0 @@ -Add :func:`threading.get_native_id` support for DragonFly BSD. Patch by David Carlier. 
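With the rename described in the bpo-41260 entry above, the pure Python implementation of :meth:`datetime.date.strftime` should accept the same keyword as the C implementation; a minimal sketch::

    import datetime

    d = datetime.date(2022, 11, 1)
    # The keyword is now spelled "format" in both implementations
    # (previously the pure Python version only accepted "fmt").
    print(d.strftime(format="%Y-%m-%d"))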
diff --git a/Misc/NEWS.d/next/Library/2021-05-22-07-58-59.bpo-42627.EejtD0.rst b/Misc/NEWS.d/next/Library/2021-05-22-07-58-59.bpo-42627.EejtD0.rst deleted file mode 100644 index f165b9ced05d90..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-05-22-07-58-59.bpo-42627.EejtD0.rst +++ /dev/null @@ -1 +0,0 @@ -Fix incorrect parsing of Windows registry proxy settings diff --git a/Misc/NEWS.d/next/Library/2021-08-27-18-07-35.bpo-44173.oW92Ev.rst b/Misc/NEWS.d/next/Library/2021-08-27-18-07-35.bpo-44173.oW92Ev.rst deleted file mode 100644 index abc98266afb0ce..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-08-27-18-07-35.bpo-44173.oW92Ev.rst +++ /dev/null @@ -1 +0,0 @@ -Enable fast seeking of uncompressed unencrypted :class:`zipfile.ZipExtFile` diff --git a/Misc/NEWS.d/next/Library/2021-08-29-19-59-16.bpo-45046.eGq0NC.rst b/Misc/NEWS.d/next/Library/2021-08-29-19-59-16.bpo-45046.eGq0NC.rst deleted file mode 100644 index 8072afaf445c50..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-08-29-19-59-16.bpo-45046.eGq0NC.rst +++ /dev/null @@ -1,7 +0,0 @@ -Add support of context managers in :mod:`unittest`: methods -:meth:`~unittest.TestCase.enterContext` and -:meth:`~unittest.TestCase.enterClassContext` of class -:class:`~unittest.TestCase`, method -:meth:`~unittest.IsolatedAsyncioTestCase.enterAsyncContext` of class -:class:`~unittest.IsolatedAsyncioTestCase` and function -:func:`unittest.enterModuleContext`. diff --git a/Misc/NEWS.d/next/Library/2021-12-27-15-32-15.bpo-45924.0ZpHX2.rst b/Misc/NEWS.d/next/Library/2021-12-27-15-32-15.bpo-45924.0ZpHX2.rst deleted file mode 100644 index 5cda22737adb79..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-12-27-15-32-15.bpo-45924.0ZpHX2.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :mod:`asyncio` incorrect traceback when future's exception is raised multiple times. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-01-03-15-07-06.bpo-46197.Z0djv6.rst b/Misc/NEWS.d/next/Library/2022-01-03-15-07-06.bpo-46197.Z0djv6.rst deleted file mode 100644 index 7a3b2d59dfaf42..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-01-03-15-07-06.bpo-46197.Z0djv6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :mod:`ensurepip` environment isolation for subprocess running ``pip``. diff --git a/Misc/NEWS.d/next/Library/2022-01-09-14-23-00.bpo-28249.4dzB80.rst b/Misc/NEWS.d/next/Library/2022-01-09-14-23-00.bpo-28249.4dzB80.rst deleted file mode 100644 index b5f1312d768669..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-01-09-14-23-00.bpo-28249.4dzB80.rst +++ /dev/null @@ -1,2 +0,0 @@ -Set :attr:`doctest.DocTest.lineno` to ``None`` when object does not have -:attr:`__doc__`. diff --git a/Misc/NEWS.d/next/Library/2022-02-05-18-46-54.bpo-46642.YI6nHQ.rst b/Misc/NEWS.d/next/Library/2022-02-05-18-46-54.bpo-46642.YI6nHQ.rst deleted file mode 100644 index 2d2815c1e4b00d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-02-05-18-46-54.bpo-46642.YI6nHQ.rst +++ /dev/null @@ -1 +0,0 @@ -Improve error message when trying to subclass an instance of :data:`typing.TypeVar`, :data:`typing.ParamSpec`, :data:`typing.TypeVarTuple`, etc. Based on patch by Gregory Beauregard. 
diff --git a/Misc/NEWS.d/next/Library/2022-02-09-23-44-27.bpo-45393.9v5Y8U.rst b/Misc/NEWS.d/next/Library/2022-02-09-23-44-27.bpo-45393.9v5Y8U.rst deleted file mode 100644 index 0a239b07d76bd1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-02-09-23-44-27.bpo-45393.9v5Y8U.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the formatting for ``await x`` and ``not x`` in the operator precedence -table when using the :func:`help` system. diff --git a/Misc/NEWS.d/next/Library/2022-02-15-12-40-48.bpo-46755.zePJfx.rst b/Misc/NEWS.d/next/Library/2022-02-15-12-40-48.bpo-46755.zePJfx.rst deleted file mode 100644 index 399caf72535932..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-02-15-12-40-48.bpo-46755.zePJfx.rst +++ /dev/null @@ -1,2 +0,0 @@ -In :class:`QueueHandler`, clear ``stack_info`` from :class:`LogRecord` to -prevent stack trace from being written twice. diff --git a/Misc/NEWS.d/next/Library/2022-02-21-01-37-00.bpo-42777.nWK3E6.rst b/Misc/NEWS.d/next/Library/2022-02-21-01-37-00.bpo-42777.nWK3E6.rst deleted file mode 100644 index 24912380fb590a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-02-21-01-37-00.bpo-42777.nWK3E6.rst +++ /dev/null @@ -1 +0,0 @@ -Implement :meth:`pathlib.Path.is_mount` for Windows paths. diff --git a/Misc/NEWS.d/next/Library/2022-03-08-04-46-44.bpo-46951.SWAz97.rst b/Misc/NEWS.d/next/Library/2022-03-08-04-46-44.bpo-46951.SWAz97.rst deleted file mode 100644 index cd7601aa8c4de7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-03-08-04-46-44.bpo-46951.SWAz97.rst +++ /dev/null @@ -1 +0,0 @@ -Order the contents of zipapp archives, to make builds more reproducible. diff --git a/Misc/NEWS.d/next/Library/2022-03-16-14-24-14.bpo-47025.qtT3CE.rst b/Misc/NEWS.d/next/Library/2022-03-16-14-24-14.bpo-47025.qtT3CE.rst deleted file mode 100644 index 1c7c7ace9706d7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-03-16-14-24-14.bpo-47025.qtT3CE.rst +++ /dev/null @@ -1 +0,0 @@ -Drop support for :class:`bytes` on :attr:`sys.path`. diff --git a/Misc/NEWS.d/next/Library/2022-03-19-04-41-42.bpo-47063.nwRfUo.rst b/Misc/NEWS.d/next/Library/2022-03-19-04-41-42.bpo-47063.nwRfUo.rst deleted file mode 100644 index b889d3c6520753..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-03-19-04-41-42.bpo-47063.nwRfUo.rst +++ /dev/null @@ -1 +0,0 @@ -Add an index_pages parameter to support using non-default index page names. diff --git a/Misc/NEWS.d/next/Library/2022-04-01-09-43-54.bpo-32547.NIUiNC.rst b/Misc/NEWS.d/next/Library/2022-04-01-09-43-54.bpo-32547.NIUiNC.rst deleted file mode 100644 index 4599b73cc342ca..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-01-09-43-54.bpo-32547.NIUiNC.rst +++ /dev/null @@ -1 +0,0 @@ -The constructors for :class:`~csv.DictWriter` and :class:`~csv.DictReader` now coerce the ``fieldnames`` argument to a :class:`list` if it is an iterator. diff --git a/Misc/NEWS.d/next/Library/2022-04-01-12-35-44.gh-issue-90005.pvaLHQ.rst b/Misc/NEWS.d/next/Library/2022-04-01-12-35-44.gh-issue-90005.pvaLHQ.rst deleted file mode 100644 index ef6a881a4d094d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-01-12-35-44.gh-issue-90005.pvaLHQ.rst +++ /dev/null @@ -1 +0,0 @@ -:mod:`ctypes` dependency ``libffi`` is now detected with ``pkg-config``. 
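The bpo-32547 entry above makes ``csv.DictWriter`` and ``csv.DictReader`` coerce an iterator ``fieldnames`` argument to a list. A short sketch with made-up field names:

```python
import csv
import io

fields = iter(["name", "score"])              # an iterator, not a list
buf = io.StringIO()
writer = csv.DictWriter(buf, fieldnames=fields)
print(writer.fieldnames)                      # ['name', 'score'] -- coerced to a list
writer.writeheader()
writer.writerow({"name": "ada", "score": 3})
print(buf.getvalue())
```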
diff --git a/Misc/NEWS.d/next/Library/2022-04-03-11-25-02.bpo-41287.8CTdwf.rst b/Misc/NEWS.d/next/Library/2022-04-03-11-25-02.bpo-41287.8CTdwf.rst deleted file mode 100644 index ef80ec664c4a86..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-03-11-25-02.bpo-41287.8CTdwf.rst +++ /dev/null @@ -1 +0,0 @@ -Fix handling of the ``doc`` argument in subclasses of :func:`property`. diff --git a/Misc/NEWS.d/next/Library/2022-04-03-19-40-09.bpo-39064.76PbIz.rst b/Misc/NEWS.d/next/Library/2022-04-03-19-40-09.bpo-39064.76PbIz.rst deleted file mode 100644 index 34d31527e332dc..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-03-19-40-09.bpo-39064.76PbIz.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`zipfile.ZipFile` now raises :exc:`zipfile.BadZipFile` instead of ``ValueError`` when reading a -corrupt zip file in which the central directory offset is negative. diff --git a/Misc/NEWS.d/next/Library/2022-04-08-22-12-11.bpo-47231.lvyglt.rst b/Misc/NEWS.d/next/Library/2022-04-08-22-12-11.bpo-47231.lvyglt.rst deleted file mode 100644 index ee05c5e2856756..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-08-22-12-11.bpo-47231.lvyglt.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue with inconsistent trailing slashes in tarfile longname directories. diff --git a/Misc/NEWS.d/next/Library/2022-04-11-16-55-41.gh-issue-91456.DK3KKl.rst b/Misc/NEWS.d/next/Library/2022-04-11-16-55-41.gh-issue-91456.DK3KKl.rst deleted file mode 100644 index a4c853149bdf02..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-11-16-55-41.gh-issue-91456.DK3KKl.rst +++ /dev/null @@ -1,3 +0,0 @@ -Deprecate current default auto() behavior: In 3.13 the default will be for -for auto() to always return the largest member value incremented by -1, and to raise if incompatible value types are used. diff --git a/Misc/NEWS.d/next/Library/2022-04-12-18-05-40.gh-issue-91447.N_Fs4H.rst b/Misc/NEWS.d/next/Library/2022-04-12-18-05-40.gh-issue-91447.N_Fs4H.rst deleted file mode 100644 index 6f9be2d3e9be41..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-12-18-05-40.gh-issue-91447.N_Fs4H.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix findtext in the xml module to only give an empty string when the text -attribute is set to None. diff --git a/Misc/NEWS.d/next/Library/2022-04-15-11-29-38.gh-issue-91539.7WgVuA.rst b/Misc/NEWS.d/next/Library/2022-04-15-11-29-38.gh-issue-91539.7WgVuA.rst deleted file mode 100644 index 16d61f1b91102d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-15-11-29-38.gh-issue-91539.7WgVuA.rst +++ /dev/null @@ -1 +0,0 @@ -Improve performance of ``urllib.request.getproxies_environment`` when there are many environment variables diff --git a/Misc/NEWS.d/next/Library/2022-04-15-13-16-25.gh-issue-91581.9OGsrN.rst b/Misc/NEWS.d/next/Library/2022-04-15-13-16-25.gh-issue-91581.9OGsrN.rst deleted file mode 100644 index 1c3008f4255783..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-15-13-16-25.gh-issue-91581.9OGsrN.rst +++ /dev/null @@ -1,6 +0,0 @@ -Remove an unhandled error case in the C implementation of calls to -:meth:`datetime.fromtimestamp ` with no time -zone (i.e. getting a local time from an epoch timestamp). This should have no -user-facing effect other than giving a possibly more accurate error message -when called with timestamps that fall on 10000-01-01 in the local time. Patch -by Paul Ganssle. 
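The bpo-39064 entry above means corrupt archives raise ``zipfile.BadZipFile`` rather than ``ValueError``. A hedged sketch; the helper name and sample bytes are illustrative:

```python
import io
import zipfile

def try_open(data: bytes):
    try:
        return zipfile.ZipFile(io.BytesIO(data))
    except zipfile.BadZipFile as exc:
        # Corrupt archives (including ones with a negative central-directory
        # offset) are reported via BadZipFile.
        print("rejected:", exc)
        return None

try_open(b"definitely not a zip archive")
```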
diff --git a/Misc/NEWS.d/next/Library/2022-04-15-17-38-55.gh-issue-91577.Ah7cLL.rst b/Misc/NEWS.d/next/Library/2022-04-15-17-38-55.gh-issue-91577.Ah7cLL.rst deleted file mode 100644 index 0f44f34011f9c7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-15-17-38-55.gh-issue-91577.Ah7cLL.rst +++ /dev/null @@ -1 +0,0 @@ -Move imports in :class:`~multiprocessing.SharedMemory` methods to module level so that they can be executed late in python finalization. diff --git a/Misc/NEWS.d/next/Library/2022-04-15-22-07-36.gh-issue-90622.0C6l8h.rst b/Misc/NEWS.d/next/Library/2022-04-15-22-07-36.gh-issue-90622.0C6l8h.rst deleted file mode 100644 index 5db0a1bbe721df..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-15-22-07-36.gh-issue-90622.0C6l8h.rst +++ /dev/null @@ -1,4 +0,0 @@ -Worker processes for :class:`concurrent.futures.ProcessPoolExecutor` are no -longer spawned on demand (a feature added in 3.9) when the multiprocessing -context start method is ``"fork"`` as that can lead to deadlocks in the -child processes due to a fork happening while threads are running. diff --git a/Misc/NEWS.d/next/Library/2022-04-21-19-14-29.gh-issue-91760.54AR-m.rst b/Misc/NEWS.d/next/Library/2022-04-21-19-14-29.gh-issue-91760.54AR-m.rst deleted file mode 100644 index ac3e7cdd4bace2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-21-19-14-29.gh-issue-91760.54AR-m.rst +++ /dev/null @@ -1,5 +0,0 @@ -Apply more strict rules for numerical group references and group names in -regular expressions. Only sequence of ASCII digits is now accepted as -a numerical reference. The group name in -bytes patterns and replacement strings can now only contain ASCII letters -and digits and underscore. diff --git a/Misc/NEWS.d/next/Library/2022-04-24-22-26-45.gh-issue-81790.M5Rvpm.rst b/Misc/NEWS.d/next/Library/2022-04-24-22-26-45.gh-issue-81790.M5Rvpm.rst deleted file mode 100644 index 8894493e97410f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-24-22-26-45.gh-issue-81790.M5Rvpm.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`os.path.splitdrive` now understands DOS device paths with UNC -links (beginning ``\\?\UNC\``). Contributed by Barney Gale. diff --git a/Misc/NEWS.d/next/Library/2022-04-25-10-23-01.gh-issue-91810.DOHa6B.rst b/Misc/NEWS.d/next/Library/2022-04-25-10-23-01.gh-issue-91810.DOHa6B.rst deleted file mode 100644 index 0711f8466b818f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-25-10-23-01.gh-issue-91810.DOHa6B.rst +++ /dev/null @@ -1,5 +0,0 @@ -:class:`~xml.etree.ElementTree.ElementTree` method -:meth:`~xml.etree.ElementTree.ElementTree.write` and function -:func:`~xml.etree.ElementTree.tostring` now use the text file's encoding -("UTF-8" if not available) instead of locale encoding in XML declaration -when ``encoding="unicode"`` is specified. diff --git a/Misc/NEWS.d/next/Library/2022-04-26-18-37-24.gh-issue-91968.fuuH1_.rst b/Misc/NEWS.d/next/Library/2022-04-26-18-37-24.gh-issue-91968.fuuH1_.rst deleted file mode 100644 index f16f5d3a668bd8..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-04-26-18-37-24.gh-issue-91968.fuuH1_.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``SO_RTABLE`` and ``SO_USER_COOKIE`` constants to :mod:`socket`. 
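The gh-issue-81790 entry above teaches ``os.path.splitdrive`` about ``\\?\UNC\`` device paths. A sketch using ``ntpath`` directly so it runs on any platform; the server and share names are made up, and the printed split reflects the behaviour described in the entry:

```python
import ntpath

drive, rest = ntpath.splitdrive(r"\\?\UNC\server\share\folder\file.txt")
print(drive)  # expected: \\?\UNC\server\share
print(rest)   # expected: \folder\file.txt
```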
diff --git a/Misc/NEWS.d/next/Library/2022-05-06-13-00-57.gh-issue-92391.s-Lase.rst b/Misc/NEWS.d/next/Library/2022-05-06-13-00-57.gh-issue-92391.s-Lase.rst deleted file mode 100644 index e042671dae816b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-06-13-00-57.gh-issue-92391.s-Lase.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :meth:`~object.__class_getitem__` to :class:`csv.DictReader` and -:class:`csv.DictWriter`, allowing them to be parameterized at runtime. -Patch by Marc Mueller. diff --git a/Misc/NEWS.d/next/Library/2022-05-08-18-51-14.gh-issue-89336.TL6ip7.rst b/Misc/NEWS.d/next/Library/2022-05-08-18-51-14.gh-issue-89336.TL6ip7.rst deleted file mode 100644 index b4c58c0e48b25e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-08-18-51-14.gh-issue-89336.TL6ip7.rst +++ /dev/null @@ -1,4 +0,0 @@ -Removed :mod:`configparser` module APIs: -the ``SafeConfigParser`` class alias, the ``ParsingError.filename`` -property and parameter, and the ``ConfigParser.readfp`` method, all -of which were deprecated since Python 3.2. diff --git a/Misc/NEWS.d/next/Library/2022-05-08-19-21-14.gh-issue-84131.rG5kI7.rst b/Misc/NEWS.d/next/Library/2022-05-08-19-21-14.gh-issue-84131.rG5kI7.rst deleted file mode 100644 index 4a930bde01153d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-08-19-21-14.gh-issue-84131.rG5kI7.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :class:`pathlib.Path` deprecated method ``link_to`` has been removed. -Use 3.10's :meth:`~pathlib.Path.hardlink_to` method instead as its semantics -are consistent with that of :meth:`~pathlib.Path.symlink_to`. diff --git a/Misc/NEWS.d/next/Library/2022-05-09-01-27-25.gh-issue-92531.vV7S_O.rst b/Misc/NEWS.d/next/Library/2022-05-09-01-27-25.gh-issue-92531.vV7S_O.rst deleted file mode 100644 index 574fa6c4d97991..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-09-01-27-25.gh-issue-92531.vV7S_O.rst +++ /dev/null @@ -1,3 +0,0 @@ -The statistics.median_grouped() function now always return a float. -Formerly, it did not convert the input type when for sequences of length -one. diff --git a/Misc/NEWS.d/next/Library/2022-05-09-09-28-02.gh-issue-92530.M4Q1RS.rst b/Misc/NEWS.d/next/Library/2022-05-09-09-28-02.gh-issue-92530.M4Q1RS.rst deleted file mode 100644 index 8bb8ca0488c962..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-09-09-28-02.gh-issue-92530.M4Q1RS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue that occurred after interrupting -:func:`threading.Condition.notify`. diff --git a/Misc/NEWS.d/next/Library/2022-05-09-11-55-04.gh-issue-92547.CzVZft.rst b/Misc/NEWS.d/next/Library/2022-05-09-11-55-04.gh-issue-92547.CzVZft.rst deleted file mode 100644 index 52626974c40198..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-09-11-55-04.gh-issue-92547.CzVZft.rst +++ /dev/null @@ -1,6 +0,0 @@ -Remove undocumented :mod:`sqlite3` features deprecated in Python 3.10: - -* ``sqlite3.enable_shared_cache()`` -* ``sqlite3.OptimizedUnicode`` - -Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2022-05-09-21-31-41.gh-issue-92445.tJosdm.rst b/Misc/NEWS.d/next/Library/2022-05-09-21-31-41.gh-issue-92445.tJosdm.rst deleted file mode 100644 index ba69a011601fd0..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-09-21-31-41.gh-issue-92445.tJosdm.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug in :mod:`argparse` where `nargs="*"` would raise an error instead of returning -an empty list when 0 arguments were supplied if choice was also defined in -``parser.add_argument``. 
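The gh-issue-84131 entry above removes ``pathlib.Path.link_to``; ``hardlink_to`` is the replacement with the same argument order as ``symlink_to``. A minimal sketch with illustrative file names:

```python
from pathlib import Path

src = Path("data.txt")
src.write_text("hello")
link = Path("data-link.txt")
# hardlink_to() reads "make *this* path a hard link pointing at target".
link.hardlink_to(src)
print(link.read_text())  # hello
```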
diff --git a/Misc/NEWS.d/next/Library/2022-05-09-22-27-11.gh-issue-92591.V7RCk2.rst b/Misc/NEWS.d/next/Library/2022-05-09-22-27-11.gh-issue-92591.V7RCk2.rst deleted file mode 100644 index cd9b598d1dbca1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-09-22-27-11.gh-issue-92591.V7RCk2.rst +++ /dev/null @@ -1,3 +0,0 @@ -Allow :mod:`logging` filters to return a :class:`logging.LogRecord` instance -so that filters attached to :class:`logging.Handler`\ s can enrich records without -side effects on other handlers. diff --git a/Misc/NEWS.d/next/Library/2022-05-10-07-57-27.gh-issue-92550.Rk_UzM.rst b/Misc/NEWS.d/next/Library/2022-05-10-07-57-27.gh-issue-92550.Rk_UzM.rst deleted file mode 100644 index 1f0fde31108a7b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-10-07-57-27.gh-issue-92550.Rk_UzM.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :meth:`pathlib.Path.rglob` for empty pattern. diff --git a/Misc/NEWS.d/next/Library/2022-05-10-16-30-40.gh-issue-90385.1_wBRQ.rst b/Misc/NEWS.d/next/Library/2022-05-10-16-30-40.gh-issue-90385.1_wBRQ.rst deleted file mode 100644 index 24aa4403f8a68a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-10-16-30-40.gh-issue-90385.1_wBRQ.rst +++ /dev/null @@ -1 +0,0 @@ -Add :meth:`pathlib.Path.walk` as an alternative to :func:`os.walk`. diff --git a/Misc/NEWS.d/next/Library/2022-05-11-10-06-31.gh-issue-86388.7ivUtT.rst b/Misc/NEWS.d/next/Library/2022-05-11-10-06-31.gh-issue-86388.7ivUtT.rst deleted file mode 100644 index 13eb5d122b28ae..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-11-10-06-31.gh-issue-86388.7ivUtT.rst +++ /dev/null @@ -1,5 +0,0 @@ -Removed randrange() functionality deprecated since Python 3.10. Formerly, -randrange(10.0) losslessly converted to randrange(10). Now, it raises a -TypeError. Also, the exception raised for non-integral values such as -randrange(10.5) or randrange('10') has been changed from ValueError to -TypeError. diff --git a/Misc/NEWS.d/next/Library/2022-05-11-14-34-09.gh-issue-91581.glkou2.rst b/Misc/NEWS.d/next/Library/2022-05-11-14-34-09.gh-issue-91581.glkou2.rst deleted file mode 100644 index 846f57844a6751..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-11-14-34-09.gh-issue-91581.glkou2.rst +++ /dev/null @@ -1,5 +0,0 @@ -:meth:`~datetime.datetime.utcfromtimestamp` no longer attempts to resolve -``fold`` in the pure Python implementation, since the fold is never 1 in UTC. -In addition to being slightly faster in the common case, this also prevents -some errors when the timestamp is close to :attr:`datetime.min -`. Patch by Paul Ganssle. diff --git a/Misc/NEWS.d/next/Library/2022-05-11-19-33-27.gh-issue-92671.KE4v6a.rst b/Misc/NEWS.d/next/Library/2022-05-11-19-33-27.gh-issue-92671.KE4v6a.rst deleted file mode 100644 index b50677ab5ca105..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-11-19-33-27.gh-issue-92671.KE4v6a.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :func:`ast.unparse` for empty tuples in the assignment target context. diff --git a/Misc/NEWS.d/next/Library/2022-05-12-15-19-00.gh-issue-92734.d0wjDt.rst b/Misc/NEWS.d/next/Library/2022-05-12-15-19-00.gh-issue-92734.d0wjDt.rst deleted file mode 100644 index a2fcd1ed3dc76f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-12-15-19-00.gh-issue-92734.d0wjDt.rst +++ /dev/null @@ -1 +0,0 @@ -Allow multi-element reprs emitted by :mod:`reprlib` to be pretty-printed using configurable indentation. 
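The gh-issue-90385 entry above adds ``pathlib.Path.walk``. A short sketch mirroring the familiar ``os.walk`` pattern; the pruning rule is illustrative:

```python
from pathlib import Path

# Path.walk() yields (dirpath, dirnames, filenames) with dirpath as a Path.
for root, dirs, files in Path(".").walk():
    dirs[:] = [d for d in dirs if not d.startswith(".")]  # prune hidden directories in place
    for name in files:
        print(root / name)
```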
diff --git a/Misc/NEWS.d/next/Library/2022-05-14-09-01-38.gh-issue-89325.ys-2BZ.rst b/Misc/NEWS.d/next/Library/2022-05-14-09-01-38.gh-issue-89325.ys-2BZ.rst deleted file mode 100644 index 175869624f75dd..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-14-09-01-38.gh-issue-89325.ys-2BZ.rst +++ /dev/null @@ -1,6 +0,0 @@ -Removed many old deprecated :mod:`unittest` features: -:class:`~unittest.TestCase` method aliases, undocumented and broken -:class:`~unittest.TestCase` method ``assertDictContainsSubset``, -undocumented :meth:`TestLoader.loadTestsFromModule -` parameter *use_load_tests*, and -an underscored alias of the :class:`~unittest.TextTestResult` class. diff --git a/Misc/NEWS.d/next/Library/2022-05-14-11-41-23.gh-issue-90473.kPdOZl.rst b/Misc/NEWS.d/next/Library/2022-05-14-11-41-23.gh-issue-90473.kPdOZl.rst deleted file mode 100644 index bf5ee542182e02..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-14-11-41-23.gh-issue-90473.kPdOZl.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`subprocess` now fails early on Emscripten and WASI platforms to work -around missing :func:`os.pipe` on WASI. diff --git a/Misc/NEWS.d/next/Library/2022-05-16-14-35-39.gh-issue-92839.owSMyo.rst b/Misc/NEWS.d/next/Library/2022-05-16-14-35-39.gh-issue-92839.owSMyo.rst deleted file mode 100644 index b425bd9c47bc91..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-16-14-35-39.gh-issue-92839.owSMyo.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed crash resulting from calling bisect.insort() or bisect.insort_left() with the key argument not equal to None. diff --git a/Misc/NEWS.d/next/Library/2022-05-17-06-27-39.gh-issue-92869.t8oBkw.rst b/Misc/NEWS.d/next/Library/2022-05-17-06-27-39.gh-issue-92869.t8oBkw.rst deleted file mode 100644 index 7787f3419de60e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-17-06-27-39.gh-issue-92869.t8oBkw.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added :class:`~ctypes.c_time_t` to :mod:`ctypes`, which has the same size as -the :c:type:`time_t` type in C. diff --git a/Misc/NEWS.d/next/Library/2022-05-18-17-18-41.gh-issue-91922.DwWIsJ.rst b/Misc/NEWS.d/next/Library/2022-05-18-17-18-41.gh-issue-91922.DwWIsJ.rst deleted file mode 100644 index 30f7bba115606d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-18-17-18-41.gh-issue-91922.DwWIsJ.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix function :func:`sqlite.connect` and the :class:`sqlite.Connection` -constructor on non-UTF-8 locales. Also, they now support bytes paths -non-decodable with the current FS encoding. diff --git a/Misc/NEWS.d/next/Library/2022-05-18-21-04-09.gh-issue-87901.lnf041.rst b/Misc/NEWS.d/next/Library/2022-05-18-21-04-09.gh-issue-87901.lnf041.rst deleted file mode 100644 index 3488541eb3d77c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-18-21-04-09.gh-issue-87901.lnf041.rst +++ /dev/null @@ -1,2 +0,0 @@ -Removed the ``encoding`` argument from :func:`os.popen` that was added in -3.11b1. diff --git a/Misc/NEWS.d/next/Library/2022-05-19-13-33-18.gh-issue-92675.ZeerMZ.rst b/Misc/NEWS.d/next/Library/2022-05-19-13-33-18.gh-issue-92675.ZeerMZ.rst deleted file mode 100644 index 6adc024fc54154..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-19-13-33-18.gh-issue-92675.ZeerMZ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :func:`venv.ensure_directories` to accept :class:`pathlib.Path` arguments -in addition to :class:`str` paths. Patch by David Foster. 
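The gh-issue-92839 entry above fixes a crash when ``bisect.insort`` is given a ``key`` argument. A small sketch with sample data:

```python
import bisect

words = ["be", "to", "or"]           # already sorted by length
# insort() with key= keeps the list sorted under that key.
bisect.insort(words, "not", key=len)
print(words)                         # ['be', 'to', 'or', 'not']
```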
diff --git a/Misc/NEWS.d/next/Library/2022-05-19-17-49-58.gh-issue-92932.o2peTh.rst b/Misc/NEWS.d/next/Library/2022-05-19-17-49-58.gh-issue-92932.o2peTh.rst deleted file mode 100644 index cb76ac5cbd60e1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-19-17-49-58.gh-issue-92932.o2peTh.rst +++ /dev/null @@ -1,3 +0,0 @@ -Now :func:`~dis.dis` and :func:`~dis.get_instructions` handle operand values -for instructions prefixed by ``EXTENDED_ARG_QUICK``. -Patch by Sam Gross and Dong-hee Na. diff --git a/Misc/NEWS.d/next/Library/2022-05-19-22-34-42.gh-issue-92986.e6uKxj.rst b/Misc/NEWS.d/next/Library/2022-05-19-22-34-42.gh-issue-92986.e6uKxj.rst deleted file mode 100644 index 691c0dd3759f8c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-19-22-34-42.gh-issue-92986.e6uKxj.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :func:`ast.unparse` when ``ImportFrom.level`` is None diff --git a/Misc/NEWS.d/next/Library/2022-05-20-15-52-43.gh-issue-93010.WF-cAc.rst b/Misc/NEWS.d/next/Library/2022-05-20-15-52-43.gh-issue-93010.WF-cAc.rst deleted file mode 100644 index 24208b5160ed52..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-20-15-52-43.gh-issue-93010.WF-cAc.rst +++ /dev/null @@ -1 +0,0 @@ -In a very special case, the email package tried to append the nonexistent ``InvalidHeaderError`` to the defect list. It should have been ``InvalidHeaderDefect``. diff --git a/Misc/NEWS.d/next/Library/2022-05-21-13-16-16.gh-issue-93044.eJ_XkZ.rst b/Misc/NEWS.d/next/Library/2022-05-21-13-16-16.gh-issue-93044.eJ_XkZ.rst deleted file mode 100644 index c9df8676bcdda0..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-21-13-16-16.gh-issue-93044.eJ_XkZ.rst +++ /dev/null @@ -1,2 +0,0 @@ -No longer convert the database argument of :func:`sqlite3.connect` to bytes -before passing it to the factory. diff --git a/Misc/NEWS.d/next/Library/2022-05-22-16-08-01.gh-issue-89973.jc-Q4g.rst b/Misc/NEWS.d/next/Library/2022-05-22-16-08-01.gh-issue-89973.jc-Q4g.rst deleted file mode 100644 index 7e61fd7d46a0bb..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-22-16-08-01.gh-issue-89973.jc-Q4g.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix :exc:`re.error` raised in :mod:`fnmatch` if the pattern contains a -character range with upper bound lower than lower bound (e.g. ``[c-a]``). -Now such ranges are interpreted as empty ranges. diff --git a/Misc/NEWS.d/next/Library/2022-05-22-23-46-18.gh-issue-93033.wZfiL-.rst b/Misc/NEWS.d/next/Library/2022-05-22-23-46-18.gh-issue-93033.wZfiL-.rst deleted file mode 100644 index 3cee530339fb51..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-22-23-46-18.gh-issue-93033.wZfiL-.rst +++ /dev/null @@ -1 +0,0 @@ -Search in some strings (platform dependent i.e [U+0xFFFF, U+0x0100] on Windows or [U+0xFFFFFFFF, U+0x00010000] on Linux 64-bit) are now up to 10 times faster. diff --git a/Misc/NEWS.d/next/Library/2022-05-24-10-59-02.gh-issue-92728.zxTifq.rst b/Misc/NEWS.d/next/Library/2022-05-24-10-59-02.gh-issue-92728.zxTifq.rst deleted file mode 100644 index b39609be2c4cf5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-24-10-59-02.gh-issue-92728.zxTifq.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :func:`re.template` function and the corresponding :const:`re.TEMPLATE` -and :const:`re.T` flags are restored after they were removed in 3.11.0b1, -but they are now deprecated, so they might be removed from Python 3.13. 
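The gh-issue-89973 entry above makes reversed character ranges in ``fnmatch`` patterns match nothing instead of raising ``re.error``. A two-line sketch:

```python
import fnmatch

print(fnmatch.fnmatch("b", "[a-c]"))  # True
# A reversed range such as [c-a] is now treated as an empty range.
print(fnmatch.fnmatch("b", "[c-a]"))  # False, no error
```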
diff --git a/Misc/NEWS.d/next/Library/2022-05-24-11-19-04.gh-issue-74696.-cnf-A.rst b/Misc/NEWS.d/next/Library/2022-05-24-11-19-04.gh-issue-74696.-cnf-A.rst deleted file mode 100644 index 5b2e460e9ea075..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-24-11-19-04.gh-issue-74696.-cnf-A.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`shutil.make_archive` no longer temporarily changes the current -working directory during creation of standard ``.zip`` or tar archives. diff --git a/Misc/NEWS.d/next/Library/2022-05-25-00-21-28.gh-issue-91513.9VyCT4.rst b/Misc/NEWS.d/next/Library/2022-05-25-00-21-28.gh-issue-91513.9VyCT4.rst deleted file mode 100644 index f9f93767ed173d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-25-00-21-28.gh-issue-91513.9VyCT4.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``taskName`` attribute to :mod:`logging` module for use with :mod:`asyncio` tasks. diff --git a/Misc/NEWS.d/next/Library/2022-05-25-02-45-41.gh-issue-90817.yxANgU.rst b/Misc/NEWS.d/next/Library/2022-05-25-02-45-41.gh-issue-90817.yxANgU.rst deleted file mode 100644 index 06937e88691725..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-25-02-45-41.gh-issue-90817.yxANgU.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :func:`locale.resetlocale` function is deprecated and will be removed in -Python 3.13. Use ``locale.setlocale(locale.LC_ALL, "")`` instead. Patch by -Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-05-25-15-57-39.gh-issue-90155.YMstB5.rst b/Misc/NEWS.d/next/Library/2022-05-25-15-57-39.gh-issue-90155.YMstB5.rst deleted file mode 100644 index 8def76914eda08..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-25-15-57-39.gh-issue-90155.YMstB5.rst +++ /dev/null @@ -1 +0,0 @@ -Fix broken :class:`asyncio.Semaphore` when acquire is cancelled. diff --git a/Misc/NEWS.d/next/Library/2022-05-26-08-41-34.gh-issue-93243.uw6x5z.rst b/Misc/NEWS.d/next/Library/2022-05-26-08-41-34.gh-issue-93243.uw6x5z.rst deleted file mode 100644 index f03ed7b5efc546..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-26-08-41-34.gh-issue-93243.uw6x5z.rst +++ /dev/null @@ -1 +0,0 @@ -The :mod:`smtpd` module was removed per the schedule in :pep:`594`. diff --git a/Misc/NEWS.d/next/Library/2022-05-26-09-24-41.gh-issue-93162.W1VuhU.rst b/Misc/NEWS.d/next/Library/2022-05-26-09-24-41.gh-issue-93162.W1VuhU.rst deleted file mode 100644 index 4d916a1df5e065..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-26-09-24-41.gh-issue-93162.W1VuhU.rst +++ /dev/null @@ -1,4 +0,0 @@ -Add the ability for :func:`logging.config.dictConfig` to usefully configure -:class:`~logging.handlers.QueueHandler` and :class:`~logging.handlers.QueueListener` -as a pair, and add :func:`logging.getHandlerByName` and :func:`logging.getHandlerNames` -APIs to allow access to handlers by name. diff --git a/Misc/NEWS.d/next/Library/2022-05-26-23-10-55.gh-issue-93156.4XfDVN.rst b/Misc/NEWS.d/next/Library/2022-05-26-23-10-55.gh-issue-93156.4XfDVN.rst deleted file mode 100644 index 165baa08aaab14..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-26-23-10-55.gh-issue-93156.4XfDVN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Accessing the :attr:`pathlib.PurePath.parents` sequence of an absolute path -using negative index values produced incorrect results. 
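The gh-issue-93156 entry above fixes negative indexing of ``PurePath.parents`` for absolute paths. A quick sketch with an illustrative path:

```python
from pathlib import PurePosixPath

parents = PurePosixPath("/usr/local/bin/python").parents
print(list(parents))  # [.../usr/local/bin, .../usr/local, .../usr, /]
print(parents[-1])    # PurePosixPath('/')    -- counted from the root end
print(parents[-2])    # PurePosixPath('/usr')
```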
diff --git a/Misc/NEWS.d/next/Library/2022-05-27-10-52-06.gh-issue-85308.K6r-tJ.rst b/Misc/NEWS.d/next/Library/2022-05-27-10-52-06.gh-issue-85308.K6r-tJ.rst deleted file mode 100644 index 4574264dd4d433..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-27-10-52-06.gh-issue-85308.K6r-tJ.rst +++ /dev/null @@ -1,4 +0,0 @@ -Changed :class:`argparse.ArgumentParser` to use :term:`filesystem encoding -and error handler` instead of default text encoding to read arguments from -file (e.g. ``fromfile_prefix_chars`` option). This change affects Windows; -argument file should be encoded with UTF-8 instead of ANSI Codepage. diff --git a/Misc/NEWS.d/next/Library/2022-05-27-13-18-18.gh-issue-93297.e2zuHz.rst b/Misc/NEWS.d/next/Library/2022-05-27-13-18-18.gh-issue-93297.e2zuHz.rst deleted file mode 100644 index a8e4cd93d3047a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-27-13-18-18.gh-issue-93297.e2zuHz.rst +++ /dev/null @@ -1 +0,0 @@ -Make asyncio task groups prevent child tasks from being GCed diff --git a/Misc/NEWS.d/next/Library/2022-05-27-22-17-11.gh-issue-88123.mkYl5q.rst b/Misc/NEWS.d/next/Library/2022-05-27-22-17-11.gh-issue-88123.mkYl5q.rst deleted file mode 100644 index 46bd37a85a7ca1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-27-22-17-11.gh-issue-88123.mkYl5q.rst +++ /dev/null @@ -1,2 +0,0 @@ -Implement Enum __contains__ that returns True or False to replace the -deprecated behaviour that would sometimes raise a TypeError. diff --git a/Misc/NEWS.d/next/Library/2022-05-28-08-02-55.gh-issue-93312.HY0Uzj.rst b/Misc/NEWS.d/next/Library/2022-05-28-08-02-55.gh-issue-93312.HY0Uzj.rst deleted file mode 100644 index f11d04f63532f3..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-28-08-02-55.gh-issue-93312.HY0Uzj.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :data:`os.PIDFD_NONBLOCK` flag to open a file descriptor -for a process with :func:`os.pidfd_open` in non-blocking mode. -Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-05-30-21-42-50.gh-issue-83658.01Ntx0.rst b/Misc/NEWS.d/next/Library/2022-05-30-21-42-50.gh-issue-83658.01Ntx0.rst deleted file mode 100644 index a1873095409803..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-30-21-42-50.gh-issue-83658.01Ntx0.rst +++ /dev/null @@ -1 +0,0 @@ -Make :class:`multiprocessing.Pool` raise an exception if ``maxtasksperchild`` is not ``None`` or a positive int. diff --git a/Misc/NEWS.d/next/Library/2022-05-31-14-58-40.gh-issue-93353.9Hvm6o.rst b/Misc/NEWS.d/next/Library/2022-05-31-14-58-40.gh-issue-93353.9Hvm6o.rst deleted file mode 100644 index 67be3c68f47cba..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-05-31-14-58-40.gh-issue-93353.9Hvm6o.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix the :func:`importlib.resources.as_file` context manager to remove the -temporary file if destroyed late during Python finalization: keep a local -reference to the :func:`os.remove` function. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-01-11-24-13.gh-issue-91162.NxvU_u.rst b/Misc/NEWS.d/next/Library/2022-06-01-11-24-13.gh-issue-91162.NxvU_u.rst deleted file mode 100644 index 09fa47c0d23840..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-01-11-24-13.gh-issue-91162.NxvU_u.rst +++ /dev/null @@ -1,5 +0,0 @@ -Support splitting of unpacked arbitrary-length tuple over ``TypeVar`` and -``TypeVarTuple`` parameters. 
For example: - -* ``A[T, *Ts][*tuple[int, ...]]`` -> ``A[int, *tuple[int, ...]]`` -* ``A[*Ts, T][*tuple[int, ...]]`` -> ``A[*tuple[int, ...], int]`` diff --git a/Misc/NEWS.d/next/Library/2022-06-02-08-40-58.gh-issue-91810.Gtk44w.rst b/Misc/NEWS.d/next/Library/2022-06-02-08-40-58.gh-issue-91810.Gtk44w.rst deleted file mode 100644 index e40005886afc3e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-02-08-40-58.gh-issue-91810.Gtk44w.rst +++ /dev/null @@ -1,2 +0,0 @@ -Suppress writing an XML declaration in open files in ``ElementTree.write()`` -with ``encoding='unicode'`` and ``xml_declaration=None``. diff --git a/Misc/NEWS.d/next/Library/2022-06-03-22-13-28.gh-issue-93370.tjfu9L.rst b/Misc/NEWS.d/next/Library/2022-06-03-22-13-28.gh-issue-93370.tjfu9L.rst deleted file mode 100644 index bd531503800319..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-03-22-13-28.gh-issue-93370.tjfu9L.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate :data:`sqlite3.version` and :data:`sqlite3.version_info`. diff --git a/Misc/NEWS.d/next/Library/2022-06-04-00-11-54.gh-issue-93475.vffFw1.rst b/Misc/NEWS.d/next/Library/2022-06-04-00-11-54.gh-issue-93475.vffFw1.rst deleted file mode 100644 index efe7ff3e9b4fb6..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-04-00-11-54.gh-issue-93475.vffFw1.rst +++ /dev/null @@ -1,2 +0,0 @@ -Expose ``FICLONE`` and ``FICLONERANGE`` constants in :mod:`fcntl`. Patch by -Illia Volochii. diff --git a/Misc/NEWS.d/next/Library/2022-06-05-22-22-42.gh-issue-93421.43UO_8.rst b/Misc/NEWS.d/next/Library/2022-06-05-22-22-42.gh-issue-93421.43UO_8.rst deleted file mode 100644 index 9e1d6554e0ab2b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-05-22-22-42.gh-issue-93421.43UO_8.rst +++ /dev/null @@ -1,3 +0,0 @@ -Update :data:`sqlite3.Cursor.rowcount` when a DML statement has run to -completion. This fixes the row count for SQL queries like -``UPDATE ... RETURNING``. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2022-06-06-12-58-27.gh-issue-79579.e8rB-M.rst b/Misc/NEWS.d/next/Library/2022-06-06-12-58-27.gh-issue-79579.e8rB-M.rst deleted file mode 100644 index 82b1a1c28a6001..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-06-12-58-27.gh-issue-79579.e8rB-M.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`sqlite3` now correctly detects DML queries with leading comments. -Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2022-06-06-13-19-43.gh-issue-93521._vE8m9.rst b/Misc/NEWS.d/next/Library/2022-06-06-13-19-43.gh-issue-93521._vE8m9.rst deleted file mode 100644 index 3a3ff4736d2940..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-06-13-19-43.gh-issue-93521._vE8m9.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fixed a case where dataclasses would try to add ``__weakref__`` into the -``__slots__`` for a dataclass that specified ``weakref_slot=True`` when it was -already defined in one of its bases. This resulted in a ``TypeError`` upon the -new class being created. diff --git a/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst b/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst deleted file mode 100644 index 6ebdc394900e63..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a multiprocessing bug where a global named resource (such as a semaphore) -could leak when a child process is spawned (as opposed to forked). 
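The gh-issue-88123 entry above makes ``Enum.__contains__`` return ``True`` or ``False`` instead of sometimes raising ``TypeError``. A minimal sketch; the enum and the probe values are illustrative:

```python
from enum import Enum

class Color(Enum):
    RED = 1
    GREEN = 2

print(Color.RED in Color)  # True
print("purple" in Color)   # False -- membership tests no longer raise TypeError
```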
diff --git a/Misc/NEWS.d/next/Library/2022-06-08-20-11-02.gh-issue-90494.LIZT85.rst b/Misc/NEWS.d/next/Library/2022-06-08-20-11-02.gh-issue-90494.LIZT85.rst deleted file mode 100644 index 95416768793eaf..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-08-20-11-02.gh-issue-90494.LIZT85.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`copy.copy` and :func:`copy.deepcopy` now always raise a TypeError if -``__reduce__()`` returns a tuple with length 6 instead of silently ignore -the 6th item or produce incorrect result. diff --git a/Misc/NEWS.d/next/Library/2022-06-09-10-12-55.gh-issue-90473.683m_C.rst b/Misc/NEWS.d/next/Library/2022-06-09-10-12-55.gh-issue-90473.683m_C.rst deleted file mode 100644 index b053a8e9a08135..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-09-10-12-55.gh-issue-90473.683m_C.rst +++ /dev/null @@ -1,2 +0,0 @@ -Emscripten and WASI have no home directory and cannot provide :pep:`370` -user site directory. diff --git a/Misc/NEWS.d/next/Library/2022-06-09-14-44-21.gh-issue-93626.sfghs46.rst b/Misc/NEWS.d/next/Library/2022-06-09-14-44-21.gh-issue-93626.sfghs46.rst deleted file mode 100644 index c6fa103706e9cd..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-09-14-44-21.gh-issue-93626.sfghs46.rst +++ /dev/null @@ -1 +0,0 @@ -Set ``__future__.annotations`` to have a ``None`` mandatoryRelease to indicate that it is currently 'TBD'. diff --git a/Misc/NEWS.d/next/Library/2022-06-09-17-15-26.gh-issue-91389.OE4vS5.rst b/Misc/NEWS.d/next/Library/2022-06-09-17-15-26.gh-issue-91389.OE4vS5.rst deleted file mode 100644 index 0a126551e4110b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-09-17-15-26.gh-issue-91389.OE4vS5.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue where :mod:`dis` utilities could report missing or incorrect -position information in the presence of ``CACHE`` entries. diff --git a/Misc/NEWS.d/next/Library/2022-06-11-13-32-17.gh-issue-79512.A1KTDr.rst b/Misc/NEWS.d/next/Library/2022-06-11-13-32-17.gh-issue-79512.A1KTDr.rst deleted file mode 100644 index 5393fb52e93c30..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-11-13-32-17.gh-issue-79512.A1KTDr.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed names and ``__module__`` value of :mod:`weakref` classes -:class:`~weakref.ReferenceType`, :class:`~weakref.ProxyType`, -:class:`~weakref.CallableProxyType`. It makes them pickleable. diff --git a/Misc/NEWS.d/next/Library/2022-06-15-21-20-02.gh-issue-93820.FAMLY8.rst b/Misc/NEWS.d/next/Library/2022-06-15-21-20-02.gh-issue-93820.FAMLY8.rst deleted file mode 100644 index e06d897e7d8e6b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-15-21-20-02.gh-issue-93820.FAMLY8.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a regression when :func:`copy.copy`-ing :class:`enum.Flag` with -multiple flag members. diff --git a/Misc/NEWS.d/next/Library/2022-06-15-21-28-16.gh-issue-83499.u3DQJ-.rst b/Misc/NEWS.d/next/Library/2022-06-15-21-28-16.gh-issue-83499.u3DQJ-.rst deleted file mode 100644 index 6b32b238dfdede..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-15-21-28-16.gh-issue-83499.u3DQJ-.rst +++ /dev/null @@ -1 +0,0 @@ -Fix double closing of file description in :mod:`tempfile`. 
diff --git a/Misc/NEWS.d/next/Library/2022-06-15-21-35-11.gh-issue-91404.39TZzW.rst b/Misc/NEWS.d/next/Library/2022-06-15-21-35-11.gh-issue-91404.39TZzW.rst deleted file mode 100644 index e20b15c7b75864..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-15-21-35-11.gh-issue-91404.39TZzW.rst +++ /dev/null @@ -1,3 +0,0 @@ -Revert the :mod:`re` memory leak when a match is terminated by a signal or -memory allocation failure as the implemented fix caused a major performance -regression. diff --git a/Misc/NEWS.d/next/Library/2022-06-16-09-24-50.gh-issue-93847.kuv8bN.rst b/Misc/NEWS.d/next/Library/2022-06-16-09-24-50.gh-issue-93847.kuv8bN.rst deleted file mode 100644 index c6947575e67e1c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-16-09-24-50.gh-issue-93847.kuv8bN.rst +++ /dev/null @@ -1 +0,0 @@ -Fix repr of enum of generic aliases. diff --git a/Misc/NEWS.d/next/Library/2022-06-16-11-16-53.gh-issue-93820.00X0Y5.rst b/Misc/NEWS.d/next/Library/2022-06-16-11-16-53.gh-issue-93820.00X0Y5.rst deleted file mode 100644 index 70bb1e6c0cd764..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-16-11-16-53.gh-issue-93820.00X0Y5.rst +++ /dev/null @@ -1 +0,0 @@ -Pickle :class:`enum.Flag` by name. diff --git a/Misc/NEWS.d/next/Library/2022-06-17-12-02-30.gh-issue-93858.R49ARc.rst b/Misc/NEWS.d/next/Library/2022-06-17-12-02-30.gh-issue-93858.R49ARc.rst deleted file mode 100644 index 508ba626bab41d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-17-12-02-30.gh-issue-93858.R49ARc.rst +++ /dev/null @@ -1 +0,0 @@ -Prevent error when activating venv in nested fish instances. diff --git a/Misc/NEWS.d/next/Library/2022-06-17-16-00-55.gh-issue-93963.8YYZ-2.rst b/Misc/NEWS.d/next/Library/2022-06-17-16-00-55.gh-issue-93963.8YYZ-2.rst deleted file mode 100644 index 0973982dfeeffd..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-17-16-00-55.gh-issue-93963.8YYZ-2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Officially deprecate from ``importlib.abc`` classes moved to -``importlib.resources.abc``. diff --git a/Misc/NEWS.d/next/Library/2022-06-18-15-06-54.gh-issue-93973.4y6UQT.rst b/Misc/NEWS.d/next/Library/2022-06-18-15-06-54.gh-issue-93973.4y6UQT.rst deleted file mode 100644 index a3e68ce4fab9df..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-18-15-06-54.gh-issue-93973.4y6UQT.rst +++ /dev/null @@ -1 +0,0 @@ -Add keyword argument ``all_errors`` to ``asyncio.create_connection`` so that multiple connection errors can be raised as an ``ExceptionGroup``. diff --git a/Misc/NEWS.d/next/Library/2022-06-20-23-14-43.gh-issue-94028.UofEcX.rst b/Misc/NEWS.d/next/Library/2022-06-20-23-14-43.gh-issue-94028.UofEcX.rst deleted file mode 100644 index 5775b2276d70b4..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-20-23-14-43.gh-issue-94028.UofEcX.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a regression in the :mod:`sqlite3` where statement objects were not -properly cleared and reset after use in cursor iters. The regression was -introduced by PR 27884 in Python 3.11a1. Patch by Erlend E. Aasland. 
diff --git a/Misc/NEWS.d/next/Library/2022-06-21-11-40-31.gh-issue-84753.FW1pxO.rst b/Misc/NEWS.d/next/Library/2022-06-21-11-40-31.gh-issue-84753.FW1pxO.rst deleted file mode 100644 index eeae2edf7161aa..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-21-11-40-31.gh-issue-84753.FW1pxO.rst +++ /dev/null @@ -1,7 +0,0 @@ -:func:`inspect.iscoroutinefunction`, :func:`inspect.isgeneratorfunction`, -and :func:`inspect.isasyncgenfunction` now properly return ``True`` for -duck-typed function-like objects like instances of -:class:`unittest.mock.AsyncMock`. - -This makes :func:`inspect.iscoroutinefunction` consistent with the -behavior of :func:`asyncio.iscoroutinefunction`. Patch by Mehdi ABAAKOUK. diff --git a/Misc/NEWS.d/next/Library/2022-06-22-11-16-11.gh-issue-94101.V9vDG8.rst b/Misc/NEWS.d/next/Library/2022-06-22-11-16-11.gh-issue-94101.V9vDG8.rst deleted file mode 100644 index bcef0ca07470a6..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-22-11-16-11.gh-issue-94101.V9vDG8.rst +++ /dev/null @@ -1,3 +0,0 @@ -Manual instantiation of :class:`ssl.SSLSession` objects is no longer allowed -as it lead to misconfigured instances that crashed the interpreter when -attributes where accessed on them. diff --git a/Misc/NEWS.d/next/Library/2022-06-23-13-12-05.gh-issue-91742.sNytVX.rst b/Misc/NEWS.d/next/Library/2022-06-23-13-12-05.gh-issue-91742.sNytVX.rst deleted file mode 100644 index 30c92363b10b36..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-23-13-12-05.gh-issue-91742.sNytVX.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :mod:`pdb` crash after jump caused by a null pointer dereference. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-06-23-14-35-10.gh-issue-94169.jeba90.rst b/Misc/NEWS.d/next/Library/2022-06-23-14-35-10.gh-issue-94169.jeba90.rst deleted file mode 100644 index 40c1fc10bc0e46..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-23-14-35-10.gh-issue-94169.jeba90.rst +++ /dev/null @@ -1,4 +0,0 @@ -Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python -3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) -function is a built-in function. Since Python 3.10, :func:`_pyio.open` is -also a static method. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-09-41-41.gh-issue-94196.r2KyfS.rst b/Misc/NEWS.d/next/Library/2022-06-24-09-41-41.gh-issue-94196.r2KyfS.rst deleted file mode 100644 index e22776f1b45e6f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-09-41-41.gh-issue-94196.r2KyfS.rst +++ /dev/null @@ -1,4 +0,0 @@ -:mod:`gzip`: Remove the ``filename`` attribute of :class:`gzip.GzipFile`, -deprecated since Python 2.6, use the :attr:`~gzip.GzipFile.name` attribute -instead. In write mode, the ``filename`` attribute added ``'.gz'`` file -extension if it was not present. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-10-18-59.gh-issue-94199.kYOo8g.rst b/Misc/NEWS.d/next/Library/2022-06-24-10-18-59.gh-issue-94199.kYOo8g.rst deleted file mode 100644 index f3a9a35e8fcaa5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-10-18-59.gh-issue-94199.kYOo8g.rst +++ /dev/null @@ -1,5 +0,0 @@ -:mod:`hashlib`: Remove the pure Python implementation of -:func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and -newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides -a C implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. Patch -by Victor Stinner. 
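The gh-issue-84753 entry above makes ``inspect.iscoroutinefunction`` recognise duck-typed async callables such as ``unittest.mock.AsyncMock``. A one-line sketch of that behaviour:

```python
import inspect
from unittest.mock import AsyncMock

mock = AsyncMock()
# Duck-typed async callables are now reported as coroutine functions.
print(inspect.iscoroutinefunction(mock))  # True
```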
diff --git a/Misc/NEWS.d/next/Library/2022-06-24-10-29-19.gh-issue-94199.pfehmz.rst b/Misc/NEWS.d/next/Library/2022-06-24-10-29-19.gh-issue-94199.pfehmz.rst deleted file mode 100644 index ed325c0f6886f5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-10-29-19.gh-issue-94199.pfehmz.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove the :func:`ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: -use :func:`os.urandom` or :func:`ssl.RAND_bytes` instead. Patch by Victor -Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-10-39-56.gh-issue-94199.MIuckY.rst b/Misc/NEWS.d/next/Library/2022-06-24-10-39-56.gh-issue-94199.MIuckY.rst deleted file mode 100644 index e1fb163d55d599..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-10-39-56.gh-issue-94199.MIuckY.rst +++ /dev/null @@ -1,7 +0,0 @@ -Remove the :func:`ssl.wrap_socket` function, deprecated in Python 3.7: instead, -create a :class:`ssl.SSLContext` object and call its -:class:`ssl.SSLContext.wrap_socket` method. Any package that still uses -:func:`ssl.wrap_socket` is broken and insecure. The function neither sends a -SNI TLS extension nor validates server hostname. Code is subject to `CWE-295 -`_: Improper Certificate -Validation. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-14-25-26.gh-issue-94214.03pXR5.rst b/Misc/NEWS.d/next/Library/2022-06-24-14-25-26.gh-issue-94214.03pXR5.rst deleted file mode 100644 index 7dccc0abd4af8e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-14-25-26.gh-issue-94214.03pXR5.rst +++ /dev/null @@ -1 +0,0 @@ -Document the ``context`` object used in the ``venv.EnvBuilder`` class, and add the new environment's library path to it. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-17-11-33.gh-issue-94199.7releN.rst b/Misc/NEWS.d/next/Library/2022-06-24-17-11-33.gh-issue-94199.7releN.rst deleted file mode 100644 index 68bd283b990741..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-17-11-33.gh-issue-94199.7releN.rst +++ /dev/null @@ -1,4 +0,0 @@ -Remove the :func:`ssl.match_hostname` function. The -:func:`ssl.match_hostname` was deprecated in Python 3.7. OpenSSL performs -hostname matching since Python 3.7, Python no longer uses the -:func:`ssl.match_hostname` function. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-18-20-42.gh-issue-94226.8ZL4Fm.rst b/Misc/NEWS.d/next/Library/2022-06-24-18-20-42.gh-issue-94226.8ZL4Fm.rst deleted file mode 100644 index 099f945c23f2e4..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-18-20-42.gh-issue-94226.8ZL4Fm.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove the :func:`locale.format` function, deprecated in Python 3.7: use -:func:`locale.format_string` instead. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-19-16-09.gh-issue-93096.r1_oIc.rst b/Misc/NEWS.d/next/Library/2022-06-24-19-16-09.gh-issue-93096.r1_oIc.rst deleted file mode 100644 index 536a9d7cab1b81..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-19-16-09.gh-issue-93096.r1_oIc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Removed undocumented ``-t`` argument of ``python -m base64``. Use -``python -m unittest test.test_base64.LegacyBase64TestCase.test_encodebytes`` -instead. 
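The gh-issue-94226 entry above removes ``locale.format``; ``locale.format_string`` is the documented replacement. A hedged sketch assuming a correctly configured user locale:

```python
import locale

locale.setlocale(locale.LC_ALL, "")  # pick up the user's locale settings
# format_string() accepts a full %-style format string, unlike the removed format().
print(locale.format_string("%d items, total %.2f", (1234567, 99.5), grouping=True))
```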
diff --git a/Misc/NEWS.d/next/Library/2022-06-24-19-23-59.gh-issue-94207.VhS1eS.rst b/Misc/NEWS.d/next/Library/2022-06-24-19-23-59.gh-issue-94207.VhS1eS.rst deleted file mode 100644 index 3d38524ac0e83c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-19-23-59.gh-issue-94207.VhS1eS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Made :class:`_struct.Struct` GC-tracked in order to fix a reference leak in -the :mod:`_struct` module. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-19-40-40.gh-issue-93096.3RlK2d.rst b/Misc/NEWS.d/next/Library/2022-06-24-19-40-40.gh-issue-93096.3RlK2d.rst deleted file mode 100644 index f7d9e33eb414db..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-19-40-40.gh-issue-93096.3RlK2d.rst +++ /dev/null @@ -1,2 +0,0 @@ -Removed undocumented ``python -m codecs``. Use ``python -m unittest -test.test_codecs.EncodedFileTest`` instead. diff --git a/Misc/NEWS.d/next/Library/2022-06-24-20-00-57.gh-issue-94216.hxnQPu.rst b/Misc/NEWS.d/next/Library/2022-06-24-20-00-57.gh-issue-94216.hxnQPu.rst deleted file mode 100644 index ae3c2e7e71f355..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-24-20-00-57.gh-issue-94216.hxnQPu.rst +++ /dev/null @@ -1 +0,0 @@ -The :mod:`dis` module now has the opcodes for pseudo instructions (those which are used by the compiler during code generation but then removed or replaced by real opcodes before the final bytecode is emitted). diff --git a/Misc/NEWS.d/next/Library/2022-06-25-09-12-23.gh-issue-74696.fxC9ua.rst b/Misc/NEWS.d/next/Library/2022-06-25-09-12-23.gh-issue-74696.fxC9ua.rst deleted file mode 100644 index 48beaff59a16a8..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-25-09-12-23.gh-issue-74696.fxC9ua.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`shutil.make_archive` now passes the *root_dir* argument to custom -archivers which support it. diff --git a/Misc/NEWS.d/next/Library/2022-06-25-13-38-53.gh-issue-93259.FAGw-2.rst b/Misc/NEWS.d/next/Library/2022-06-25-13-38-53.gh-issue-93259.FAGw-2.rst deleted file mode 100644 index d346b65836b7a3..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-25-13-38-53.gh-issue-93259.FAGw-2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Now raise ``ValueError`` when ``None`` or an empty string are passed to -``Distribution.from_name`` (and other callers). diff --git a/Misc/NEWS.d/next/Library/2022-06-25-16-27-02.gh-issue-94254.beP16v.rst b/Misc/NEWS.d/next/Library/2022-06-25-16-27-02.gh-issue-94254.beP16v.rst deleted file mode 100644 index 81482bcd4f8974..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-25-16-27-02.gh-issue-94254.beP16v.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed types of :mod:`struct` module to be immutable. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-06-25-23-44-44.gh-issue-90016.EB409s.rst b/Misc/NEWS.d/next/Library/2022-06-25-23-44-44.gh-issue-90016.EB409s.rst deleted file mode 100644 index 040ba44be2b912..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-25-23-44-44.gh-issue-90016.EB409s.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate :mod:`sqlite3` :ref:`default adapters and converters -`. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2022-06-26-10-59-15.gh-issue-89988.K8rnmt.rst b/Misc/NEWS.d/next/Library/2022-06-26-10-59-15.gh-issue-89988.K8rnmt.rst deleted file mode 100644 index 811a8d6031e0b5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-26-10-59-15.gh-issue-89988.K8rnmt.rst +++ /dev/null @@ -1 +0,0 @@ -Fix memory leak in :class:`pickle.Pickler` when looking up :attr:`dispatch_table`. Patch by Kumar Aditya. 
diff --git a/Misc/NEWS.d/next/Library/2022-06-27-10-33-18.gh-issue-94318.jR4_QV.rst b/Misc/NEWS.d/next/Library/2022-06-27-10-33-18.gh-issue-94318.jR4_QV.rst deleted file mode 100644 index 97a7cd8c9e7a50..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-27-10-33-18.gh-issue-94318.jR4_QV.rst +++ /dev/null @@ -1 +0,0 @@ -Strip trailing spaces in :mod:`pydoc` text output. diff --git a/Misc/NEWS.d/next/Library/2022-06-28-00-24-48.gh-issue-94352.JY1Ayt.rst b/Misc/NEWS.d/next/Library/2022-06-28-00-24-48.gh-issue-94352.JY1Ayt.rst deleted file mode 100644 index 3a166abdcc3203..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-28-00-24-48.gh-issue-94352.JY1Ayt.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`shlex.split`: Passing ``None`` for *s* argument now raises an exception, -rather than reading :data:`sys.stdin`. The feature was deprecated in Python -3.9. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-28-14-29-21.gh-issue-94379.RrgKfh.rst b/Misc/NEWS.d/next/Library/2022-06-28-14-29-21.gh-issue-94379.RrgKfh.rst deleted file mode 100644 index 24eafa1048ab4b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-28-14-29-21.gh-issue-94379.RrgKfh.rst +++ /dev/null @@ -1,3 +0,0 @@ -:mod:`zipimport`: Remove ``find_loader()`` and ``find_module()`` methods, -deprecated in Python 3.10: use the ``find_spec()`` method instead. See -:pep:`451` for the rationale. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-28-14-41-22.gh-issue-94383.CXnquo.rst b/Misc/NEWS.d/next/Library/2022-06-28-14-41-22.gh-issue-94383.CXnquo.rst deleted file mode 100644 index 9ed476b717f334..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-28-14-41-22.gh-issue-94383.CXnquo.rst +++ /dev/null @@ -1,5 +0,0 @@ -:mod:`xml.etree`: Remove the ``ElementTree.Element.copy()`` method of the -pure Python implementation, deprecated in Python 3.10, use the -:func:`copy.copy` function instead. The C implementation of :mod:`xml.etree` -has no ``copy()`` method, only a ``__copy__()`` method. Patch by Victor -Stinner. diff --git a/Misc/NEWS.d/next/Library/2022-06-29-04-42-56.gh-issue-94398.YOq_bJ.rst b/Misc/NEWS.d/next/Library/2022-06-29-04-42-56.gh-issue-94398.YOq_bJ.rst deleted file mode 100644 index c6e7e967d106e1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-29-04-42-56.gh-issue-94398.YOq_bJ.rst +++ /dev/null @@ -1 +0,0 @@ -Once a :class:`asyncio.TaskGroup` has started shutting down (i.e., at least one task has failed and the task group has started cancelling the remaining tasks), it should not be possible to add new tasks to the task group. diff --git a/Misc/NEWS.d/next/Library/2022-06-29-09-48-37.gh-issue-92336.otA6c6.rst b/Misc/NEWS.d/next/Library/2022-06-29-09-48-37.gh-issue-92336.otA6c6.rst deleted file mode 100644 index eb74e0ceb74466..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-29-09-48-37.gh-issue-92336.otA6c6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix bug where :meth:`linecache.getline` fails on bad files with :exc:`UnicodeDecodeError` or :exc:`SyntaxError`. It now returns an empty string as per the documentation. diff --git a/Misc/NEWS.d/next/Library/2022-07-02-19-46-30.gh-issue-94510.xOatDC.rst b/Misc/NEWS.d/next/Library/2022-07-02-19-46-30.gh-issue-94510.xOatDC.rst deleted file mode 100644 index 55856d5756559f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-02-19-46-30.gh-issue-94510.xOatDC.rst +++ /dev/null @@ -1,2 +0,0 @@ -Re-entrant calls to :func:`sys.setprofile` and :func:`sys.settrace` now -raise :exc:`RuntimeError`. Patch by Pablo Galindo. 
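The gh-issue-94352 entry above makes ``shlex.split(None)`` raise instead of silently reading ``sys.stdin``. A sketch; the exact exception type is not asserted here, so both plausible types are caught:

```python
import shlex

print(shlex.split("ls -l 'my file.txt'"))  # ['ls', '-l', 'my file.txt']
try:
    shlex.split(None)  # previously fell back to reading sys.stdin
except (TypeError, ValueError) as exc:
    print("rejected:", exc)
```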
diff --git a/Misc/NEWS.d/next/Library/2022-07-03-16-26-35.gh-issue-78724.XNiJzf.rst b/Misc/NEWS.d/next/Library/2022-07-03-16-26-35.gh-issue-78724.XNiJzf.rst deleted file mode 100644 index 9621e4d3f83daf..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-03-16-26-35.gh-issue-78724.XNiJzf.rst +++ /dev/null @@ -1 +0,0 @@ -Fix crash in :class:`struct.Struct` when it was not completely initialized by initializing it in :meth:`~object.__new__``. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-03-16-41-03.gh-issue-94382.zuVZeM.rst b/Misc/NEWS.d/next/Library/2022-07-03-16-41-03.gh-issue-94382.zuVZeM.rst deleted file mode 100644 index d79300778f762c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-03-16-41-03.gh-issue-94382.zuVZeM.rst +++ /dev/null @@ -1 +0,0 @@ -Port static types of ``_multiprocessing`` module to heap types. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-05-17-22-00.gh-issue-94343.kf4H5r.rst b/Misc/NEWS.d/next/Library/2022-07-05-17-22-00.gh-issue-94343.kf4H5r.rst deleted file mode 100644 index f666c2b0ec125e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-05-17-22-00.gh-issue-94343.kf4H5r.rst +++ /dev/null @@ -1 +0,0 @@ -Allow setting the attributes of ``reprlib.Repr`` during object initialization diff --git a/Misc/NEWS.d/next/Library/2022-07-06-06-02-02.gh-issue-93896.vIgWGr.rst b/Misc/NEWS.d/next/Library/2022-07-06-06-02-02.gh-issue-93896.vIgWGr.rst deleted file mode 100644 index 2283e14de9a9ad..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-06-02-02.gh-issue-93896.vIgWGr.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :func:`asyncio.run` and :class:`unittest.IsolatedAsyncioTestCase` to always the set event loop as it was done in Python 3.10 and earlier. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-14-45-12.gh-issue-93910.iZcp67.rst b/Misc/NEWS.d/next/Library/2022-07-06-14-45-12.gh-issue-93910.iZcp67.rst deleted file mode 100644 index 2e589118e3efd3..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-14-45-12.gh-issue-93910.iZcp67.rst +++ /dev/null @@ -1,3 +0,0 @@ -The ability to access the other values of an enum on an enum (e.g. -``Color.RED.BLUE``) has been restored in order to fix a performance -regression. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-14-57-33.gh-issue-94619.PRqKVX.rst b/Misc/NEWS.d/next/Library/2022-07-06-14-57-33.gh-issue-94619.PRqKVX.rst deleted file mode 100644 index a7905034424684..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-14-57-33.gh-issue-94619.PRqKVX.rst +++ /dev/null @@ -1 +0,0 @@ -Remove the long-deprecated `module_repr()` from `importlib`. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-16-01-08.gh-issue-94607.Q6RYfz.rst b/Misc/NEWS.d/next/Library/2022-07-06-16-01-08.gh-issue-94607.Q6RYfz.rst deleted file mode 100644 index 3bbb9172f26195..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-16-01-08.gh-issue-94607.Q6RYfz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix subclassing complex generics with type variables in :mod:`typing`. Previously an error message saying ``Some type variables ... are not listed in Generic[...]`` was shown. -:mod:`typing` no longer populates ``__parameters__`` with the ``__parameters__`` of a Python class. 
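The gh-issue-94343 entry above allows ``reprlib.Repr`` attributes to be set at construction time. A small sketch assuming the limits are accepted as keyword arguments, with illustrative values:

```python
import reprlib

short = reprlib.Repr(maxlist=2, maxstring=12)
print(short.repr([10, 20, 30, 40]))          # [10, 20, ...]
print(short.repr("a fairly long sentence"))  # truncated string repr
```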
diff --git a/Misc/NEWS.d/next/Library/2022-07-06-21-24-03.gh-issue-92546.s5Upkh.rst b/Misc/NEWS.d/next/Library/2022-07-06-21-24-03.gh-issue-92546.s5Upkh.rst deleted file mode 100644 index 0ea676ef5b0d8d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-21-24-03.gh-issue-92546.s5Upkh.rst +++ /dev/null @@ -1,2 +0,0 @@ -An undocumented ``python -m pprint`` benchmark is moved into ``pprint`` -suite of pyperformance. Patch by Oleg Iarygin. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-22-41-51.gh-issue-94309._XswsX.rst b/Misc/NEWS.d/next/Library/2022-07-06-22-41-51.gh-issue-94309._XswsX.rst deleted file mode 100644 index b1d45586e9c496..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-06-22-41-51.gh-issue-94309._XswsX.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate aliases :class:`typing.Hashable` and :class:`typing.Sized` diff --git a/Misc/NEWS.d/next/Library/2022-07-07-15-46-55.gh-issue-94637.IYEiUM.rst b/Misc/NEWS.d/next/Library/2022-07-07-15-46-55.gh-issue-94637.IYEiUM.rst deleted file mode 100644 index 20cbbcd5088b78..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-07-15-46-55.gh-issue-94637.IYEiUM.rst +++ /dev/null @@ -1,3 +0,0 @@ -:meth:`SSLContext.set_default_verify_paths` now releases the GIL around -``SSL_CTX_set_default_verify_paths`` call. The function call performs I/O -and CPU intensive work. diff --git a/Misc/NEWS.d/next/Library/2022-07-08-08-39-35.gh-issue-88050.0aOC_m.rst b/Misc/NEWS.d/next/Library/2022-07-08-08-39-35.gh-issue-88050.0aOC_m.rst deleted file mode 100644 index 43c0765d940d3a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-08-08-39-35.gh-issue-88050.0aOC_m.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :mod:`asyncio` subprocess transport to kill process cleanly when process is blocked and avoid ``RuntimeError`` when loop is closed. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-08-17-49-12.gh-issue-87822.F9dzkf.rst b/Misc/NEWS.d/next/Library/2022-07-08-17-49-12.gh-issue-87822.F9dzkf.rst deleted file mode 100644 index 7b27f5df45ba20..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-08-17-49-12.gh-issue-87822.F9dzkf.rst +++ /dev/null @@ -1 +0,0 @@ -When called with ``capture_locals=True``, the :mod:`traceback` module functions swallow exceptions raised from calls to ``repr()`` on local variables of frames. This is in order to prioritize the original exception over rendering errors. An indication of the failure is printed in place of the missing value. (Patch by Simon-Martin Schroeder). diff --git a/Misc/NEWS.d/next/Library/2022-07-09-08-55-04.gh-issue-74116.0XwYC1.rst b/Misc/NEWS.d/next/Library/2022-07-09-08-55-04.gh-issue-74116.0XwYC1.rst deleted file mode 100644 index 33782598745b78..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-09-08-55-04.gh-issue-74116.0XwYC1.rst +++ /dev/null @@ -1 +0,0 @@ -Allow :meth:`asyncio.StreamWriter.drain` to be awaited concurrently by multiple tasks. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-09-15-17-02.gh-issue-81620.L0O_bV.rst b/Misc/NEWS.d/next/Library/2022-07-09-15-17-02.gh-issue-81620.L0O_bV.rst deleted file mode 100644 index b4ccea4924ff67..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-09-15-17-02.gh-issue-81620.L0O_bV.rst +++ /dev/null @@ -1 +0,0 @@ -Add random.binomialvariate(). 
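The gh-issue-81620 entry above adds ``random.binomialvariate()``. A short sketch with illustrative parameters:

```python
import random

random.seed(42)
# binomialvariate(n, p) returns the number of successes in n independent
# trials, each succeeding with probability p.
print(random.binomialvariate(n=10, p=0.5))     # an int between 0 and 10
print(random.binomialvariate(n=1000, p=0.01))  # typically around 10
```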
diff --git a/Misc/NEWS.d/next/Library/2022-07-11-10-41-48.gh-issue-94736.EbsgeK.rst b/Misc/NEWS.d/next/Library/2022-07-11-10-41-48.gh-issue-94736.EbsgeK.rst deleted file mode 100644 index 3080672ecdfbb8..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-11-10-41-48.gh-issue-94736.EbsgeK.rst +++ /dev/null @@ -1 +0,0 @@ -Fix crash when deallocating an instance of a subclass of ``_multiprocessing.SemLock``. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-14-00-43-52.gh-issue-94821.e17ghU.rst b/Misc/NEWS.d/next/Library/2022-07-14-00-43-52.gh-issue-94821.e17ghU.rst deleted file mode 100644 index bf7885aef8cbf9..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-14-00-43-52.gh-issue-94821.e17ghU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix binding of unix socket to empty address on Linux to use an available -address from the abstract namespace, instead of "\0". diff --git a/Misc/NEWS.d/next/Library/2022-07-15-08-13-51.gh-issue-94857.9_KvZJ.rst b/Misc/NEWS.d/next/Library/2022-07-15-08-13-51.gh-issue-94857.9_KvZJ.rst deleted file mode 100644 index e684415595d1d2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-15-08-13-51.gh-issue-94857.9_KvZJ.rst +++ /dev/null @@ -1 +0,0 @@ -Fix refleak in ``_io.TextIOWrapper.reconfigure``. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-19-15-37-11.gh-issue-95005.iRmZ74.rst b/Misc/NEWS.d/next/Library/2022-07-19-15-37-11.gh-issue-95005.iRmZ74.rst deleted file mode 100644 index 787f3146ab7770..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-19-15-37-11.gh-issue-95005.iRmZ74.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replace :c:expr:`_PyAccu` with :c:expr:`_PyUnicodeWriter` in JSON encoder -and StringIO and remove the :c:expr:`_PyAccu` implementation. diff --git a/Misc/NEWS.d/next/Library/2022-07-20-00-23-58.gh-issue-77617.XGaqSQ.rst b/Misc/NEWS.d/next/Library/2022-07-20-00-23-58.gh-issue-77617.XGaqSQ.rst deleted file mode 100644 index 1cbaa7dfe15ec2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-20-00-23-58.gh-issue-77617.XGaqSQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :mod:`sqlite3` :ref:`command-line interface `. -Patch by Erlend Aasland. diff --git a/Misc/NEWS.d/next/Library/2022-07-20-22-49-48.gh-issue-95066.TuCu0E.rst b/Misc/NEWS.d/next/Library/2022-07-20-22-49-48.gh-issue-95066.TuCu0E.rst deleted file mode 100644 index 05ae4a6a2761af..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-20-22-49-48.gh-issue-95066.TuCu0E.rst +++ /dev/null @@ -1 +0,0 @@ -Replaced assert with exception in :func:`ast.parse`, when ``feature_version`` has an invalid major version. Patch by Shantanu Jain. diff --git a/Misc/NEWS.d/next/Library/2022-07-21-19-55-49.gh-issue-95105.BIX2Km.rst b/Misc/NEWS.d/next/Library/2022-07-21-19-55-49.gh-issue-95105.BIX2Km.rst deleted file mode 100644 index 58af62b1edc871..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-21-19-55-49.gh-issue-95105.BIX2Km.rst +++ /dev/null @@ -1 +0,0 @@ -:meth:`wsgiref.types.InputStream.__iter__` should return ``Iterator[bytes]``, not ``Iterable[bytes]``. Patch by Shantanu Jain. diff --git a/Misc/NEWS.d/next/Library/2022-07-21-22-59-22.gh-issue-95109.usxA9r.rst b/Misc/NEWS.d/next/Library/2022-07-21-22-59-22.gh-issue-95109.usxA9r.rst deleted file mode 100644 index 40196dd214a283..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-21-22-59-22.gh-issue-95109.usxA9r.rst +++ /dev/null @@ -1 +0,0 @@ -Ensure that timeouts scheduled with :class:`asyncio.Timeout` that have already expired are delivered promptly. 
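An illustrative sketch for the :func:`ast.parse` ``feature_version`` entry above (gh-95066). Hedged: the exact exception type is assumed here to be :exc:`ValueError`; builds without the fix tripped an ``assert`` instead::

    import ast

    # An unsupported major version now raises a proper exception.
    try:
        ast.parse("pass", feature_version=(2, 7))
    except Exception as exc:        # ValueError on fixed versions (assumed)
        print(type(exc).__name__, exc)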
diff --git a/Misc/NEWS.d/next/Library/2022-07-22-00-58-49.gh-issue-95077.4Z6CNC.rst b/Misc/NEWS.d/next/Library/2022-07-22-00-58-49.gh-issue-95077.4Z6CNC.rst deleted file mode 100644 index 09f350067cd31b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-22-00-58-49.gh-issue-95077.4Z6CNC.rst +++ /dev/null @@ -1 +0,0 @@ -Add deprecation warning for enum ``member.member`` access (e.g. ``Color.RED.BLUE``). diff --git a/Misc/NEWS.d/next/Library/2022-07-22-09-09-08.gh-issue-91212.53O8Ab.rst b/Misc/NEWS.d/next/Library/2022-07-22-09-09-08.gh-issue-91212.53O8Ab.rst deleted file mode 100644 index 8552f51196b5d8..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-22-09-09-08.gh-issue-91212.53O8Ab.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed flickering of the turtle window when the tracer is turned off. Patch by Shin-myoung-serp. diff --git a/Misc/NEWS.d/next/Library/2022-07-22-17-19-57.gh-issue-93157.RXByAk.rst b/Misc/NEWS.d/next/Library/2022-07-22-17-19-57.gh-issue-93157.RXByAk.rst deleted file mode 100644 index 054b318ec63f0c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-22-17-19-57.gh-issue-93157.RXByAk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :mod:`fileinput` module didn't support ``errors`` option when -``inplace`` is true. diff --git a/Misc/NEWS.d/next/Library/2022-07-22-21-18-17.gh-issue-95132.n9anlw.rst b/Misc/NEWS.d/next/Library/2022-07-22-21-18-17.gh-issue-95132.n9anlw.rst deleted file mode 100644 index 64666ad84fb4a6..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-22-21-18-17.gh-issue-95132.n9anlw.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix a :mod:`sqlite3` regression where ``*args`` and ``**kwds`` were -incorrectly relayed from :py:func:`~sqlite3.connect` to the -:class:`~sqlite3.Connection` factory. The regression was introduced in 3.11a1 -with PR 24421 (:gh:`85128`). Patch by Erlend E. Aasland.` diff --git a/Misc/NEWS.d/next/Library/2022-07-23-10-42-05.gh-issue-95166.xw6p3C.rst b/Misc/NEWS.d/next/Library/2022-07-23-10-42-05.gh-issue-95166.xw6p3C.rst deleted file mode 100644 index 34b017078436d2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-23-10-42-05.gh-issue-95166.xw6p3C.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :meth:`concurrent.futures.Executor.map` to cancel the currently waiting on future on an error - e.g. TimeoutError or KeyboardInterrupt. diff --git a/Misc/NEWS.d/next/Library/2022-07-23-10-50-05.gh-issue-93899.VT34A5.rst b/Misc/NEWS.d/next/Library/2022-07-23-10-50-05.gh-issue-93899.VT34A5.rst deleted file mode 100644 index e63475f8ba96f6..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-23-10-50-05.gh-issue-93899.VT34A5.rst +++ /dev/null @@ -1 +0,0 @@ -Fix check for existence of :data:`os.EFD_CLOEXEC`, :data:`os.EFD_NONBLOCK` and :data:`os.EFD_SEMAPHORE` flags on older kernel versions where these flags are not present. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-24-09-15-35.gh-issue-95194.ERVmqG.rst b/Misc/NEWS.d/next/Library/2022-07-24-09-15-35.gh-issue-95194.ERVmqG.rst deleted file mode 100644 index c69651923b410e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-24-09-15-35.gh-issue-95194.ERVmqG.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade bundled pip to 22.2. diff --git a/Misc/NEWS.d/next/Library/2022-07-24-12-00-06.gh-issue-95199.-5A64k.rst b/Misc/NEWS.d/next/Library/2022-07-24-12-00-06.gh-issue-95199.-5A64k.rst deleted file mode 100644 index f3d9cf3306b8d0..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-24-12-00-06.gh-issue-95199.-5A64k.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade bundled setuptools to 63.2.0. 
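A short sketch for the :mod:`sqlite3` factory regression entry above (gh-95132); ``MyConnection`` is a hypothetical subclass used only for illustration::

    import sqlite3

    class MyConnection(sqlite3.Connection):
        pass

    # With the fix, connect() no longer relays stray *args/**kwds to the
    # Connection factory; ordinary factory usage looks like this.
    con = sqlite3.connect(":memory:", factory=MyConnection)
    print(type(con).__name__)   # MyConnection
    con.close()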
diff --git a/Misc/NEWS.d/next/Library/2022-07-24-12-59-02.gh-issue-95087.VvqXkN.rst b/Misc/NEWS.d/next/Library/2022-07-24-12-59-02.gh-issue-95087.VvqXkN.rst deleted file mode 100644 index 48a5c1af74907a..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-24-12-59-02.gh-issue-95087.VvqXkN.rst +++ /dev/null @@ -1 +0,0 @@ -Fix IndexError in parsing invalid date in the :mod:`email` module. diff --git a/Misc/NEWS.d/next/Library/2022-07-24-18-00-42.gh-issue-95097.lu5qNf.rst b/Misc/NEWS.d/next/Library/2022-07-24-18-00-42.gh-issue-95097.lu5qNf.rst deleted file mode 100644 index 2840f057a9c1dc..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-24-18-00-42.gh-issue-95097.lu5qNf.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :func:`asyncio.run` for :class:`asyncio.Task` implementations without :meth:`~asyncio.Task.uncancel` method. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-25-15-45-06.gh-issue-95231.i807-g.rst b/Misc/NEWS.d/next/Library/2022-07-25-15-45-06.gh-issue-95231.i807-g.rst deleted file mode 100644 index aa53f2938bc930..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-25-15-45-06.gh-issue-95231.i807-g.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fail gracefully if :data:`~errno.EPERM` or :data:`~errno.ENOSYS` is raised when loading -:mod:`crypt` methods. This may happen when trying to load ``MD5`` on a Linux kernel -with :abbr:`FIPS (Federal Information Processing Standard)` enabled. diff --git a/Misc/NEWS.d/next/Library/2022-07-27-11-35-45.gh-issue-95045.iysT-Q.rst b/Misc/NEWS.d/next/Library/2022-07-27-11-35-45.gh-issue-95045.iysT-Q.rst deleted file mode 100644 index d4ab325e036583..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-27-11-35-45.gh-issue-95045.iysT-Q.rst +++ /dev/null @@ -1 +0,0 @@ -Fix GC crash when deallocating ``_lsprof.Profiler`` by untracking it before calling any callbacks. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-07-27-19-43-07.gh-issue-95339.NuVQ68.rst b/Misc/NEWS.d/next/Library/2022-07-27-19-43-07.gh-issue-95339.NuVQ68.rst deleted file mode 100644 index 6674a4a2695398..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-27-19-43-07.gh-issue-95339.NuVQ68.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled pip to 22.2.1. diff --git a/Misc/NEWS.d/next/Library/2022-07-28-17-14-38.gh-issue-95385.6YlsDI.rst b/Misc/NEWS.d/next/Library/2022-07-28-17-14-38.gh-issue-95385.6YlsDI.rst deleted file mode 100644 index 89fa9c2b27664d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-28-17-14-38.gh-issue-95385.6YlsDI.rst +++ /dev/null @@ -1 +0,0 @@ -Faster ``json.dumps()`` when sorting of keys is not requested (default). diff --git a/Misc/NEWS.d/next/Library/2022-07-29-20-58-37.gh-issue-94909.YjMusj.rst b/Misc/NEWS.d/next/Library/2022-07-29-20-58-37.gh-issue-94909.YjMusj.rst deleted file mode 100644 index b6d853888101c2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-29-20-58-37.gh-issue-94909.YjMusj.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix incorrect joining of relative Windows paths with drives in -:class:`pathlib.PurePath` initializer. diff --git a/Misc/NEWS.d/next/Library/2022-08-03-16-52-32.gh-issue-95289.FMnHlV.rst b/Misc/NEWS.d/next/Library/2022-08-03-16-52-32.gh-issue-95289.FMnHlV.rst deleted file mode 100644 index d802f557217b48..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-03-16-52-32.gh-issue-95289.FMnHlV.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :class:`asyncio.TaskGroup` to propagate exception when :exc:`asyncio.CancelledError` was replaced with another exception by a context manger. 
Patch by Kumar Aditya and Guido van Rossum. diff --git a/Misc/NEWS.d/next/Library/2022-08-03-21-01-17.gh-issue-95609.xxyjyX.rst b/Misc/NEWS.d/next/Library/2022-08-03-21-01-17.gh-issue-95609.xxyjyX.rst deleted file mode 100644 index 81c02ae900f7a5..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-03-21-01-17.gh-issue-95609.xxyjyX.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled pip to 22.2.2. diff --git a/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst b/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst new file mode 100644 index 00000000000000..23f8cb01cf0ac8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst @@ -0,0 +1,3 @@ +To avoid apparent memory leaks when :func:`asyncio.open_connection` raises, +break reference cycles generated by local exception and future instances +(which has exception instance as its member var). Patch by Dong Uk, Kang. diff --git a/Misc/NEWS.d/next/Library/2022-08-07-14-56-23.gh-issue-95149.U0c6Ib.rst b/Misc/NEWS.d/next/Library/2022-08-07-14-56-23.gh-issue-95149.U0c6Ib.rst deleted file mode 100644 index 6393444b53fb64..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-07-14-56-23.gh-issue-95149.U0c6Ib.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`HTTPStatus ` enum offers a couple of properties -to indicate the HTTP status category e.g. ``HTTPStatus.OK.is_success``. diff --git a/Misc/NEWS.d/next/Library/2022-08-08-01-42-11.gh-issue-95704.MOPFfX.rst b/Misc/NEWS.d/next/Library/2022-08-08-01-42-11.gh-issue-95704.MOPFfX.rst deleted file mode 100644 index 31f9fc6547d90f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-08-01-42-11.gh-issue-95704.MOPFfX.rst +++ /dev/null @@ -1,2 +0,0 @@ -When a task catches :exc:`asyncio.CancelledError` and raises some other error, -the other error should generally not silently be suppressed. diff --git a/Misc/NEWS.d/next/Library/2022-08-10-11-54-04.gh-issue-95804.i5FCFK.rst b/Misc/NEWS.d/next/Library/2022-08-10-11-54-04.gh-issue-95804.i5FCFK.rst deleted file mode 100644 index 46434cb466046e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-10-11-54-04.gh-issue-95804.i5FCFK.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``logging`` shutdown handler so it respects -``MemoryHandler.flushOnClose``. diff --git a/Misc/NEWS.d/next/Library/2022-08-10-17-34-07.gh-issue-95861.qv-T5s.rst b/Misc/NEWS.d/next/Library/2022-08-10-17-34-07.gh-issue-95861.qv-T5s.rst deleted file mode 100644 index aae76c74e2fd7d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-10-17-34-07.gh-issue-95861.qv-T5s.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add support for computing Spearman's correlation coefficient to the existing -statistics.correlation() function. diff --git a/Misc/NEWS.d/next/Library/2022-08-11-03-16-48.gh-issue-95865.0IOkFP.rst b/Misc/NEWS.d/next/Library/2022-08-11-03-16-48.gh-issue-95865.0IOkFP.rst deleted file mode 100644 index aa7c73ff1a35ab..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-11-03-16-48.gh-issue-95865.0IOkFP.rst +++ /dev/null @@ -1 +0,0 @@ -Speed up :func:`urllib.parse.quote_from_bytes` by replacing a list comprehension with ``map()``. 
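A quick demonstration of the ``HTTPStatus`` category properties mentioned above (gh-95149); hedged to 3.12+ where these properties exist::

    from http import HTTPStatus

    print(HTTPStatus.OK.is_success)              # True
    print(HTTPStatus.NOT_FOUND.is_client_error)  # True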
diff --git a/Misc/NEWS.d/next/Library/2022-08-11-18-22-29.gh-issue-95736.LzRZXe.rst b/Misc/NEWS.d/next/Library/2022-08-11-18-22-29.gh-issue-95736.LzRZXe.rst deleted file mode 100644 index abc270fe35ca65..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-11-18-22-29.gh-issue-95736.LzRZXe.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :class:`unittest.IsolatedAsyncioTestCase` to set event loop before calling setup functions. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-08-11-18-52-17.gh-issue-95899._Bi4uG.rst b/Misc/NEWS.d/next/Library/2022-08-11-18-52-17.gh-issue-95899._Bi4uG.rst deleted file mode 100644 index d2386cf3ae2296..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-11-18-52-17.gh-issue-95899._Bi4uG.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :class:`asyncio.Runner` to call :func:`asyncio.set_event_loop` only once to avoid calling :meth:`~asyncio.AbstractChildWatcher.attach_loop` multiple times on child watchers. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-08-14-18-59-54.gh-issue-69142.6is5Pq.rst b/Misc/NEWS.d/next/Library/2022-08-14-18-59-54.gh-issue-69142.6is5Pq.rst deleted file mode 100644 index 0db8b3730cf54d..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-14-18-59-54.gh-issue-69142.6is5Pq.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``%:z`` strftime format code (generates tzoffset with colons as separator), see :ref:`strftime-strptime-behavior`. diff --git a/Misc/NEWS.d/next/Library/2022-08-18-14-53-53.gh-issue-95463.GpP05c.rst b/Misc/NEWS.d/next/Library/2022-08-18-14-53-53.gh-issue-95463.GpP05c.rst deleted file mode 100644 index 553c55436aab08..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-18-14-53-53.gh-issue-95463.GpP05c.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove an incompatible change from :issue:`28080` that caused a regression -that ignored the utf8 in ``ZipInfo.flag_bits``. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Library/2022-08-19-10-19-32.gh-issue-96019.b7uAVP.rst b/Misc/NEWS.d/next/Library/2022-08-19-10-19-32.gh-issue-96019.b7uAVP.rst deleted file mode 100644 index 296963fd817248..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-19-10-19-32.gh-issue-96019.b7uAVP.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug in the ``makeunicodedata.py`` script leading to about 13 KiB of -space saving in the ``unicodedata`` module, specifically the character -decomposition data. diff --git a/Misc/NEWS.d/next/Library/2022-08-19-18-21-01.gh-issue-96125.ODcF1Y.rst b/Misc/NEWS.d/next/Library/2022-08-19-18-21-01.gh-issue-96125.ODcF1Y.rst deleted file mode 100644 index ba7d26a965821f..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-19-18-21-01.gh-issue-96125.ODcF1Y.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix incorrect condition that causes ``sys.thread_info.name`` to be wrong on -pthread platforms. diff --git a/Misc/NEWS.d/next/Library/2022-08-20-10-31-01.gh-issue-96052.a6FhaD.rst b/Misc/NEWS.d/next/Library/2022-08-20-10-31-01.gh-issue-96052.a6FhaD.rst deleted file mode 100644 index c190fb7dbcb9da..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-20-10-31-01.gh-issue-96052.a6FhaD.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix handling compiler warnings (SyntaxWarning and DeprecationWarning) in -:func:`codeop.compile_command` when checking for incomplete input. -Previously it emitted warnings and raised a SyntaxError. Now it always -returns ``None`` for incomplete input without emitting any warnings. 
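A small sketch for the ``%:z`` strftime entry above (gh-69142); hedged to 3.12+ where the new format code is recognised::

    from datetime import datetime, timedelta, timezone

    ist = timezone(timedelta(hours=5, minutes=30))
    # "%z" keeps the old +0530 form; "%:z" inserts the colon separator.
    print(datetime(2022, 1, 1, tzinfo=ist).strftime("%z %:z"))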
diff --git a/Misc/NEWS.d/next/Library/2022-08-20-12-56-15.gh-issue-96145.8ah3pE.rst b/Misc/NEWS.d/next/Library/2022-08-20-12-56-15.gh-issue-96145.8ah3pE.rst deleted file mode 100644 index 540ec8b71ebf90..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-20-12-56-15.gh-issue-96145.8ah3pE.rst +++ /dev/null @@ -1 +0,0 @@ -Add AttrDict to JSON module for use with object_hook. diff --git a/Misc/NEWS.d/next/Library/2022-08-22-13-54-20.gh-issue-96175.bH7zGU.rst b/Misc/NEWS.d/next/Library/2022-08-22-13-54-20.gh-issue-96175.bH7zGU.rst deleted file mode 100644 index c34eff22b3d456..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-22-13-54-20.gh-issue-96175.bH7zGU.rst +++ /dev/null @@ -1 +0,0 @@ -Fix unused ``localName`` parameter in the ``Attr`` class in :mod:`xml.dom.minidom`. diff --git a/Misc/NEWS.d/next/Library/2022-08-22-18-42-17.gh-issue-96159.3bFU39.rst b/Misc/NEWS.d/next/Library/2022-08-22-18-42-17.gh-issue-96159.3bFU39.rst deleted file mode 100644 index f64469e563f340..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-22-18-42-17.gh-issue-96159.3bFU39.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a performance regression in logging TimedRotatingFileHandler. Only check for special files when the rollover time has passed. diff --git a/Misc/NEWS.d/next/Library/2022-08-23-13-30-30.gh-issue-96172.7WTHer.rst b/Misc/NEWS.d/next/Library/2022-08-23-13-30-30.gh-issue-96172.7WTHer.rst deleted file mode 100644 index 1bb57f17788761..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-23-13-30-30.gh-issue-96172.7WTHer.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug in ``unicodedata``: ``east_asian_width`` used to return the wrong -value for unassigned characters; and for yet unassigned, but reserved -characters. diff --git a/Misc/NEWS.d/next/Library/2022-08-27-14-38-49.gh-issue-90467.VOOB0p.rst b/Misc/NEWS.d/next/Library/2022-08-27-14-38-49.gh-issue-90467.VOOB0p.rst deleted file mode 100644 index 282c0e76a8c81c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-27-14-38-49.gh-issue-90467.VOOB0p.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :class:`asyncio.streams.StreamReaderProtocol` to keep a strong reference -to the created task, so that it's not garbage collected diff --git a/Misc/NEWS.d/next/Library/2022-08-27-21-26-52.gh-issue-96349.XyYLlO.rst b/Misc/NEWS.d/next/Library/2022-08-27-21-26-52.gh-issue-96349.XyYLlO.rst deleted file mode 100644 index 59eb3517191ddd..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-27-21-26-52.gh-issue-96349.XyYLlO.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a minor performance regression in :func:`threading.Event.__init__` diff --git a/Misc/NEWS.d/next/Library/2022-08-29-07-04-03.gh-issue-89258.ri7ncj.rst b/Misc/NEWS.d/next/Library/2022-08-29-07-04-03.gh-issue-89258.ri7ncj.rst deleted file mode 100644 index 74300c108c893b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-29-07-04-03.gh-issue-89258.ri7ncj.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added a :meth:`~logging.Logger.getChildren` method to -:class:`logging.Logger`, to get the immediate child loggers of a logger. diff --git a/Misc/NEWS.d/next/Library/2022-08-29-12-49-30.gh-issue-96142.PdCMez.rst b/Misc/NEWS.d/next/Library/2022-08-29-12-49-30.gh-issue-96142.PdCMez.rst deleted file mode 100644 index 43d1c3de992216..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-29-12-49-30.gh-issue-96142.PdCMez.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add ``match_args``, ``kw_only``, ``slots``, and ``weakref_slot`` to -``_DataclassParams``. 
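A usage sketch for the :meth:`logging.Logger.getChildren` entry above (gh-89258); hedged to 3.12+ where the method is available::

    import logging

    logging.getLogger("app.db")
    logging.getLogger("app.ui")
    # getChildren() returns the immediate child loggers of "app".
    print(sorted(child.name for child in logging.getLogger("app").getChildren()))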
diff --git a/Misc/NEWS.d/next/Library/2022-08-29-15-28-39.gh-issue-96385.uLRTsf.rst b/Misc/NEWS.d/next/Library/2022-08-29-15-28-39.gh-issue-96385.uLRTsf.rst deleted file mode 100644 index 57354826f34926..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-29-15-28-39.gh-issue-96385.uLRTsf.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix ``TypeVarTuple.__typing_prepare_subst__``. ``TypeError`` was not raised -when using more than one ``TypeVarTuple``, like ``[*T, *V]`` in type alias -substitutions. diff --git a/Misc/NEWS.d/next/Library/2022-08-29-16-54-36.gh-issue-96388.dCpJcu.rst b/Misc/NEWS.d/next/Library/2022-08-29-16-54-36.gh-issue-96388.dCpJcu.rst deleted file mode 100644 index 3a35c4734871d1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-29-16-54-36.gh-issue-96388.dCpJcu.rst +++ /dev/null @@ -1,2 +0,0 @@ -Work around missing socket functions in :class:`~socket.socket`'s -``__repr__``. diff --git a/Misc/NEWS.d/next/Library/2022-08-30-11-46-36.gh-issue-95987.CV7_u4.rst b/Misc/NEWS.d/next/Library/2022-08-30-11-46-36.gh-issue-95987.CV7_u4.rst deleted file mode 100644 index 232bba1b92440b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-30-11-46-36.gh-issue-95987.CV7_u4.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``repr`` of ``Any`` subclasses. diff --git a/Misc/NEWS.d/next/Library/2022-08-31-11-10-21.gh-issue-96079.uqrXdJ.rst b/Misc/NEWS.d/next/Library/2022-08-31-11-10-21.gh-issue-96079.uqrXdJ.rst deleted file mode 100644 index 4cb8d276cbe7f7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-08-31-11-10-21.gh-issue-96079.uqrXdJ.rst +++ /dev/null @@ -1 +0,0 @@ -In :mod:`typing`, fix missing field ``name`` and incorrect ``__module__`` in _AnnotatedAlias. diff --git a/Misc/NEWS.d/next/Library/2022-09-01-13-54-38.gh-issue-96465.0IJmrH.rst b/Misc/NEWS.d/next/Library/2022-09-01-13-54-38.gh-issue-96465.0IJmrH.rst deleted file mode 100644 index c78d7538ef943e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-01-13-54-38.gh-issue-96465.0IJmrH.rst +++ /dev/null @@ -1 +0,0 @@ -Fraction hashes are now cached. diff --git a/Misc/NEWS.d/next/Library/2022-09-03-18-39-05.gh-issue-96538.W156-D.rst b/Misc/NEWS.d/next/Library/2022-09-03-18-39-05.gh-issue-96538.W156-D.rst deleted file mode 100644 index 22e5b4fc259964..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-03-18-39-05.gh-issue-96538.W156-D.rst +++ /dev/null @@ -1 +0,0 @@ -Speed up ``bisect.bisect()`` functions by taking advantage of type-stability. diff --git a/Misc/NEWS.d/next/Library/2022-09-04-12-32-52.gh-issue-68163.h6TJCc.rst b/Misc/NEWS.d/next/Library/2022-09-04-12-32-52.gh-issue-68163.h6TJCc.rst deleted file mode 100644 index 756f6c9eb9e842..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-04-12-32-52.gh-issue-68163.h6TJCc.rst +++ /dev/null @@ -1 +0,0 @@ -Correct conversion of :class:`numbers.Rational`'s to :class:`float`. diff --git a/Misc/NEWS.d/next/Library/2022-09-07-22-49-37.gh-issue-96652.YqOKxI.rst b/Misc/NEWS.d/next/Library/2022-09-07-22-49-37.gh-issue-96652.YqOKxI.rst deleted file mode 100644 index 1d04db7b2a2531..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-07-22-49-37.gh-issue-96652.YqOKxI.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix the faulthandler implementation of ``faulthandler.register(signal, -chain=True)`` if the ``sigaction()`` function is not available: don't call -the previous signal handler if it's NULL. Patch by Victor Stinner. 
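A small illustration for the ``Fraction`` hash-caching entry above (gh-96465). The entry describes an internal speed-up only, so this is just a sanity check that the observable behaviour is unchanged::

    from fractions import Fraction

    f = Fraction(355, 113)
    # The hash is now computed once and reused; the value itself is the same.
    print(hash(f) == hash(Fraction(355, 113)))   # True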
diff --git a/Misc/NEWS.d/next/Library/2022-09-08-20-12-48.gh-issue-46412.r_cfTh.rst b/Misc/NEWS.d/next/Library/2022-09-08-20-12-48.gh-issue-46412.r_cfTh.rst deleted file mode 100644 index 27fcd0328bd267..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-08-20-12-48.gh-issue-46412.r_cfTh.rst +++ /dev/null @@ -1 +0,0 @@ -Improve performance of ``bool(db)`` for large ndb/gdb databases. Previously this would call ``len(db)`` which would iterate over all keys -- the answer (empty or not) is known after the first key. diff --git a/Misc/NEWS.d/next/Library/2022-09-10-16-46-16.gh-issue-96735.0YzJuG.rst b/Misc/NEWS.d/next/Library/2022-09-10-16-46-16.gh-issue-96735.0YzJuG.rst deleted file mode 100644 index a29ada98ac20e9..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-10-16-46-16.gh-issue-96735.0YzJuG.rst +++ /dev/null @@ -1 +0,0 @@ -Fix undefined behaviour in :func:`struct.unpack`. diff --git a/Misc/NEWS.d/next/Library/2022-09-13-15-12-31.gh-issue-96734.G08vjz.rst b/Misc/NEWS.d/next/Library/2022-09-13-15-12-31.gh-issue-96734.G08vjz.rst deleted file mode 100644 index e5c8a25b4bc6ab..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-13-15-12-31.gh-issue-96734.G08vjz.rst +++ /dev/null @@ -1 +0,0 @@ -Update :mod:`unicodedata` database to Unicode 15.0.0. diff --git a/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst b/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst new file mode 100644 index 00000000000000..d8a448851f4779 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst @@ -0,0 +1,2 @@ +Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel +TLS (kTLS). Patch by Illia Volochii. diff --git a/Misc/NEWS.d/next/Library/2022-09-15-00-37-33.gh-issue-96741.4b6czN.rst b/Misc/NEWS.d/next/Library/2022-09-15-00-37-33.gh-issue-96741.4b6czN.rst deleted file mode 100644 index e7f53311e589f2..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-15-00-37-33.gh-issue-96741.4b6czN.rst +++ /dev/null @@ -1 +0,0 @@ -Corrected type annotation for dataclass attribute ``pstats.FunctionProfile.ncalls`` to be ``str``. diff --git a/Misc/NEWS.d/next/Library/2022-09-16-07-53-29.gh-issue-95865.oHjX0A.rst b/Misc/NEWS.d/next/Library/2022-09-16-07-53-29.gh-issue-95865.oHjX0A.rst deleted file mode 100644 index 03a5be722949a8..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-16-07-53-29.gh-issue-95865.oHjX0A.rst +++ /dev/null @@ -1,3 +0,0 @@ -Reduce :func:`urllib.parse.quote_from_bytes` memory use on large values. - -Contributed by Dennis Sweeney. diff --git a/Misc/NEWS.d/next/Library/2022-09-17-13-15-10.gh-issue-96819.6RfqM7.rst b/Misc/NEWS.d/next/Library/2022-09-17-13-15-10.gh-issue-96819.6RfqM7.rst deleted file mode 100644 index 07b62a883b85e7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-17-13-15-10.gh-issue-96819.6RfqM7.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed check in :mod:`multiprocessing.resource_tracker` that guarantees that the length of a write to a pipe is not greater than ``PIPE_BUF``. diff --git a/Misc/NEWS.d/next/Library/2022-09-18-04-51-30.gh-issue-96704.DmamRX.rst b/Misc/NEWS.d/next/Library/2022-09-18-04-51-30.gh-issue-96704.DmamRX.rst deleted file mode 100644 index 6ac99197e685c1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-18-04-51-30.gh-issue-96704.DmamRX.rst +++ /dev/null @@ -1 +0,0 @@ -Pass the correct ``contextvars.Context`` when a ``asyncio`` exception handler is called on behalf of a task or callback handle. 
This adds a new ``Task`` method, ``get_context``, and also a new ``Handle`` method with the same name. If this method is not found on a task object (perhaps because it is a third-party library that does not yet provide this method), the context prevailing at the time the exception handler is called is used. diff --git a/Misc/NEWS.d/next/Library/2022-09-22-11-50-29.gh-issue-85760.DETTPd.rst b/Misc/NEWS.d/next/Library/2022-09-22-11-50-29.gh-issue-85760.DETTPd.rst deleted file mode 100644 index af8ae2026f16d3..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-22-11-50-29.gh-issue-85760.DETTPd.rst +++ /dev/null @@ -1 +0,0 @@ -Fix race condition in :mod:`asyncio` where :meth:`~asyncio.SubprocessProtocol.process_exited` called before the :meth:`~asyncio.SubprocessProtocol.pipe_data_received` leading to inconsistent output. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-09-22-14-35-02.gh-issue-97005.yf21Q7.rst b/Misc/NEWS.d/next/Library/2022-09-22-14-35-02.gh-issue-97005.yf21Q7.rst deleted file mode 100644 index d57999aa29b70b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-22-14-35-02.gh-issue-97005.yf21Q7.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled libexpat to 2.4.9 diff --git a/Misc/NEWS.d/next/Library/2022-09-24-18-56-23.gh-issue-96865.o9WUkW.rst b/Misc/NEWS.d/next/Library/2022-09-24-18-56-23.gh-issue-96865.o9WUkW.rst deleted file mode 100644 index b054fdeee0785c..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-24-18-56-23.gh-issue-96865.o9WUkW.rst +++ /dev/null @@ -1,9 +0,0 @@ -fix Flag to use boundary CONFORM - -This restores previous Flag behavior of allowing flags with non-sequential values to be combined; e.g. - - class Skip(Flag): - TWO = 2 - EIGHT = 8 - - Skip.TWO | Skip.EIGHT -> diff --git a/Misc/NEWS.d/next/Library/2022-09-25-20-42-33.gh-issue-73588.uVtjEA.rst b/Misc/NEWS.d/next/Library/2022-09-25-20-42-33.gh-issue-73588.uVtjEA.rst deleted file mode 100644 index d8a0e690e29105..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-25-20-42-33.gh-issue-73588.uVtjEA.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix generation of the default name of :class:`tkinter.Checkbutton`. -Previously, checkbuttons in different parent widgets could have the same -short name and share the same state if arguments "name" and "variable" are -not specified. Now they are globally unique. diff --git a/Misc/NEWS.d/next/Library/2022-09-25-23-24-52.gh-issue-97545.HZLSNt.rst b/Misc/NEWS.d/next/Library/2022-09-25-23-24-52.gh-issue-97545.HZLSNt.rst deleted file mode 100644 index a53902ea670b73..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-25-23-24-52.gh-issue-97545.HZLSNt.rst +++ /dev/null @@ -1 +0,0 @@ -Make Semaphore run faster. diff --git a/Misc/NEWS.d/next/Library/2022-09-29-08-15-55.gh-issue-97639.JSjWYW.rst b/Misc/NEWS.d/next/Library/2022-09-29-08-15-55.gh-issue-97639.JSjWYW.rst deleted file mode 100644 index 65c3105f3bc36b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-29-08-15-55.gh-issue-97639.JSjWYW.rst +++ /dev/null @@ -1 +0,0 @@ -Remove ``tokenize.NL`` check from :mod:`tabnanny`. diff --git a/Misc/NEWS.d/next/Library/2022-09-29-23-22-24.gh-issue-97592.tpJg_J.rst b/Misc/NEWS.d/next/Library/2022-09-29-23-22-24.gh-issue-97592.tpJg_J.rst deleted file mode 100644 index aa245cf944004e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-29-23-22-24.gh-issue-97592.tpJg_J.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid a crash in the C version of :meth:`asyncio.Future.remove_done_callback` when an evil argument is passed. 
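A sketch for the ``Task.get_context()`` addition described above (gh-96704); hedged to 3.12+ where the method exists::

    import asyncio
    import contextvars

    async def main():
        # The task's own context; asyncio exception handlers now run within it.
        ctx = asyncio.current_task().get_context()
        print(isinstance(ctx, contextvars.Context))

    asyncio.run(main())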
diff --git a/Misc/NEWS.d/next/Library/2022-09-30-15-56-20.gh-issue-96827.lzy1iw.rst b/Misc/NEWS.d/next/Library/2022-09-30-15-56-20.gh-issue-96827.lzy1iw.rst deleted file mode 100644 index 159ab32ffbfc34..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-09-30-15-56-20.gh-issue-96827.lzy1iw.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid spurious tracebacks from :mod:`asyncio` when default executor cleanup is delayed until after the event loop is closed (e.g. as the result of a keyboard interrupt). diff --git a/Misc/NEWS.d/next/Library/2022-10-03-14-42-13.gh-issue-97799.Y1iJvf.rst b/Misc/NEWS.d/next/Library/2022-10-03-14-42-13.gh-issue-97799.Y1iJvf.rst deleted file mode 100644 index 71097d29d3441b..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-03-14-42-13.gh-issue-97799.Y1iJvf.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`dataclass` now uses :func:`inspect.get_annotations` to examine the -annotations on class objects. diff --git a/Misc/NEWS.d/next/Library/2022-10-04-00-43-43.gh-issue-97008.3rjtt6.rst b/Misc/NEWS.d/next/Library/2022-10-04-00-43-43.gh-issue-97008.3rjtt6.rst deleted file mode 100644 index b41f88d07890d1..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-04-00-43-43.gh-issue-97008.3rjtt6.rst +++ /dev/null @@ -1,5 +0,0 @@ -:exc:`NameError` and :exc:`AttributeError` spelling suggestions provided -since :gh:`82711` are now also emitted by the pure Python -:mod:`traceback` module. Tests for those suggestions now exercise both -implementations to ensure they are equivalent. Patch by Carl Friedrich -Bolz-Tereick and Łukasz Langa. diff --git a/Misc/NEWS.d/next/Library/2022-10-04-07-55-19.gh-issue-97825.mNdv1l.rst b/Misc/NEWS.d/next/Library/2022-10-04-07-55-19.gh-issue-97825.mNdv1l.rst deleted file mode 100644 index 4633dce7b663e7..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-04-07-55-19.gh-issue-97825.mNdv1l.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes :exc:`AttributeError` when :meth:`subprocess.check_output` is used with argument ``input=None`` and either of the arguments *encoding* or *errors* are used. diff --git a/Misc/NEWS.d/next/Library/2022-10-04-21-21-41.gh-issue-97837.19q-eg.rst b/Misc/NEWS.d/next/Library/2022-10-04-21-21-41.gh-issue-97837.19q-eg.rst deleted file mode 100644 index b1350c959e2b69..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-04-21-21-41.gh-issue-97837.19q-eg.rst +++ /dev/null @@ -1,7 +0,0 @@ -Change deprecate warning message in :mod:`unittest` from - -``It is deprecated to return a value!=None`` - -to - -``It is deprecated to return a value that is not None from a test case`` diff --git a/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst b/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst new file mode 100644 index 00000000000000..b725465ae4f0ef --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst @@ -0,0 +1,2 @@ +On ``uname_result``, restored expectation that ``_fields`` and ``_asdict`` +would include all six properties including ``processor``. diff --git a/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst b/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst new file mode 100644 index 00000000000000..0bfba5e1e32662 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst @@ -0,0 +1,2 @@ +:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths +as relative. This brings its behaviour in line with other parts of pathlib. 
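A quick check for the pure-Python :mod:`traceback` suggestions entry above (gh-97008); hedged to versions where the pure-Python implementation emits the hint::

    import traceback

    try:
        prnt("hello")            # deliberate misspelling of print
    except NameError as exc:
        # The formatted traceback should now include "Did you mean: 'print'?".
        print("".join(traceback.format_exception(exc)).rstrip())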
diff --git a/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst b/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst new file mode 100644 index 00000000000000..f98c181cc9c54b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst @@ -0,0 +1 @@ +Fix refcount error when arguments are packed to tuple in Argument Clinic. diff --git a/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst b/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst new file mode 100644 index 00000000000000..0a4db052755f87 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst @@ -0,0 +1,2 @@ +Fix double-free bug in Argument Clinic ``str_converter`` by +extracting memory clean up to a new ``post_parsing`` section. diff --git a/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst b/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst new file mode 100644 index 00000000000000..6d03574fdaf5bf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst @@ -0,0 +1,2 @@ +Fix :exc:`IndexError` when initializing the config variables on Windows if +``HAVE_DYNAMIC_LOADING`` is not set. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-08-11-11-01-56.gh-issue-95818.iClLdl.rst b/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst similarity index 50% rename from Misc/NEWS.d/next/Core and Builtins/2022-08-11-11-01-56.gh-issue-95818.iClLdl.rst rename to Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst index 1e243f5614f1ca..48576bd457aa0d 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2022-08-11-11-01-56.gh-issue-95818.iClLdl.rst +++ b/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst @@ -1 +1,2 @@ -Skip over incomplete frames in :c:func:`PyThreadState_GetFrame`. +Remove ``_use_broken_old_ctypes_structure_semantics_`` +old untested and undocumented hack from :mod:`ctypes`. diff --git a/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst b/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst new file mode 100644 index 00000000000000..451561c579daff --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst @@ -0,0 +1,2 @@ +Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when +changing line numbers. diff --git a/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst b/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst new file mode 100644 index 00000000000000..f35799d454573e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst @@ -0,0 +1,2 @@ +Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a custom event loop factory. +Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst b/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst new file mode 100644 index 00000000000000..c6e2a0ca25ff2a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst @@ -0,0 +1 @@ +Use final status of an enum to determine lookup or creation branch of functional API. 
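A minimal sketch for the :func:`asyncio.run` *loop_factory* entry above (gh-99388); hedged to 3.12+ and simply passing :func:`asyncio.new_event_loop` as the factory::

    import asyncio

    async def main():
        return "done"

    print(asyncio.run(main(), loop_factory=asyncio.new_event_loop))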
diff --git a/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst b/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst new file mode 100644 index 00000000000000..014161cf7b1d44 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst @@ -0,0 +1 @@ +Release the GIL when calling termios APIs to avoid blocking threads. diff --git a/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst b/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst new file mode 100644 index 00000000000000..82720d17bcafd3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst @@ -0,0 +1,2 @@ +Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling +``_imp.source_hash()``. diff --git a/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst b/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst new file mode 100644 index 00000000000000..f6ee449891d9f6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst @@ -0,0 +1,3 @@ +Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now +``addClassCleanup()`` uses separate lists for different ``TestCase`` +subclasses, and ``doClassCleanups()`` only cleans up the particular class. diff --git a/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst b/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst new file mode 100644 index 00000000000000..4b12fd9c8d798f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst @@ -0,0 +1 @@ +Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise -- wait for all existing connections to complete, after closing the server. diff --git a/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst b/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst deleted file mode 100644 index da81a1f6993dbe..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst +++ /dev/null @@ -1,4 +0,0 @@ -The deprecated mailcap module now refuses to inject unsafe text (filenames, -MIME types, parameters) into shell commands. Instead of using such text, it -will warn and act as if a match was not found (or for test commands, as if -the test failed). diff --git a/Misc/NEWS.d/next/Security/2022-05-19-08-53-07.gh-issue-92888.TLtR9W.rst b/Misc/NEWS.d/next/Security/2022-05-19-08-53-07.gh-issue-92888.TLtR9W.rst deleted file mode 100644 index 4841b8a90a0879..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-05-19-08-53-07.gh-issue-92888.TLtR9W.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``memoryview`` use after free when accessing the backing buffer in certain cases. 
- diff --git a/Misc/NEWS.d/next/Security/2022-06-03-12-52-53.gh-issue-79096.YVoxgC.rst b/Misc/NEWS.d/next/Security/2022-06-03-12-52-53.gh-issue-79096.YVoxgC.rst deleted file mode 100644 index 9ec3335dc71b92..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-06-03-12-52-53.gh-issue-79096.YVoxgC.rst +++ /dev/null @@ -1 +0,0 @@ -LWPCookieJar and MozillaCookieJar create files with file mode 600 instead of 644 (Microsoft Windows is not affected) diff --git a/Misc/NEWS.d/next/Security/2022-06-15-20-09-23.gh-issue-87389.QVaC3f.rst b/Misc/NEWS.d/next/Security/2022-06-15-20-09-23.gh-issue-87389.QVaC3f.rst deleted file mode 100644 index 029d437190deb5..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-06-15-20-09-23.gh-issue-87389.QVaC3f.rst +++ /dev/null @@ -1,3 +0,0 @@ -:mod:`http.server`: Fix an open redirection vulnerability in the HTTP server -when an URI path starts with ``//``. Vulnerability discovered, and initial -fix proposed, by Hamza Avvan. diff --git a/Misc/NEWS.d/next/Security/2022-09-28-17-09-37.gh-issue-97616.K1e3Xs.rst b/Misc/NEWS.d/next/Security/2022-09-28-17-09-37.gh-issue-97616.K1e3Xs.rst deleted file mode 100644 index 721427fe646575..00000000000000 --- a/Misc/NEWS.d/next/Security/2022-09-28-17-09-37.gh-issue-97616.K1e3Xs.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix multiplying a list by an integer (``list *= int``): detect the integer -overflow when the new allocated length is close to the maximum size. Issue -reported by Jordan Limor. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst b/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst new file mode 100644 index 00000000000000..c931409b817122 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst @@ -0,0 +1,2 @@ +Avoid publishing list of active per-interpreter audit hooks via the +:mod:`gc` module diff --git a/Misc/NEWS.d/next/Tests/2022-03-14-23-28-17.bpo-47016.K-t2QX.rst b/Misc/NEWS.d/next/Tests/2022-03-14-23-28-17.bpo-47016.K-t2QX.rst deleted file mode 100644 index 774bfafc021efc..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-03-14-23-28-17.bpo-47016.K-t2QX.rst +++ /dev/null @@ -1,2 +0,0 @@ -Create a GitHub Actions workflow for verifying bundled pip and setuptools. -Patch by Illia Volochii and Adam Turner. diff --git a/Misc/NEWS.d/next/Tests/2022-05-08-15-40-41.gh-issue-92514.Xbf5JY.rst b/Misc/NEWS.d/next/Tests/2022-05-08-15-40-41.gh-issue-92514.Xbf5JY.rst deleted file mode 100644 index 6b82196a25e408..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-05-08-15-40-41.gh-issue-92514.Xbf5JY.rst +++ /dev/null @@ -1 +0,0 @@ -Remove unused ``test.support.BasicTestRunner``. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Tests/2022-05-12-05-51-06.gh-issue-92670.7L43Z_.rst b/Misc/NEWS.d/next/Tests/2022-05-12-05-51-06.gh-issue-92670.7L43Z_.rst deleted file mode 100644 index c1349519e7c37c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-05-12-05-51-06.gh-issue-92670.7L43Z_.rst +++ /dev/null @@ -1,3 +0,0 @@ -Skip ``test_shutil.TestCopy.test_copyfile_nonexistent_dir`` test on AIX as the test uses a trailing -slash to force the OS consider the path as a directory, but on AIX the -trailing slash has no effect and is considered as a file. 
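A POSIX-only sketch for the cookie-file permission entry above (gh-79096); the filename is illustrative, and the ``0o600`` mode applies only on versions carrying the fix::

    import http.cookiejar
    import os
    import stat

    jar = http.cookiejar.LWPCookieJar("demo-cookies.txt")
    jar.save()
    # Expect -rw------- (0o600) on POSIX; Windows is unaffected.
    print(stat.filemode(os.stat("demo-cookies.txt").st_mode))
    os.remove("demo-cookies.txt")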
diff --git a/Misc/NEWS.d/next/Tests/2022-05-25-23-00-35.gh-issue-92886.Y-vrWj.rst b/Misc/NEWS.d/next/Tests/2022-05-25-23-00-35.gh-issue-92886.Y-vrWj.rst deleted file mode 100644 index 93c1ffe33f28a6..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-05-25-23-00-35.gh-issue-92886.Y-vrWj.rst +++ /dev/null @@ -1 +0,0 @@ -Fixing tests that fail when running with optimizations (``-O``) in ``test_zipimport.py`` diff --git a/Misc/NEWS.d/next/Tests/2022-05-25-23-07-15.gh-issue-92886.Aki63_.rst b/Misc/NEWS.d/next/Tests/2022-05-25-23-07-15.gh-issue-92886.Aki63_.rst deleted file mode 100644 index 581f6bfea24b6e..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-05-25-23-07-15.gh-issue-92886.Aki63_.rst +++ /dev/null @@ -1 +0,0 @@ -Fixing tests that fail when running with optimizations (``-O``) in ``test_imaplib.py``. diff --git a/Misc/NEWS.d/next/Tests/2022-06-03-12-22-44.gh-issue-89858.ftBvjE.rst b/Misc/NEWS.d/next/Tests/2022-06-03-12-22-44.gh-issue-89858.ftBvjE.rst deleted file mode 100644 index ef806a93c018cc..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-03-12-22-44.gh-issue-89858.ftBvjE.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``test_embed`` for out-of-tree builds. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Tests/2022-06-03-14-18-37.gh-issue-90473.7iXVRK.rst b/Misc/NEWS.d/next/Tests/2022-06-03-14-18-37.gh-issue-90473.7iXVRK.rst deleted file mode 100644 index a3165a01111fb9..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-03-14-18-37.gh-issue-90473.7iXVRK.rst +++ /dev/null @@ -1,2 +0,0 @@ -Skip symlink tests on WASI. wasmtime uses ``openat2(2)`` with -``RESOLVE_BENEATH`` flag, which prevents symlinks with absolute paths. diff --git a/Misc/NEWS.d/next/Tests/2022-06-03-16-26-04.gh-issue-57539.HxWgYO.rst b/Misc/NEWS.d/next/Tests/2022-06-03-16-26-04.gh-issue-57539.HxWgYO.rst deleted file mode 100644 index 0734b599f4ad95..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-03-16-26-04.gh-issue-57539.HxWgYO.rst +++ /dev/null @@ -1 +0,0 @@ -Increase calendar test coverage for :meth:`calendar.LocaleTextCalendar.formatweekday`. diff --git a/Misc/NEWS.d/next/Tests/2022-06-04-12-05-31.gh-issue-90473.RSpjF7.rst b/Misc/NEWS.d/next/Tests/2022-06-04-12-05-31.gh-issue-90473.RSpjF7.rst deleted file mode 100644 index 07d579995c0911..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-04-12-05-31.gh-issue-90473.RSpjF7.rst +++ /dev/null @@ -1 +0,0 @@ -Skip tests on WASI that require symlinks with absolute paths. diff --git a/Misc/NEWS.d/next/Tests/2022-06-05-10-16-45.gh-issue-90473.QMu7A8.rst b/Misc/NEWS.d/next/Tests/2022-06-05-10-16-45.gh-issue-90473.QMu7A8.rst deleted file mode 100644 index 6c76b7f4990e4f..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-05-10-16-45.gh-issue-90473.QMu7A8.rst +++ /dev/null @@ -1,2 +0,0 @@ -WASI does not have a ``chmod(2)`` syscall. :func:`os.chmod` is now a dummy -function on WASI. Skip all tests that depend on working :func:`os.chmod`. diff --git a/Misc/NEWS.d/next/Tests/2022-06-08-14-17-59.gh-issue-93575.Xb2LNB.rst b/Misc/NEWS.d/next/Tests/2022-06-08-14-17-59.gh-issue-93575.Xb2LNB.rst deleted file mode 100644 index 98d15328a087ae..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-08-14-17-59.gh-issue-93575.Xb2LNB.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix issue with test_unicode test_raiseMemError. The test case now use -``test.support.calcobjsize`` to calculate size of PyUnicode structs. -:func:`sys.getsizeof` may return different size when string has UTF-8 -memory. 
diff --git a/Misc/NEWS.d/next/Tests/2022-06-08-22-32-56.gh-issue-93616.e5Kkx2.rst b/Misc/NEWS.d/next/Tests/2022-06-08-22-32-56.gh-issue-93616.e5Kkx2.rst deleted file mode 100644 index de8ec52cc88b2a..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-08-22-32-56.gh-issue-93616.e5Kkx2.rst +++ /dev/null @@ -1,2 +0,0 @@ -``test_modulefinder`` now creates a temporary directory in -``ModuleFinderTest.setUp()`` instead of module scope. diff --git a/Misc/NEWS.d/next/Tests/2022-06-10-21-18-14.gh-issue-84461.9TAb26.rst b/Misc/NEWS.d/next/Tests/2022-06-10-21-18-14.gh-issue-84461.9TAb26.rst deleted file mode 100644 index 7cdf5bee721870..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-10-21-18-14.gh-issue-84461.9TAb26.rst +++ /dev/null @@ -1,2 +0,0 @@ -``run_tests.py`` now handles cross compiling env vars correctly and pass -``HOSTRUNNER`` to regression tests. diff --git a/Misc/NEWS.d/next/Tests/2022-06-16-17-50-58.gh-issue-93353.JdpATx.rst b/Misc/NEWS.d/next/Tests/2022-06-16-17-50-58.gh-issue-93353.JdpATx.rst deleted file mode 100644 index 4e232948f49ee7..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-16-17-50-58.gh-issue-93353.JdpATx.rst +++ /dev/null @@ -1,2 +0,0 @@ -regrtest now checks if a test leaks temporary files or directories if run -with -jN option. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2022-06-16-21-38-18.gh-issue-93852.U_Hl6s.rst b/Misc/NEWS.d/next/Tests/2022-06-16-21-38-18.gh-issue-93852.U_Hl6s.rst deleted file mode 100644 index ce86eead02660c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-16-21-38-18.gh-issue-93852.U_Hl6s.rst +++ /dev/null @@ -1,4 +0,0 @@ -test_asyncio, test_logging, test_socket and test_socketserver now create -AF_UNIX domains in the current directory to no longer fail with -``OSError("AF_UNIX path too long")`` if the temporary directory (the -:envvar:`TMPDIR` environment variable) is too long. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2022-06-17-13-27-21.gh-issue-93884.5pvPvl.rst b/Misc/NEWS.d/next/Tests/2022-06-17-13-27-21.gh-issue-93884.5pvPvl.rst deleted file mode 100644 index ce389149aaae86..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-17-13-27-21.gh-issue-93884.5pvPvl.rst +++ /dev/null @@ -1 +0,0 @@ -Add test cases for :c:func:`PyNumber_ToBase` that take a large number or a non-int object as parameter. diff --git a/Misc/NEWS.d/next/Tests/2022-06-17-13-55-11.gh-issue-93957.X4ovYV.rst b/Misc/NEWS.d/next/Tests/2022-06-17-13-55-11.gh-issue-93957.X4ovYV.rst deleted file mode 100644 index 2719933f6b94c7..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-17-13-55-11.gh-issue-93957.X4ovYV.rst +++ /dev/null @@ -1,2 +0,0 @@ -Provide nicer error reporting from subprocesses in -test_venv.EnsurePipTest.test_with_pip. diff --git a/Misc/NEWS.d/next/Tests/2022-06-17-15-20-09.gh-issue-93951.CW1Vv4.rst b/Misc/NEWS.d/next/Tests/2022-06-17-15-20-09.gh-issue-93951.CW1Vv4.rst deleted file mode 100644 index b627466b4b221c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-17-15-20-09.gh-issue-93951.CW1Vv4.rst +++ /dev/null @@ -1 +0,0 @@ -In test_bdb.StateTestCase.test_skip, avoid including auxiliary importers. diff --git a/Misc/NEWS.d/next/Tests/2022-06-20-23-04-52.gh-issue-93839.OE3Ybk.rst b/Misc/NEWS.d/next/Tests/2022-06-20-23-04-52.gh-issue-93839.OE3Ybk.rst deleted file mode 100644 index 121b64b13307ae..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-20-23-04-52.gh-issue-93839.OE3Ybk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Move ``Lib/ctypes/test/`` to ``Lib/test/test_ctypes/``. Patch by Victor -Stinner. 
diff --git a/Misc/NEWS.d/next/Tests/2022-06-21-17-37-46.gh-issue-54781.BjVAVg.rst b/Misc/NEWS.d/next/Tests/2022-06-21-17-37-46.gh-issue-54781.BjVAVg.rst deleted file mode 100644 index f7ae7b976af95b..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-21-17-37-46.gh-issue-54781.BjVAVg.rst +++ /dev/null @@ -1,2 +0,0 @@ -Rename test_tk to test_tkinter, and rename test_ttk_guionly to test_ttk. -Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2022-06-27-08-53-40.gh-issue-94315.MoZT9t.rst b/Misc/NEWS.d/next/Tests/2022-06-27-08-53-40.gh-issue-94315.MoZT9t.rst deleted file mode 100644 index 09d5d7e56ae835..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-27-08-53-40.gh-issue-94315.MoZT9t.rst +++ /dev/null @@ -1,2 +0,0 @@ -Tests now check for DAC override capability instead of relying on -:func:`os.geteuid`. diff --git a/Misc/NEWS.d/next/Tests/2022-06-27-21-27-20.gh-issue-94208.VR6HX-.rst b/Misc/NEWS.d/next/Tests/2022-06-27-21-27-20.gh-issue-94208.VR6HX-.rst deleted file mode 100644 index d0f970ad286b1d..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-06-27-21-27-20.gh-issue-94208.VR6HX-.rst +++ /dev/null @@ -1,2 +0,0 @@ -``test_ssl`` is now checking for supported TLS version and protocols in more -tests. diff --git a/Misc/NEWS.d/next/Tests/2022-07-05-17-53-13.gh-issue-91330.Qys5IL.rst b/Misc/NEWS.d/next/Tests/2022-07-05-17-53-13.gh-issue-91330.Qys5IL.rst deleted file mode 100644 index 6f44dcfd7e66a2..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-05-17-53-13.gh-issue-91330.Qys5IL.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added more tests for :mod:`dataclasses` to cover behavior with data -descriptor-based fields. diff --git a/Misc/NEWS.d/next/Tests/2022-07-08-12-22-00.gh-issue-94675.IiTs5f.rst b/Misc/NEWS.d/next/Tests/2022-07-08-12-22-00.gh-issue-94675.IiTs5f.rst deleted file mode 100644 index d0005d9f6014cd..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-08-12-22-00.gh-issue-94675.IiTs5f.rst +++ /dev/null @@ -1 +0,0 @@ -Add a regression test for :mod:`re` exponentional slowdown when using rjsmin. diff --git a/Misc/NEWS.d/next/Tests/2022-07-24-16-28-31.gh-issue-93963.UB9azu.rst b/Misc/NEWS.d/next/Tests/2022-07-24-16-28-31.gh-issue-93963.UB9azu.rst deleted file mode 100644 index 89ce124c55f226..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-24-16-28-31.gh-issue-93963.UB9azu.rst +++ /dev/null @@ -1 +0,0 @@ -Updated tests to use preferred location for ``importlib.resources`` ABCs. diff --git a/Misc/NEWS.d/next/Tests/2022-07-24-17-24-42.gh-issue-95218.zfBLtu.rst b/Misc/NEWS.d/next/Tests/2022-07-24-17-24-42.gh-issue-95218.zfBLtu.rst deleted file mode 100644 index 7326689657a7ac..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-24-17-24-42.gh-issue-95218.zfBLtu.rst +++ /dev/null @@ -1 +0,0 @@ -Move tests for importlib.resources into test_importlib.resources. diff --git a/Misc/NEWS.d/next/Tests/2022-07-24-20-19-05.gh-issue-95212.fHiU4e.rst b/Misc/NEWS.d/next/Tests/2022-07-24-20-19-05.gh-issue-95212.fHiU4e.rst deleted file mode 100644 index 44cea181cc1f1e..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-24-20-19-05.gh-issue-95212.fHiU4e.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make multiprocessing test case ``test_shared_memory_recreate`` -parallel-safe. 
diff --git a/Misc/NEWS.d/next/Tests/2022-07-26-15-22-19.gh-issue-95280.h8HvbP.rst b/Misc/NEWS.d/next/Tests/2022-07-26-15-22-19.gh-issue-95280.h8HvbP.rst deleted file mode 100644 index 523d9d5f2f8bf3..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-07-26-15-22-19.gh-issue-95280.h8HvbP.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix problem with ``test_ssl`` ``test_get_ciphers`` on systems that require -perfect forward secrecy (PFS) ciphers. diff --git a/Misc/NEWS.d/next/Tests/2022-08-05-09-57-43.gh-issue-95573.edMdQB.rst b/Misc/NEWS.d/next/Tests/2022-08-05-09-57-43.gh-issue-95573.edMdQB.rst deleted file mode 100644 index 8580556965e1da..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-08-05-09-57-43.gh-issue-95573.edMdQB.rst +++ /dev/null @@ -1,6 +0,0 @@ -:source:`Lib/test/test_asyncio/test_ssl.py` exposed a bug in the macOS -kernel where intense concurrent load on non-blocking sockets occasionally -causes :const:`errno.ENOBUFS` ("No buffer space available") to be emitted. -FB11063974 filed with Apple, in the mean time as a workaround buffer size -used in tests on macOS is decreased to avoid intermittent failures. Patch -by Fantix King. diff --git a/Misc/NEWS.d/next/Tests/2022-08-22-14-59-42.gh-issue-95243.DeD66V.rst b/Misc/NEWS.d/next/Tests/2022-08-22-14-59-42.gh-issue-95243.DeD66V.rst deleted file mode 100644 index a9ca1f8b86764c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-08-22-14-59-42.gh-issue-95243.DeD66V.rst +++ /dev/null @@ -1,3 +0,0 @@ -Mitigate the inherent race condition from using find_unused_port() in -testSockName() by trying to find an unused port a few times before failing. -Patch by Ross Burton. diff --git a/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst b/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst new file mode 100644 index 00000000000000..dc86e1d70f1289 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst @@ -0,0 +1 @@ +Add functional test for Argument Clinic. diff --git a/Misc/NEWS.d/next/Tests/2022-09-08-18-31-26.gh-issue-96624.5cANM1.rst b/Misc/NEWS.d/next/Tests/2022-09-08-18-31-26.gh-issue-96624.5cANM1.rst deleted file mode 100644 index 2d1bcc03e1c8b8..00000000000000 --- a/Misc/NEWS.d/next/Tests/2022-09-08-18-31-26.gh-issue-96624.5cANM1.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the failure of repeated runs of ``test.test_unittest`` caused by side effects in ``test_dotted_but_module_not_loaded``. diff --git a/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst b/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst new file mode 100644 index 00000000000000..3db1ec12b5202e --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst @@ -0,0 +1,3 @@ +Optional big memory tests in ``test_sqlite3`` now catch the correct +:exc:`sqlite.DataError` exception type in case of too large strings and/or +blobs passed. diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-06-19-14-56-33.gh-issue-86087.R8MkRy.rst b/Misc/NEWS.d/next/Tools-Demos/2022-06-19-14-56-33.gh-issue-86087.R8MkRy.rst deleted file mode 100644 index a10d4c76884522..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-06-19-14-56-33.gh-issue-86087.R8MkRy.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``Tools/scripts/parseentities.py`` script used to parse HTML4 entities -has been removed. 
diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-06-29-22-47-11.gh-issue-94430.hdov8L.rst b/Misc/NEWS.d/next/Tools-Demos/2022-06-29-22-47-11.gh-issue-94430.hdov8L.rst deleted file mode 100644 index 88aa8d08665319..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-06-29-22-47-11.gh-issue-94430.hdov8L.rst +++ /dev/null @@ -1,2 +0,0 @@ -Allow parameters named ``module`` and ``self`` with custom C names in Argument -Clinic. Patch by Erlend E. Aasland diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-07-04-01-37-42.gh-issue-94538.1rgy1Y.rst b/Misc/NEWS.d/next/Tools-Demos/2022-07-04-01-37-42.gh-issue-94538.1rgy1Y.rst deleted file mode 100644 index e39ae3950c0fe3..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-07-04-01-37-42.gh-issue-94538.1rgy1Y.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix Argument Clinic output to custom file destinations. Patch by Erlend E. -Aasland. diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-07-04-10-02-02.gh-issue-93939.U6sW6H.rst b/Misc/NEWS.d/next/Tools-Demos/2022-07-04-10-02-02.gh-issue-93939.U6sW6H.rst deleted file mode 100644 index a129d6800c360a..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-07-04-10-02-02.gh-issue-93939.U6sW6H.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add script ``Tools/scripts/check_modules.py`` to check and validate builtin -and shared extension modules. The script also handles ``Modules/Setup`` and -will eventually replace ``setup.py``. diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-08-10-17-08-43.gh-issue-95853.HCjC2m.rst b/Misc/NEWS.d/next/Tools-Demos/2022-08-10-17-08-43.gh-issue-95853.HCjC2m.rst deleted file mode 100644 index c38db3af425e51..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-08-10-17-08-43.gh-issue-95853.HCjC2m.rst +++ /dev/null @@ -1,2 +0,0 @@ -The new tool ``Tools/wasm/wasm_builder.py`` automates configure, compile, and -test steps for building CPython on WebAssembly platforms. diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst b/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst new file mode 100644 index 00000000000000..4a308a9306055c --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst @@ -0,0 +1,7 @@ +Argument Clinic varargs bugfixes + +* Fix out-of-bounds error in :c:func:`!_PyArg_UnpackKeywordsWithVararg`. +* Fix incorrect check which allowed more than one varargs in clinic.py. +* Fix miscalculation of ``noptargs`` in generated code. +* Do not generate ``noptargs`` when there is a vararg argument and no optional argument. + diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-08-29-17-25-13.gh-issue-95853.Ce17cT.rst b/Misc/NEWS.d/next/Tools-Demos/2022-08-29-17-25-13.gh-issue-95853.Ce17cT.rst deleted file mode 100644 index 1cd1ce14fac08c..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-08-29-17-25-13.gh-issue-95853.Ce17cT.rst +++ /dev/null @@ -1,2 +0,0 @@ -The ``wasm_build.py`` script now pre-builds Emscripten ports, checks for -broken EMSDK versions, and warns about pkg-config env vars. diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-09-30-14-30-12.gh-issue-97669.gvbgcg.rst b/Misc/NEWS.d/next/Tools-Demos/2022-09-30-14-30-12.gh-issue-97669.gvbgcg.rst deleted file mode 100644 index 604f20290fc8c2..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-09-30-14-30-12.gh-issue-97669.gvbgcg.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove outdated example scripts of the ``Tools/scripts/`` directory. A copy can -be found in the `old-demos project `_. -Patch by Victor Stinner. 
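The gh-93939 entry above adds a build-validation script for builtin and shared extension modules. The real ``Tools/scripts/check_modules.py`` does considerably more (it also understands ``Modules/Setup``); as a rough, simplified illustration of the core idea — confirm that every expected stdlib module is actually importable in this build — something like the following, which is not the script itself::

    import importlib.util
    import sys

    # Platform-specific modules (e.g. winreg on POSIX) legitimately show up here;
    # the interesting entries are extensions that failed to build on this platform.
    missing = sorted(
        name
        for name in sys.stdlib_module_names
        if name not in sys.builtin_module_names
        and importlib.util.find_spec(name) is None
    )
    print("stdlib modules with no importable implementation:")
    for name in missing:
        print(" ", name)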
diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-09-30-18-35-11.gh-issue-97681.-KO1Ba.rst b/Misc/NEWS.d/next/Tools-Demos/2022-09-30-18-35-11.gh-issue-97681.-KO1Ba.rst deleted file mode 100644 index 6f1ec12ce0c365..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2022-09-30-18-35-11.gh-issue-97681.-KO1Ba.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove the ``Tools/demo/`` directory which contained old demo scripts. A copy -can be found in the `old-demos project -`_. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Windows/2020-01-10-23-33-03.bpo-38704.2Idtdn.rst b/Misc/NEWS.d/next/Windows/2020-01-10-23-33-03.bpo-38704.2Idtdn.rst deleted file mode 100644 index 519338f27a47ae..00000000000000 --- a/Misc/NEWS.d/next/Windows/2020-01-10-23-33-03.bpo-38704.2Idtdn.rst +++ /dev/null @@ -1 +0,0 @@ -Prevent installation on unsupported Windows versions. diff --git a/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst b/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst new file mode 100644 index 00000000000000..2670aeef9a2525 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst @@ -0,0 +1,2 @@ +Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on +Windows. diff --git a/Misc/NEWS.d/next/Windows/2022-03-20-15-47-35.bpo-42658.16eXtb.rst b/Misc/NEWS.d/next/Windows/2022-03-20-15-47-35.bpo-42658.16eXtb.rst deleted file mode 100644 index 852cc77676a31d..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-03-20-15-47-35.bpo-42658.16eXtb.rst +++ /dev/null @@ -1,3 +0,0 @@ -Support native Windows case-insensitive path comparisons by using -``LCMapStringEx`` instead of :func:`str.lower` in :func:`ntpath.normcase`. -Add ``LCMapStringEx`` to the :mod:`_winapi` module. diff --git a/Misc/NEWS.d/next/Windows/2022-04-12-18-35-20.gh-issue-91061.x40hSK.rst b/Misc/NEWS.d/next/Windows/2022-04-12-18-35-20.gh-issue-91061.x40hSK.rst deleted file mode 100644 index aadc3034420251..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-04-12-18-35-20.gh-issue-91061.x40hSK.rst +++ /dev/null @@ -1 +0,0 @@ -Accept os.PathLike for the argument to winsound.PlaySound diff --git a/Misc/NEWS.d/next/Windows/2022-05-05-06-27-59.bpo-46907.IW-uvT.rst b/Misc/NEWS.d/next/Windows/2022-05-05-06-27-59.bpo-46907.IW-uvT.rst deleted file mode 100644 index 420fbd210ef199..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-05-05-06-27-59.bpo-46907.IW-uvT.rst +++ /dev/null @@ -1 +0,0 @@ -Update Windows installer to use SQLite 3.38.4. diff --git a/Misc/NEWS.d/next/Windows/2022-05-16-11-45-06.gh-issue-92841.NQx107.rst b/Misc/NEWS.d/next/Windows/2022-05-16-11-45-06.gh-issue-92841.NQx107.rst deleted file mode 100644 index 5e1897e6ba1bc6..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-05-16-11-45-06.gh-issue-92841.NQx107.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`asyncio` no longer throws ``RuntimeError: Event loop is closed`` on -interpreter exit after asynchronous socket activity. Patch by Oleg Iarygin. 
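The bpo-42658 entry above moves ``ntpath.normcase`` from ``str.lower()`` to the Win32 ``LCMapStringEx`` API so that case folding matches what the Windows filesystem itself does. For plain ASCII paths the observable result is the same as before::

    import ntpath

    # Separator normalisation and ASCII lowercasing are unchanged; the new code
    # path only affects how non-ASCII case folding is done on Windows itself,
    # where it now follows the filesystem's own rules.
    print(ntpath.normcase("C:/Some/Dir/File.TXT"))  # -> c:\some\dir\file.txt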
diff --git a/Misc/NEWS.d/next/Windows/2022-05-19-14-01-30.gh-issue-92984.Dsxnlr.rst b/Misc/NEWS.d/next/Windows/2022-05-19-14-01-30.gh-issue-92984.Dsxnlr.rst deleted file mode 100644 index 2d3071b00b7618..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-05-19-14-01-30.gh-issue-92984.Dsxnlr.rst +++ /dev/null @@ -1 +0,0 @@ -Explicitly disable incremental linking for non-Debug builds diff --git a/Misc/NEWS.d/next/Windows/2022-05-19-21-44-25.gh-issue-92817.Jrf-Kv.rst b/Misc/NEWS.d/next/Windows/2022-05-19-21-44-25.gh-issue-92817.Jrf-Kv.rst deleted file mode 100644 index 16acba1d6274c9..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-05-19-21-44-25.gh-issue-92817.Jrf-Kv.rst +++ /dev/null @@ -1,3 +0,0 @@ -Ensures that :file:`py.exe` will prefer an active virtual environment over -default tags specified with environment variables or through a -:file:`py.ini` file. diff --git a/Misc/NEWS.d/next/Windows/2022-05-28-19-36-13.gh-issue-43414.NGMJ3g.rst b/Misc/NEWS.d/next/Windows/2022-05-28-19-36-13.gh-issue-43414.NGMJ3g.rst deleted file mode 100644 index 553e8762d5d1e6..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-05-28-19-36-13.gh-issue-43414.NGMJ3g.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`os.get_terminal_size` now attempts to read the size from any provided -handle, rather than only supporting file descriptors 0, 1 and 2. diff --git a/Misc/NEWS.d/next/Windows/2022-06-15-01-03-52.gh-issue-93824.mR4mxu.rst b/Misc/NEWS.d/next/Windows/2022-06-15-01-03-52.gh-issue-93824.mR4mxu.rst deleted file mode 100644 index cabe983847c891..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-06-15-01-03-52.gh-issue-93824.mR4mxu.rst +++ /dev/null @@ -1,2 +0,0 @@ -Drag and drop of files onto Python files in Windows Explorer has been -enabled for Windows ARM64. diff --git a/Misc/NEWS.d/next/Windows/2022-06-20-22-32-14.gh-issue-94018.bycC3A.rst b/Misc/NEWS.d/next/Windows/2022-06-20-22-32-14.gh-issue-94018.bycC3A.rst deleted file mode 100644 index a2e558b3b4b3af..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-06-20-22-32-14.gh-issue-94018.bycC3A.rst +++ /dev/null @@ -1 +0,0 @@ -:mod:`zipfile` will now remove trailing spaces from path components when extracting files on Windows. diff --git a/Misc/NEWS.d/next/Windows/2022-07-12-20-45-43.gh-issue-94772.uNMmdG.rst b/Misc/NEWS.d/next/Windows/2022-07-12-20-45-43.gh-issue-94772.uNMmdG.rst deleted file mode 100644 index bb5ab754484eb3..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-07-12-20-45-43.gh-issue-94772.uNMmdG.rst +++ /dev/null @@ -1 +0,0 @@ -Fix incorrect handling of shebang lines in py.exe launcher diff --git a/Misc/NEWS.d/next/Windows/2022-07-16-16-18-32.gh-issue-90844.vwITT3.rst b/Misc/NEWS.d/next/Windows/2022-07-16-16-18-32.gh-issue-90844.vwITT3.rst deleted file mode 100644 index 8d9e850a5b5b7c..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-07-16-16-18-32.gh-issue-90844.vwITT3.rst +++ /dev/null @@ -1,2 +0,0 @@ -Allow virtual environments to correctly launch when they have spaces in the -path. diff --git a/Misc/NEWS.d/next/Windows/2022-07-26-20-33-12.gh-issue-95285.w6fa22.rst b/Misc/NEWS.d/next/Windows/2022-07-26-20-33-12.gh-issue-95285.w6fa22.rst deleted file mode 100644 index 76b38e7803ccac..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-07-26-20-33-12.gh-issue-95285.w6fa22.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :ref:`launcher` handling of command lines where it is only passed a -short executable name. 
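The gh-43414 entry above extends ``os.get_terminal_size`` on Windows to query whatever console handle backs the descriptor it is given, rather than only file descriptors 0, 1 and 2. The call form itself is unchanged; a small usage sketch::

    import os
    import sys

    try:
        size = os.get_terminal_size(sys.stderr.fileno())  # any fd, not just 0-2
    except OSError:
        print("stderr is not attached to a console")
    else:
        print(f"stderr console is {size.columns}x{size.lines}")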
diff --git a/Misc/NEWS.d/next/Windows/2022-07-28-20-21-38.gh-issue-95359.ywMrgu.rst b/Misc/NEWS.d/next/Windows/2022-07-28-20-21-38.gh-issue-95359.ywMrgu.rst deleted file mode 100644 index 513a2be28a9c20..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-07-28-20-21-38.gh-issue-95359.ywMrgu.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix :ref:`launcher` handling of :file:`py.ini` commands (it was incorrectly -expecting a ``py_`` prefix on keys) and crashes when reading per-user -configuration file. diff --git a/Misc/NEWS.d/next/Windows/2022-07-30-14-18-33.gh-issue-95445.mjrTaq.rst b/Misc/NEWS.d/next/Windows/2022-07-30-14-18-33.gh-issue-95445.mjrTaq.rst deleted file mode 100644 index 565489ebf909d5..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-07-30-14-18-33.gh-issue-95445.mjrTaq.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes the unsuccessful removal of the HTML document directory when uninstalling with Windows msi. diff --git a/Misc/NEWS.d/next/Windows/2022-08-03-00-49-46.gh-issue-94399.KvxHc0.rst b/Misc/NEWS.d/next/Windows/2022-08-03-00-49-46.gh-issue-94399.KvxHc0.rst deleted file mode 100644 index a49e99ca266526..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-03-00-49-46.gh-issue-94399.KvxHc0.rst +++ /dev/null @@ -1,3 +0,0 @@ -Restores the behaviour of :ref:`launcher` for ``/usr/bin/env`` shebang -lines, which will now search :envvar:`PATH` for an executable matching the -given command. If none is found, the usual search process is used. diff --git a/Misc/NEWS.d/next/Windows/2022-08-04-01-12-27.gh-issue-95587.Fvdv5q.rst b/Misc/NEWS.d/next/Windows/2022-08-04-01-12-27.gh-issue-95587.Fvdv5q.rst deleted file mode 100644 index 1033e892876c91..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-04-01-12-27.gh-issue-95587.Fvdv5q.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixes some issues where the Windows installer would incorrectly detect -certain features of an existing install when upgrading. diff --git a/Misc/NEWS.d/next/Windows/2022-08-04-18-47-54.gh-issue-95656.VJ1d13.rst b/Misc/NEWS.d/next/Windows/2022-08-04-18-47-54.gh-issue-95656.VJ1d13.rst deleted file mode 100644 index 77fea4c33f7a92..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-04-18-47-54.gh-issue-95656.VJ1d13.rst +++ /dev/null @@ -1,2 +0,0 @@ -Enable the :meth:`~sqlite3.Connection.enable_load_extension` :mod:`sqlite3` -API. diff --git a/Misc/NEWS.d/next/Windows/2022-08-10-22-46-48.gh-issue-95733.2_urOp.rst b/Misc/NEWS.d/next/Windows/2022-08-10-22-46-48.gh-issue-95733.2_urOp.rst deleted file mode 100644 index 996209211690eb..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-10-22-46-48.gh-issue-95733.2_urOp.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make certain requirements of the Windows Store package optional to allow -installing on earlier updates of Windows. diff --git a/Misc/NEWS.d/next/Windows/2022-08-26-00-11-18.gh-issue-89545.zmJMY_.rst b/Misc/NEWS.d/next/Windows/2022-08-26-00-11-18.gh-issue-89545.zmJMY_.rst deleted file mode 100644 index eeedbf9d6fa8f6..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-26-00-11-18.gh-issue-89545.zmJMY_.rst +++ /dev/null @@ -1 +0,0 @@ -Updates :mod:`platform` code getting the Windows version to use native Windows Management Instrumentation (WMI) queries to determine OS version, type, and architecture. 
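The gh-95656 entry above means the Windows binaries are now built with SQLite's loadable-extension support, so the optional ``enable_load_extension`` / ``load_extension`` pair on ``sqlite3.Connection`` is usable there as well (on builds without that support the methods are simply absent). A sketch of typical use, with a hypothetical extension path::

    import sqlite3

    cx = sqlite3.connect(":memory:")
    cx.enable_load_extension(True)       # extension loading is disabled by default
    try:
        cx.load_extension("./vector0")   # hypothetical extension library path
    except sqlite3.OperationalError as exc:
        print("could not load extension:", exc)
    finally:
        cx.enable_load_extension(False)  # re-disable once loading is done
        cx.close()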
diff --git a/Misc/NEWS.d/next/Windows/2022-08-30-12-01-51.gh-issue-94781.OxO-Gr.rst b/Misc/NEWS.d/next/Windows/2022-08-30-12-01-51.gh-issue-94781.OxO-Gr.rst deleted file mode 100644 index d343173d40d606..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-08-30-12-01-51.gh-issue-94781.OxO-Gr.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :file:`pcbuild.proj` to clean previous instances of ouput files in ``Python\deepfreeze`` and -``Python\frozen_modules`` directories on Windows. Patch by Charlie Zhao. diff --git a/Misc/NEWS.d/next/Windows/2022-09-05-18-32-47.gh-issue-96559.561sUd.rst b/Misc/NEWS.d/next/Windows/2022-09-05-18-32-47.gh-issue-96559.561sUd.rst deleted file mode 100644 index 275617648f9219..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-09-05-18-32-47.gh-issue-96559.561sUd.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixes the Windows launcher not using the compatible interpretation of -default tags found in configuration files when no tag was passed to the -command. diff --git a/Misc/NEWS.d/next/Windows/2022-09-07-00-11-33.gh-issue-96577.kV4K_1.rst b/Misc/NEWS.d/next/Windows/2022-09-07-00-11-33.gh-issue-96577.kV4K_1.rst deleted file mode 100644 index 6025e5ce413042..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-09-07-00-11-33.gh-issue-96577.kV4K_1.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes a potential buffer overrun in :mod:`msilib`. diff --git a/Misc/NEWS.d/next/Windows/2022-09-23-15-40-04.gh-issue-96965.CsnEGs.rst b/Misc/NEWS.d/next/Windows/2022-09-23-15-40-04.gh-issue-96965.CsnEGs.rst deleted file mode 100644 index de7aff0563e553..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-09-23-15-40-04.gh-issue-96965.CsnEGs.rst +++ /dev/null @@ -1 +0,0 @@ -Update libffi to 3.4.3 diff --git a/Misc/NEWS.d/next/Windows/2022-09-29-22-27-04.gh-issue-97649.bI7OQU.rst b/Misc/NEWS.d/next/Windows/2022-09-29-22-27-04.gh-issue-97649.bI7OQU.rst deleted file mode 100644 index 118f9df7c89d2f..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-09-29-22-27-04.gh-issue-97649.bI7OQU.rst +++ /dev/null @@ -1 +0,0 @@ -The ``Tools`` directory is no longer installed on Windows diff --git a/Misc/NEWS.d/next/Windows/2022-09-29-23-08-49.gh-issue-90989.no89Q2.rst b/Misc/NEWS.d/next/Windows/2022-09-29-23-08-49.gh-issue-90989.no89Q2.rst deleted file mode 100644 index 786b3435042dbd..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-09-29-23-08-49.gh-issue-90989.no89Q2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Made :ref:`launcher` install per-user by default (unless an all users -install already exists), and clarify some text in the installer. diff --git a/Misc/NEWS.d/next/Windows/2022-10-02-11-59-23.gh-issue-97728.dIdlPE.rst b/Misc/NEWS.d/next/Windows/2022-10-02-11-59-23.gh-issue-97728.dIdlPE.rst deleted file mode 100644 index 2a6a253a52aeef..00000000000000 --- a/Misc/NEWS.d/next/Windows/2022-10-02-11-59-23.gh-issue-97728.dIdlPE.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix possible crashes caused by the use of uninitialized variables when pass -invalid arguments in :func:`os.system` on Windows and in Windows-specific -modules (like ``winreg``). diff --git a/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst b/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst new file mode 100644 index 00000000000000..8e19366c429715 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst @@ -0,0 +1,2 @@ +Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file +extension. 
diff --git a/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst b/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst new file mode 100644 index 00000000000000..46cbf998eb2001 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst @@ -0,0 +1 @@ +Fix initialization of :data:`sys.version` and ``sys._git`` on Windows diff --git a/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst b/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst new file mode 100644 index 00000000000000..99db0c55a67eed --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst @@ -0,0 +1,2 @@ +Use faster initialization functions to detect install location for Windows +Store package diff --git a/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst b/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst new file mode 100644 index 00000000000000..18ef0b0e252322 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst @@ -0,0 +1 @@ +Fix ``Mac/Extras.install.py`` file filter bug. diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index a8920d999e818f..aa12bcc85cebc7 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -2,7 +2,7 @@ # Please append new items at the end. # The syntax of this file is not fixed. -# It is designed to be read only by Tools/stable_abi.py, which can change +# It is designed to be read only by Tools/build/stable_abi.py, which can change # without notice. # For the history of the stable ABI prior to this file, @@ -94,7 +94,7 @@ added = '3.2' struct_abi_kind = 'full-abi' [struct.PyMemberDef] - added = '3.2' + added = '3.2' # Before 3.12, PyMemberDef required #include "structmember.h" struct_abi_kind = 'full-abi' [struct.PyGetSetDef] added = '3.2' @@ -1777,11 +1777,9 @@ added = '3.2' abi_only = true [function.PyMember_GetOne] - added = '3.2' - abi_only = true + added = '3.2' # Before 3.12, available in "structmember.h" [function.PyMember_SetOne] - added = '3.2' - abi_only = true + added = '3.2' # Before 3.12, available in "structmember.h" # TLS api is deprecated; superseded by TSS API @@ -2293,3 +2291,54 @@ added = '3.12' [typedef.vectorcallfunc] added = '3.12' +[function.PyObject_Vectorcall] + added = '3.12' +[function.PyObject_VectorcallMethod] + added = '3.12' +[macro.PY_VECTORCALL_ARGUMENTS_OFFSET] + added = '3.12' +[typedef.getbufferproc] + added = '3.12' +[typedef.releasebufferproc] + added = '3.12' + +[const.Py_T_BYTE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_SHORT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_INT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_LONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_LONGLONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_UBYTE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_UINT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_USHORT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_ULONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_ULONGLONG] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_PYSSIZET] 
+ added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_FLOAT] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_DOUBLE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_BOOL] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_STRING] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_STRING_INPLACE] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_CHAR] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_T_OBJECT_EX] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_READONLY] + added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix +[const.Py_AUDIT_READ] + added = '3.12' # Before 3.12, available in "structmember.h" diff --git a/Modules/Setup b/Modules/Setup index 8fe79512787cde..e3a82975b5ff5e 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -291,6 +291,7 @@ PYTHONPATH=$(COREPYTHONPATH) #_testcapi _testcapimodule.c #_testimportmultiple _testimportmultiple.c #_testmultiphase _testmultiphase.c +#_testsinglephase _testsinglephase.c # --- # Uncommenting the following line tells makesetup that all following modules diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index ac8959ebea5bf2..d64752e8ca9609 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -169,12 +169,14 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c +@MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. *shared* @MODULE__TESTIMPORTMULTIPLE_TRUE@_testimportmultiple _testimportmultiple.c @MODULE__TESTMULTIPHASE_TRUE@_testmultiphase _testmultiphase.c +@MODULE__TESTMULTIPHASE_TRUE@_testsinglephase _testsinglephase.c @MODULE__CTYPES_TEST_TRUE@_ctypes_test _ctypes/_ctypes_test.c # Limited API template modules; must be built as shared modules. diff --git a/Modules/_abc.c b/Modules/_abc.c index b22daa81e3ae19..e146d4fd0cac39 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -524,8 +524,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) } int result = PyObject_IsSubclass(subclass, self); if (result > 0) { - Py_INCREF(subclass); - return subclass; /* Already a subclass. */ + return Py_NewRef(subclass); /* Already a subclass. 
*/ } if (result < 0) { return NULL; @@ -561,8 +560,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) set_collection_flag_recursive((PyTypeObject *)subclass, collection_flag); } } - Py_INCREF(subclass); - return subclass; + return Py_NewRef(subclass); } @@ -598,8 +596,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, goto end; } if (incache > 0) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); goto end; } subtype = (PyObject *)Py_TYPE(instance); @@ -610,8 +607,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, goto end; } if (incache > 0) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); goto end; } } @@ -628,8 +624,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, switch (PyObject_IsTrue(result)) { case -1: - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); break; case 0: Py_DECREF(result); @@ -802,8 +797,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, end: Py_DECREF(impl); Py_XDECREF(subclasses); - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } @@ -842,8 +836,7 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass, Py_ssize_t i = 0; while (_PySet_NextEntry(impl->_abc_registry, &pos, &key, &hash)) { - Py_INCREF(key); - copy[i++] = key; + copy[i++] = Py_NewRef(key); } assert(i == registry_size); diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index efa0d2d6906eab..3edd8a9b2dbf91 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1,12 +1,13 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" -#include "pycore_pyerrors.h" // _PyErr_ClearExcState() -#include "pycore_pystate.h" // _PyThreadState_GET() -#include // offsetof() +#include "pycore_pyerrors.h" // _PyErr_ClearExcState() +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() +#include // offsetof() /*[clinic input] @@ -15,16 +16,6 @@ module _asyncio /*[clinic end generated code: output=da39a3ee5e6b4b0d input=8fd17862aa989c69]*/ -/* identifiers used from some functions */ -_Py_IDENTIFIER(__asyncio_running_event_loop__); -_Py_IDENTIFIER(_asyncio_future_blocking); -_Py_IDENTIFIER(add_done_callback); -_Py_IDENTIFIER(call_soon); -_Py_IDENTIFIER(cancel); -_Py_IDENTIFIER(get_event_loop); -_Py_IDENTIFIER(throw); - - /* State of the _asyncio module */ static PyObject *asyncio_mod; static PyObject *traceback_extract_stack; @@ -205,17 +196,14 @@ get_future_loop(PyObject *fut) { /* Implementation of `asyncio.futures._get_loop` */ - _Py_IDENTIFIER(get_loop); - _Py_IDENTIFIER(_loop); PyObject *getloop; if (Future_CheckExact(fut) || Task_CheckExact(fut)) { PyObject *loop = ((FutureObj *)fut)->fut_loop; - Py_INCREF(loop); - return loop; + return Py_NewRef(loop); } - if (_PyObject_LookupAttrId(fut, &PyId_get_loop, &getloop) < 0) { + if (_PyObject_LookupAttr(fut, &_Py_ID(get_loop), &getloop) < 0) { return NULL; } if (getloop != NULL) { @@ -224,7 +212,7 @@ get_future_loop(PyObject *fut) return res; } - return _PyObject_GetAttrId(fut, &PyId__loop); + return PyObject_GetAttr(fut, &_Py_ID(_loop)); } @@ -245,8 +233,8 @@ get_running_loop(PyObject **loop) goto not_found; } - rl = _PyDict_GetItemIdWithError( - ts_dict, &PyId___asyncio_running_event_loop__); // borrowed + rl = PyDict_GetItemWithError( + ts_dict, &_Py_ID(__asyncio_running_event_loop__)); // borrowed 
if (rl == NULL) { if (PyErr_Occurred()) { goto error; @@ -276,8 +264,7 @@ get_running_loop(PyObject **loop) } #endif - Py_INCREF(running_loop); - *loop = running_loop; + *loop = Py_NewRef(running_loop); return 0; not_found: @@ -311,8 +298,8 @@ set_running_loop(PyObject *loop) return -1; } - if (_PyDict_SetItemId( - ts_dict, &PyId___asyncio_running_event_loop__, (PyObject *)rl) < 0) + if (PyDict_SetItem( + ts_dict, &_Py_ID(__asyncio_running_event_loop__), (PyObject *)rl) < 0) { Py_DECREF(rl); // will cleanup loop & current_pid return -1; @@ -351,7 +338,7 @@ get_event_loop(int stacklevel) return NULL; } - loop = _PyObject_CallMethodIdNoArgs(policy, &PyId_get_event_loop); + loop = PyObject_CallMethodNoArgs(policy, &_Py_ID(get_event_loop)); Py_DECREF(policy); return loop; } @@ -365,13 +352,13 @@ call_soon(PyObject *loop, PyObject *func, PyObject *arg, PyObject *ctx) Py_ssize_t nargs; if (ctx == NULL) { - handle = _PyObject_CallMethodIdObjArgs( - loop, &PyId_call_soon, func, arg, NULL); + handle = PyObject_CallMethodObjArgs( + loop, &_Py_ID(call_soon), func, arg, NULL); } else { /* Use FASTCALL to pass a keyword-only argument to call_soon */ - PyObject *callable = _PyObject_GetAttrId(loop, &PyId_call_soon); + PyObject *callable = PyObject_GetAttr(loop, &_Py_ID(call_soon)); if (callable == NULL) { return -1; } @@ -486,7 +473,6 @@ future_init(FutureObj *fut, PyObject *loop) { PyObject *res; int is_true; - _Py_IDENTIFIER(get_debug); // Same to FutureObj_clear() but not clearing fut->dict Py_CLEAR(fut->fut_loop); @@ -515,7 +501,7 @@ future_init(FutureObj *fut, PyObject *loop) } fut->fut_loop = loop; - res = _PyObject_CallMethodIdNoArgs(fut->fut_loop, &PyId_get_debug); + res = PyObject_CallMethodNoArgs(fut->fut_loop, &_Py_ID(get_debug)); if (res == NULL) { return -1; } @@ -553,8 +539,7 @@ future_set_result(FutureObj *fut, PyObject *res) } assert(!fut->fut_result); - Py_INCREF(res); - fut->fut_result = res; + fut->fut_result = Py_NewRef(res); fut->fut_state = STATE_FINISHED; if (future_schedule_callbacks(fut) == -1) { @@ -585,8 +570,7 @@ future_set_exception(FutureObj *fut, PyObject *exc) } } else { - exc_val = exc; - Py_INCREF(exc_val); + exc_val = Py_NewRef(exc); } if (!PyExceptionInstance_Check(exc_val)) { Py_DECREF(exc_val); @@ -667,14 +651,12 @@ future_get_result(FutureObj *fut, PyObject **result) if (PyException_SetTraceback(fut->fut_exception, tb) < 0) { return -1; } - Py_INCREF(fut->fut_exception); - *result = fut->fut_exception; + *result = Py_NewRef(fut->fut_exception); Py_CLEAR(fut->fut_exception_tb); return 1; } - Py_INCREF(fut->fut_result); - *result = fut->fut_result; + *result = Py_NewRef(fut->fut_result); return 0; } @@ -716,10 +698,8 @@ future_add_done_callback(FutureObj *fut, PyObject *arg, PyObject *ctx) */ if (fut->fut_callbacks == NULL && fut->fut_callback0 == NULL) { - Py_INCREF(arg); - fut->fut_callback0 = arg; - Py_INCREF(ctx); - fut->fut_context0 = ctx; + fut->fut_callback0 = Py_NewRef(arg); + fut->fut_context0 = Py_NewRef(ctx); } else { PyObject *tup = PyTuple_New(2); @@ -908,8 +888,7 @@ _asyncio_Future_exception_impl(FutureObj *self) if (self->fut_exception != NULL) { self->fut_log_tb = 0; - Py_INCREF(self->fut_exception); - return self->fut_exception; + return Py_NewRef(self->fut_exception); } Py_RETURN_NONE; @@ -1116,16 +1095,6 @@ static PyObject * _asyncio_Future_cancel_impl(FutureObj *self, PyObject *msg) /*[clinic end generated code: output=3edebbc668e5aba3 input=925eb545251f2c5a]*/ { - if (msg != Py_None) { - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "Passing 'msg' 
argument to Future.cancel() " - "is deprecated since Python 3.11, and " - "scheduled for removal in Python 3.14.", - 2)) - { - return NULL; - } - } ENSURE_FUTURE_ALIVE(self) return future_cancel(self, msg); } @@ -1180,8 +1149,7 @@ _asyncio_Future_get_loop_impl(FutureObj *self) /*[clinic end generated code: output=119b6ea0c9816c3f input=cba48c2136c79d1f]*/ { ENSURE_FUTURE_ALIVE(self) - Py_INCREF(self->fut_loop); - return self->fut_loop; + return Py_NewRef(self->fut_loop); } static PyObject * @@ -1252,8 +1220,7 @@ FutureObj_get_loop(FutureObj *fut, void *Py_UNUSED(ignored)) if (!future_is_alive(fut)) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_loop); - return fut->fut_loop; + return Py_NewRef(fut->fut_loop); } static PyObject * @@ -1268,8 +1235,7 @@ FutureObj_get_callbacks(FutureObj *fut, void *Py_UNUSED(ignored)) Py_RETURN_NONE; } - Py_INCREF(fut->fut_callbacks); - return fut->fut_callbacks; + return Py_NewRef(fut->fut_callbacks); } Py_ssize_t len = 1; @@ -1315,8 +1281,7 @@ FutureObj_get_result(FutureObj *fut, void *Py_UNUSED(ignored)) if (fut->fut_result == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_result); - return fut->fut_result; + return Py_NewRef(fut->fut_result); } static PyObject * @@ -1326,8 +1291,7 @@ FutureObj_get_exception(FutureObj *fut, void *Py_UNUSED(ignored)) if (fut->fut_exception == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_exception); - return fut->fut_exception; + return Py_NewRef(fut->fut_exception); } static PyObject * @@ -1336,8 +1300,7 @@ FutureObj_get_source_traceback(FutureObj *fut, void *Py_UNUSED(ignored)) if (!future_is_alive(fut) || fut->fut_source_tb == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_source_tb); - return fut->fut_source_tb; + return Py_NewRef(fut->fut_source_tb); } static PyObject * @@ -1346,8 +1309,7 @@ FutureObj_get_cancel_message(FutureObj *fut, void *Py_UNUSED(ignored)) if (fut->fut_cancel_msg == NULL) { Py_RETURN_NONE; } - Py_INCREF(fut->fut_cancel_msg); - return fut->fut_cancel_msg; + return Py_NewRef(fut->fut_cancel_msg); } static int @@ -1366,28 +1328,24 @@ FutureObj_set_cancel_message(FutureObj *fut, PyObject *msg, static PyObject * FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) { - _Py_IDENTIFIER(PENDING); - _Py_IDENTIFIER(CANCELLED); - _Py_IDENTIFIER(FINISHED); PyObject *ret = NULL; ENSURE_FUTURE_ALIVE(fut) switch (fut->fut_state) { case STATE_PENDING: - ret = _PyUnicode_FromId(&PyId_PENDING); + ret = &_Py_ID(PENDING); break; case STATE_CANCELLED: - ret = _PyUnicode_FromId(&PyId_CANCELLED); + ret = &_Py_ID(CANCELLED); break; case STATE_FINISHED: - ret = _PyUnicode_FromId(&PyId_FINISHED); + ret = &_Py_ID(FINISHED); break; default: assert (0); } - Py_XINCREF(ret); - return ret; + return Py_XNewRef(ret); } static PyObject * @@ -1416,12 +1374,6 @@ _asyncio_Future__make_cancelled_error_impl(FutureObj *self) static void FutureObj_finalize(FutureObj *fut) { - _Py_IDENTIFIER(call_exception_handler); - _Py_IDENTIFIER(message); - _Py_IDENTIFIER(exception); - _Py_IDENTIFIER(future); - _Py_IDENTIFIER(source_traceback); - PyObject *error_type, *error_value, *error_traceback; PyObject *context; PyObject *message = NULL; @@ -1447,19 +1399,19 @@ FutureObj_finalize(FutureObj *fut) goto finally; } - if (_PyDict_SetItemId(context, &PyId_message, message) < 0 || - _PyDict_SetItemId(context, &PyId_exception, fut->fut_exception) < 0 || - _PyDict_SetItemId(context, &PyId_future, (PyObject*)fut) < 0) { + if (PyDict_SetItem(context, &_Py_ID(message), message) < 0 || + PyDict_SetItem(context, &_Py_ID(exception), fut->fut_exception) < 0 
|| + PyDict_SetItem(context, &_Py_ID(future), (PyObject*)fut) < 0) { goto finally; } if (fut->fut_source_tb != NULL) { - if (_PyDict_SetItemId(context, &PyId_source_traceback, + if (PyDict_SetItem(context, &_Py_ID(source_traceback), fut->fut_source_tb) < 0) { goto finally; } } - func = _PyObject_GetAttrId(fut->fut_loop, &PyId_call_exception_handler); + func = PyObject_GetAttr(fut->fut_loop, &_Py_ID(call_exception_handler)); if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { @@ -1618,8 +1570,7 @@ FutureIter_am_send(futureiterobject *it, if (fut->fut_state == STATE_PENDING) { if (!fut->fut_blocking) { fut->fut_blocking = 1; - Py_INCREF(fut); - *result = (PyObject *)fut; + *result = Py_NewRef(fut); return PYGEN_NEXT; } PyErr_SetString(PyExc_RuntimeError, @@ -1805,8 +1756,7 @@ future_new_iter(PyObject *fut) } } - Py_INCREF(fut); - it->future = (FutureObj*)fut; + it->future = (FutureObj*)Py_NewRef(fut); PyObject_GC_Track(it); return (PyObject*)it; } @@ -1870,8 +1820,7 @@ static PyObject * TaskStepMethWrapper_get___self__(TaskStepMethWrapper *o, void *Py_UNUSED(ignored)) { if (o->sw_task) { - Py_INCREF(o->sw_task); - return (PyObject*)o->sw_task; + return Py_NewRef(o->sw_task); } Py_RETURN_NONE; } @@ -1904,11 +1853,8 @@ TaskStepMethWrapper_new(TaskObj *task, PyObject *arg) return NULL; } - Py_INCREF(task); - o->sw_task = task; - - Py_XINCREF(arg); - o->sw_arg = arg; + o->sw_task = (TaskObj*)Py_NewRef(task); + o->sw_arg = Py_XNewRef(arg); PyObject_GC_Track(o); return (PyObject*) o; @@ -1928,10 +1874,8 @@ static PyMethodDef TaskWakeupDef = { static int register_task(PyObject *task) { - _Py_IDENTIFIER(add); - - PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks, - &PyId_add, task); + PyObject *res = PyObject_CallMethodOneArg(all_tasks, + &_Py_ID(add), task); if (res == NULL) { return -1; } @@ -1943,10 +1887,8 @@ register_task(PyObject *task) static int unregister_task(PyObject *task) { - _Py_IDENTIFIER(discard); - - PyObject *res = _PyObject_CallMethodIdOneArg(all_tasks, - &PyId_discard, task); + PyObject *res = PyObject_CallMethodOneArg(all_tasks, + &_Py_ID(discard), task); if (res == NULL) { return -1; } @@ -2140,8 +2082,7 @@ static PyObject * TaskObj_get_coro(TaskObj *task, void *Py_UNUSED(ignored)) { if (task->task_coro) { - Py_INCREF(task->task_coro); - return task->task_coro; + return Py_NewRef(task->task_coro); } Py_RETURN_NONE; @@ -2151,8 +2092,7 @@ static PyObject * TaskObj_get_fut_waiter(TaskObj *task, void *Py_UNUSED(ignored)) { if (task->task_fut_waiter) { - Py_INCREF(task->task_fut_waiter); - return task->task_fut_waiter; + return Py_NewRef(task->task_fut_waiter); } Py_RETURN_NONE; @@ -2214,16 +2154,6 @@ static PyObject * _asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg) /*[clinic end generated code: output=c66b60d41c74f9f1 input=7bb51bf25974c783]*/ { - if (msg != Py_None) { - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "Passing 'msg' argument to Task.cancel() " - "is deprecated since Python 3.11, and " - "scheduled for removal in Python 3.14.", - 2)) - { - return NULL; - } - } self->task_log_tb = 0; if (self->task_state != STATE_PENDING) { @@ -2243,8 +2173,8 @@ _asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg) PyObject *res; int is_true; - res = _PyObject_CallMethodIdOneArg(self->task_fut_waiter, - &PyId_cancel, msg); + res = PyObject_CallMethodOneArg(self->task_fut_waiter, + &_Py_ID(cancel), msg); if (res == NULL) { return NULL; } @@ -2405,8 +2335,7 @@ static PyObject * _asyncio_Task_get_coro_impl(TaskObj *self) /*[clinic end 
generated code: output=bcac27c8cc6c8073 input=d2e8606c42a7b403]*/ { - Py_INCREF(self->task_coro); - return self->task_coro; + return Py_NewRef(self->task_coro); } /*[clinic input] @@ -2417,8 +2346,7 @@ static PyObject * _asyncio_Task_get_context_impl(TaskObj *self) /*[clinic end generated code: output=6996f53d3dc01aef input=87c0b209b8fceeeb]*/ { - Py_INCREF(self->task_context); - return self->task_context; + return Py_NewRef(self->task_context); } /*[clinic input] @@ -2430,8 +2358,7 @@ _asyncio_Task_get_name_impl(TaskObj *self) /*[clinic end generated code: output=0ecf1570c3b37a8f input=a4a6595d12f4f0f8]*/ { if (self->task_name) { - Py_INCREF(self->task_name); - return self->task_name; + return Py_NewRef(self->task_name); } Py_RETURN_NONE; @@ -2464,11 +2391,6 @@ _asyncio_Task_set_name(TaskObj *self, PyObject *value) static void TaskObj_finalize(TaskObj *task) { - _Py_IDENTIFIER(call_exception_handler); - _Py_IDENTIFIER(task); - _Py_IDENTIFIER(message); - _Py_IDENTIFIER(source_traceback); - PyObject *context; PyObject *message = NULL; PyObject *func; @@ -2491,21 +2413,21 @@ TaskObj_finalize(TaskObj *task) goto finally; } - if (_PyDict_SetItemId(context, &PyId_message, message) < 0 || - _PyDict_SetItemId(context, &PyId_task, (PyObject*)task) < 0) + if (PyDict_SetItem(context, &_Py_ID(message), message) < 0 || + PyDict_SetItem(context, &_Py_ID(task), (PyObject*)task) < 0) { goto finally; } if (task->task_source_tb != NULL) { - if (_PyDict_SetItemId(context, &PyId_source_traceback, + if (PyDict_SetItem(context, &_Py_ID(source_traceback), task->task_source_tb) < 0) { goto finally; } } - func = _PyObject_GetAttrId(task->task_loop, &PyId_call_exception_handler); + func = PyObject_GetAttr(task->task_loop, &_Py_ID(call_exception_handler)); if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { @@ -2729,7 +2651,7 @@ task_step_impl(TaskObj *task, PyObject *exc) gen_status = PyIter_Send(coro, Py_None, &result); } else { - result = _PyObject_CallMethodIdOneArg(coro, &PyId_throw, exc); + result = PyObject_CallMethodOneArg(coro, &_Py_ID(throw), exc); gen_status = gen_status_from_result(&result); if (clear_exc) { /* We created 'exc' during this call */ @@ -2856,7 +2778,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) { PyObject *r; int is_true; - r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + r = PyObject_CallMethodOneArg(result, &_Py_ID(cancel), task->task_cancel_msg); if (r == NULL) { return NULL; @@ -2884,7 +2806,7 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* Check if `result` is a Future-compatible object */ - if (_PyObject_LookupAttrId(result, &PyId__asyncio_future_blocking, &o) < 0) { + if (_PyObject_LookupAttr(result, &_Py_ID(_asyncio_future_blocking), &o) < 0) { goto fail; } if (o != NULL && o != Py_None) { @@ -2914,8 +2836,8 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* result._asyncio_future_blocking = False */ - if (_PyObject_SetAttrId( - result, &PyId__asyncio_future_blocking, Py_False) == -1) { + if (PyObject_SetAttr( + result, &_Py_ID(_asyncio_future_blocking), Py_False) == -1) { goto fail; } @@ -2925,8 +2847,8 @@ task_step_impl(TaskObj *task, PyObject *exc) } /* result.add_done_callback(task._wakeup) */ - PyObject *add_cb = _PyObject_GetAttrId( - result, &PyId_add_done_callback); + PyObject *add_cb = PyObject_GetAttr( + result, &_Py_ID(add_done_callback)); if (add_cb == NULL) { Py_DECREF(wrapper); goto fail; @@ -2949,7 +2871,7 @@ task_step_impl(TaskObj *task, PyObject *exc) if (task->task_must_cancel) 
{ PyObject *r; int is_true; - r = _PyObject_CallMethodIdOneArg(result, &PyId_cancel, + r = PyObject_CallMethodOneArg(result, &_Py_ID(cancel), task->task_cancel_msg); if (r == NULL) { return NULL; @@ -3308,9 +3230,7 @@ new_running_loop_holder(PyObject *loop) #if defined(HAVE_GETPID) && !defined(MS_WINDOWS) rl->rl_pid = getpid(); #endif - - Py_INCREF(loop); - rl->rl_loop = loop; + rl->rl_loop = Py_NewRef(loop); return rl; } diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 741cfbe9dc6142..5fa583821889f3 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -299,8 +299,7 @@ deque_append_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen) static PyObject * deque_append(dequeobject *deque, PyObject *item) { - Py_INCREF(item); - if (deque_append_internal(deque, item, deque->maxlen) < 0) + if (deque_append_internal(deque, Py_NewRef(item), deque->maxlen) < 0) return NULL; Py_RETURN_NONE; } @@ -336,8 +335,7 @@ deque_appendleft_internal(dequeobject *deque, PyObject *item, Py_ssize_t maxlen) static PyObject * deque_appendleft(dequeobject *deque, PyObject *item) { - Py_INCREF(item); - if (deque_appendleft_internal(deque, item, deque->maxlen) < 0) + if (deque_appendleft_internal(deque, Py_NewRef(item), deque->maxlen) < 0) return NULL; Py_RETURN_NONE; } @@ -655,14 +653,12 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n) size = Py_SIZE(deque); if (size == 0 || n == 1) { - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if (n <= 0) { deque_clear(deque); - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if (size == 1) { @@ -693,13 +689,11 @@ deque_inplace_repeat(dequeobject *deque, Py_ssize_t n) i += m; while (m--) { deque->rightindex++; - Py_INCREF(item); - deque->rightblock->data[deque->rightindex] = item; + deque->rightblock->data[deque->rightindex] = Py_NewRef(item); } } Py_SET_SIZE(deque, Py_SIZE(deque) + i); - Py_INCREF(deque); - return (PyObject *)deque; + return Py_NewRef(deque); } if ((size_t)size > PY_SSIZE_T_MAX / (size_t)n) { @@ -972,8 +966,7 @@ deque_count(dequeobject *deque, PyObject *v) while (--n >= 0) { CHECK_NOT_END(b); - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, v, Py_EQ); Py_DECREF(item); if (cmp < 0) @@ -1011,8 +1004,7 @@ deque_contains(dequeobject *deque, PyObject *v) while (--n >= 0) { CHECK_NOT_END(b); - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, v, Py_EQ); Py_DECREF(item); if (cmp) { @@ -1201,8 +1193,7 @@ deque_item(dequeobject *deque, Py_ssize_t i) } } item = b->data[i]; - Py_INCREF(item); - return item; + return Py_NewRef(item); } static int @@ -1231,8 +1222,7 @@ deque_remove(dequeobject *deque, PyObject *value) int cmp, rv; for (i = 0 ; i < n; i++) { - item = b->data[index]; - Py_INCREF(item); + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, value, Py_EQ); Py_DECREF(item); if (cmp < 0) { @@ -1266,7 +1256,6 @@ deque_remove(dequeobject *deque, PyObject *value) static int deque_ass_item(dequeobject *deque, Py_ssize_t i, PyObject *v) { - PyObject *old_value; block *b; Py_ssize_t n, len=Py_SIZE(deque), halflen=(len+1)>>1, index=i; @@ -1292,10 +1281,7 @@ deque_ass_item(dequeobject *deque, Py_ssize_t i, PyObject *v) while (--n >= 0) b = b->leftlink; } - Py_INCREF(v); - old_value = b->data[i]; - b->data[i] = v; - Py_DECREF(old_value); + Py_SETREF(b->data[i], Py_NewRef(v)); return 0; } @@ 
-1686,8 +1672,7 @@ deque_iter(dequeobject *deque) return NULL; it->b = deque->leftblock; it->index = deque->leftindex; - Py_INCREF(deque); - it->deque = deque; + it->deque = (dequeobject*)Py_NewRef(deque); it->state = deque->state; it->counter = Py_SIZE(deque); PyObject_GC_Track(it); @@ -1734,8 +1719,7 @@ dequeiter_next(dequeiterobject *it) it->b = it->b->rightlink; it->index = 0; } - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -1844,8 +1828,7 @@ deque_reviter(dequeobject *deque, PyObject *Py_UNUSED(ignored)) return NULL; it->b = deque->rightblock; it->index = deque->rightindex; - Py_INCREF(deque); - it->deque = deque; + it->deque = (dequeobject*)Py_NewRef(deque); it->state = deque->state; it->counter = Py_SIZE(deque); PyObject_GC_Track(it); @@ -1876,8 +1859,7 @@ dequereviter_next(dequeiterobject *it) it->b = it->b->leftlink; it->index = BLOCKLEN - 1; } - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -2203,8 +2185,7 @@ defdict_init(PyObject *self, PyObject *args, PyObject *kwds) } if (newargs == NULL) return -1; - Py_XINCREF(newdefault); - dd->default_factory = newdefault; + dd->default_factory = Py_XNewRef(newdefault); result = PyDict_Type.tp_init(self, newargs, kwds); Py_DECREF(newargs); Py_XDECREF(olddefault); @@ -2414,8 +2395,7 @@ tuplegetter_new_impl(PyTypeObject *type, Py_ssize_t index, PyObject *doc) return NULL; } self->index = index; - Py_INCREF(doc); - self->doc = doc; + self->doc = Py_NewRef(doc); return (PyObject *)self; } @@ -2426,13 +2406,11 @@ tuplegetter_descr_get(PyObject *self, PyObject *obj, PyObject *type) PyObject *result; if (obj == NULL) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (!PyTuple_Check(obj)) { if (obj == Py_None) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyErr_Format(PyExc_TypeError, "descriptor for index '%zd' for tuple subclasses " @@ -2448,8 +2426,7 @@ tuplegetter_descr_get(PyObject *self, PyObject *obj, PyObject *type) } result = PyTuple_GET_ITEM(obj, index); - Py_INCREF(result); - return result; + return Py_NewRef(result); } static int diff --git a/Modules/_csv.c b/Modules/_csv.c index d34d0a1296ae72..bd337084dbff81 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -176,8 +176,7 @@ get_char_or_None(Py_UCS4 c) static PyObject * Dialect_get_lineterminator(DialectObj *self, void *Py_UNUSED(ignored)) { - Py_XINCREF(self->lineterminator); - return self->lineterminator; + return Py_XNewRef(self->lineterminator); } static PyObject * @@ -316,8 +315,7 @@ _set_str(const char *name, PyObject **target, PyObject *src, const char *dflt) else { if (PyUnicode_READY(src) == -1) return -1; - Py_INCREF(src); - Py_XSETREF(*target, src); + Py_XSETREF(*target, Py_NewRef(src)); } } return 0; @@ -514,8 +512,7 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto err; } - ret = (PyObject *)self; - Py_INCREF(self); + ret = Py_NewRef(self); err: Py_CLEAR(self); Py_CLEAR(dialect); @@ -704,7 +701,7 @@ parse_process_char(ReaderObj *self, _csvstate *module_state, Py_UCS4 c) self->state = ESCAPED_CHAR; } else if (c == ' ' && dialect->skipinitialspace) - /* ignore space at start of field */ + /* ignore spaces at start of field */ ; else if (c == dialect->delimiter) { /* save empty field */ @@ -1647,9 +1644,9 @@ PyDoc_STRVAR(csv_module_doc, " quoting character. It defaults to '\"'.\n" " * delimiter - specifies a one-character string to use as the\n" " field separator. 
It defaults to ','.\n" -" * skipinitialspace - specifies how to interpret whitespace which\n" -" immediately follows a delimiter. It defaults to False, which\n" -" means that whitespace immediately following a delimiter is part\n" +" * skipinitialspace - specifies how to interpret spaces which\n" +" immediately follow a delimiter. It defaults to False, which\n" +" means that spaces immediately following a delimiter is part\n" " of the following field.\n" " * lineterminator - specifies the character sequence which should\n" " terminate rows.\n" diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index a3c7c8c471e584..b9092d3981f364 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -101,8 +101,6 @@ bytes(cdata) #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER - #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -233,10 +231,8 @@ PyDict_SetItemProxy(PyObject *dict, PyObject *key, PyObject *item) remover = (DictRemoverObject *)obj; assert(remover->key == NULL); assert(remover->dict == NULL); - Py_INCREF(key); - remover->key = key; - Py_INCREF(dict); - remover->dict = dict; + remover->key = Py_NewRef(key); + remover->dict = Py_NewRef(dict); proxy = PyWeakref_NewProxy(item, obj); Py_DECREF(obj); @@ -472,8 +468,7 @@ StructUnionType_paramfunc(CDataObject *self) struct_param->ptr = ptr; } else { ptr = self->b_ptr; - obj = (PyObject *)self; - Py_INCREF(obj); + obj = Py_NewRef(self); } parg = PyCArgObject_new(); @@ -498,8 +493,6 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt PyTypeObject *result; PyObject *fields; StgDictObject *dict; - _Py_IDENTIFIER(_abstract_); - _Py_IDENTIFIER(_fields_); /* create the new instance (which is a class, since we are a metatype!) */ @@ -508,7 +501,7 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt return NULL; /* keep this for bw compatibility */ - int r = _PyDict_ContainsId(result->tp_dict, &PyId__abstract_); + int r = PyDict_Contains(result->tp_dict, &_Py_ID(_abstract_)); if (r > 0) return (PyObject *)result; if (r < 0) { @@ -540,9 +533,9 @@ StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isSt dict->paramfunc = StructUnionType_paramfunc; - fields = _PyDict_GetItemIdWithError((PyObject *)dict, &PyId__fields_); + fields = PyDict_GetItemWithError((PyObject *)dict, &_Py_ID(_fields_)); if (fields) { - if (_PyObject_SetAttrId((PyObject *)result, &PyId__fields_, fields) < 0) { + if (PyObject_SetAttr((PyObject *)result, &_Py_ID(_fields_), fields) < 0) { Py_DECREF(result); return NULL; } @@ -797,14 +790,12 @@ PyDoc_STRVAR(from_param_doc, static PyObject * CDataType_from_param(PyObject *type, PyObject *value) { - _Py_IDENTIFIER(_as_parameter_); PyObject *as_parameter; int res = PyObject_IsInstance(value, type); if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyCArg_CheckExact(value)) { PyCArgObject *p = (PyCArgObject *)value; @@ -820,8 +811,7 @@ CDataType_from_param(PyObject *type, PyObject *value) if (res == -1) return NULL; if (res) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } } ob_name = (ob) ? 
Py_TYPE(ob)->tp_name : "???";
@@ -831,7 +821,7 @@ CDataType_from_param(PyObject *type, PyObject *value)
         return NULL;
     }

-    if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) {
+    if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) {
         return NULL;
     }
     if (as_parameter) {
@@ -1055,8 +1045,7 @@ PyCPointerType_paramfunc(CDataObject *self)

     parg->tag = 'P';
     parg->pffi_type = &ffi_type_pointer;
-    Py_INCREF(self);
-    parg->obj = (PyObject *)self;
+    parg->obj = Py_NewRef(self);
     parg->value.p = *(void **)self->b_ptr;
     return parg;
 }
@@ -1068,7 +1057,7 @@ PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     StgDictObject *stgdict;
     PyObject *proto;
     PyObject *typedict;
-    _Py_IDENTIFIER(_type_);
+
     typedict = PyTuple_GetItem(args, 2);
     if (!typedict)
@@ -1088,7 +1077,7 @@ PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     stgdict->paramfunc = PyCPointerType_paramfunc;
     stgdict->flags |= TYPEFLAG_ISPOINTER;

-    proto = _PyDict_GetItemIdWithError(typedict, &PyId__type_); /* Borrowed ref */
+    proto = PyDict_GetItemWithError(typedict, &_Py_ID(_type_)); /* Borrowed ref */
     if (proto) {
         StgDictObject *itemdict;
         const char *current_format;
@@ -1146,7 +1135,7 @@ static PyObject *
 PyCPointerType_set_type(PyTypeObject *self, PyObject *type)
 {
     StgDictObject *dict;
-    _Py_IDENTIFIER(_type_);
+
     dict = PyType_stgdict((PyObject *)self);
     if (!dict) {
@@ -1158,7 +1147,7 @@ PyCPointerType_set_type(PyTypeObject *self, PyObject *type)
     if (-1 == PyCPointerType_SetProto(dict, type))
         return NULL;

-    if (-1 == _PyDict_SetItemId((PyObject *)dict, &PyId__type_, type))
+    if (-1 == PyDict_SetItem((PyObject *)dict, &_Py_ID(_type_), type))
         return NULL;

     Py_RETURN_NONE;
@@ -1173,8 +1162,7 @@ PyCPointerType_from_param(PyObject *type, PyObject *value)

     if (value == Py_None) {
         /* ConvParam will convert to a NULL pointer later */
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }

     typedict = PyType_stgdict(type);
@@ -1208,8 +1196,7 @@ PyCPointerType_from_param(PyObject *type, PyObject *value)
             return NULL;
         }
         if (ret) {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }
     return CDataType_from_param(type, value);
@@ -1453,16 +1440,13 @@ PyCArrayType_paramfunc(CDataObject *self)
     p->tag = 'P';
     p->pffi_type = &ffi_type_pointer;
     p->value.p = (char *)self->b_ptr;
-    Py_INCREF(self);
-    p->obj = (PyObject *)self;
+    p->obj = Py_NewRef(self);
     return p;
 }

 static PyObject *
 PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-    _Py_IDENTIFIER(_length_);
-    _Py_IDENTIFIER(_type_);
     PyTypeObject *result;
     StgDictObject *stgdict;
     StgDictObject *itemdict;
@@ -1481,7 +1465,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     stgdict = NULL;
     type_attr = NULL;

-    if (_PyObject_LookupAttrId((PyObject *)result, &PyId__length_, &length_attr) < 0) {
+    if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_length_), &length_attr) < 0) {
         goto error;
     }
     if (!length_attr) {
@@ -1514,7 +1498,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
         goto error;
     }

-    if (_PyObject_LookupAttrId((PyObject *)result, &PyId__type_, &type_attr) < 0) {
+    if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_type_), &type_attr) < 0) {
         goto error;
     }
     if (!type_attr) {
@@ -1659,7 +1643,6 @@ static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g";
 static PyObject *
 c_wchar_p_from_param(PyObject *type, PyObject *value)
 {
-    _Py_IDENTIFIER(_as_parameter_);
     PyObject *as_parameter;
     int res;
     if (value == Py_None) {
@@ -1685,8 +1668,7 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
     if (res == -1)
         return NULL;
     if (res) {
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }
     if (ArrayObject_Check(value) || PointerObject_Check(value)) {
         /* c_wchar array instance or pointer(c_wchar(...)) */
@@ -1695,8 +1677,7 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
         assert(dt); /* Cannot be NULL for pointer or array objects */
         dict = dt && dt->proto ? PyType_stgdict(dt->proto) : NULL;
         if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }
     if (PyCArg_CheckExact(value)) {
@@ -1704,12 +1685,11 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
         PyCArgObject *a = (PyCArgObject *)value;
         StgDictObject *dict = PyObject_stgdict(a->obj);
         if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }

-    if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) {
+    if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) {
         return NULL;
     }
     if (as_parameter) {
@@ -1726,7 +1706,6 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
 static PyObject *
 c_char_p_from_param(PyObject *type, PyObject *value)
 {
-    _Py_IDENTIFIER(_as_parameter_);
     PyObject *as_parameter;
     int res;
     if (value == Py_None) {
@@ -1752,8 +1731,7 @@ c_char_p_from_param(PyObject *type, PyObject *value)
     if (res == -1)
         return NULL;
     if (res) {
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }
     if (ArrayObject_Check(value) || PointerObject_Check(value)) {
         /* c_char array instance or pointer(c_char(...)) */
@@ -1762,8 +1740,7 @@ c_char_p_from_param(PyObject *type, PyObject *value)
         assert(dt); /* Cannot be NULL for pointer or array objects */
         dict = dt && dt->proto ? PyType_stgdict(dt->proto) : NULL;
         if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }
     if (PyCArg_CheckExact(value)) {
@@ -1771,12 +1748,11 @@ c_char_p_from_param(PyObject *type, PyObject *value)
         PyCArgObject *a = (PyCArgObject *)value;
         StgDictObject *dict = PyObject_stgdict(a->obj);
         if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }

-    if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) {
+    if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) {
         return NULL;
     }
     if (as_parameter) {
@@ -1793,7 +1769,6 @@ c_char_p_from_param(PyObject *type, PyObject *value)
 static PyObject *
 c_void_p_from_param(PyObject *type, PyObject *value)
 {
-    _Py_IDENTIFIER(_as_parameter_);
     StgDictObject *stgd;
     PyObject *as_parameter;
     int res;
@@ -1861,22 +1836,19 @@ c_void_p_from_param(PyObject *type, PyObject *value)
         return NULL;
     if (res) {
         /* c_void_p instances */
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }
     /* ctypes array or pointer instance */
     if (ArrayObject_Check(value) || PointerObject_Check(value)) {
         /* Any array or pointer is accepted */
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }
     /* byref(...) */
     if (PyCArg_CheckExact(value)) {
         /* byref(c_xxx()) */
         PyCArgObject *a = (PyCArgObject *)value;
         if (a->tag == 'P') {
-            Py_INCREF(value);
-            return value;
+            return Py_NewRef(value);
         }
     }
     /* function pointer */
@@ -1907,15 +1879,14 @@ c_void_p_from_param(PyObject *type, PyObject *value)
                 return NULL;
             parg->pffi_type = &ffi_type_pointer;
             parg->tag = 'Z';
-            Py_INCREF(value);
-            parg->obj = value;
+            parg->obj = Py_NewRef(value);
             /* Remember: b_ptr points to where the pointer is stored! */
             parg->value.p = *(void **)(((CDataObject *)value)->b_ptr);
             return (PyObject *)parg;
         }
     }

-    if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) {
+    if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) {
         return NULL;
     }
     if (as_parameter) {
@@ -1993,8 +1964,7 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject
     stgdict->setfunc = fmt->setfunc_swapped;
     stgdict->getfunc = fmt->getfunc_swapped;

-    Py_INCREF(proto);
-    stgdict->proto = proto;
+    stgdict->proto = Py_NewRef(proto);

     /* replace the class dict by our updated spam dict */
     if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) {
@@ -2029,8 +1999,7 @@ PyCSimpleType_paramfunc(CDataObject *self)

     parg->tag = fmt[0];
     parg->pffi_type = fd->pffi_type;
-    Py_INCREF(self);
-    parg->obj = (PyObject *)self;
+    parg->obj = Py_NewRef(self);
     memcpy(&parg->value, self->b_ptr, self->b_size);
     return parg;
 }
@@ -2038,7 +2007,6 @@ PyCSimpleType_paramfunc(CDataObject *self)
 static PyObject *
 PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-    _Py_IDENTIFIER(_type_);
     PyTypeObject *result;
     StgDictObject *stgdict;
     PyObject *proto;
@@ -2053,7 +2021,7 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     if (result == NULL)
         return NULL;

-    if (_PyObject_LookupAttrId((PyObject *)result, &PyId__type_, &proto) < 0) {
+    if (_PyObject_LookupAttr((PyObject *)result, &_Py_ID(_type_), &proto) < 0) {
         return NULL;
     }
     if (!proto) {
@@ -2223,7 +2191,6 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 static PyObject *
 PyCSimpleType_from_param(PyObject *type, PyObject *value)
 {
-    _Py_IDENTIFIER(_as_parameter_);
     StgDictObject *dict;
     const char *fmt;
     PyCArgObject *parg;
@@ -2237,8 +2204,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value)
     if (res == -1)
         return NULL;
     if (res) {
-        Py_INCREF(value);
-        return value;
+        return Py_NewRef(value);
     }

     dict = PyType_stgdict(type);
@@ -2267,7 +2233,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value)
     PyErr_Clear();
     Py_DECREF(parg);

-    if (_PyObject_LookupAttrId(value, &PyId__as_parameter_, &as_parameter) < 0) {
+    if (_PyObject_LookupAttr(value, &_Py_ID(_as_parameter_), &as_parameter) < 0) {
         return NULL;
     }
     if (as_parameter) {
@@ -2344,7 +2310,6 @@ PyTypeObject PyCSimpleType_Type = {
 static PyObject *
 converters_from_argtypes(PyObject *ob)
 {
-    _Py_IDENTIFIER(from_param);
     PyObject *converters;
     Py_ssize_t i;

@@ -2424,7 +2389,7 @@ converters_from_argtypes(PyObject *ob)
         }
         */

-        if (_PyObject_LookupAttrId(tp, &PyId_from_param, &cnv) <= 0) {
+        if (_PyObject_LookupAttr(tp, &_Py_ID(from_param), &cnv) <= 0) {
             Py_DECREF(converters);
             Py_DECREF(ob);
             if (!PyErr_Occurred()) {
@@ -2445,10 +2410,6 @@ make_funcptrtype_dict(StgDictObject *stgdict)
 {
     PyObject *ob;
     PyObject *converters = NULL;
-    _Py_IDENTIFIER(_flags_);
-    _Py_IDENTIFIER(_argtypes_);
-    _Py_IDENTIFIER(_restype_);
-    _Py_IDENTIFIER(_check_retval_);

     stgdict->align = _ctypes_get_fielddesc("P")->pffi_type->alignment;
     stgdict->length = 1;
@@ -2457,7 +2418,7 @@
make_funcptrtype_dict(StgDictObject *stgdict) stgdict->getfunc = NULL; stgdict->ffi_type_pointer = ffi_type_pointer; - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__flags_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_flags_)); if (!ob || !PyLong_Check(ob)) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, @@ -2468,29 +2429,27 @@ make_funcptrtype_dict(StgDictObject *stgdict) stgdict->flags = PyLong_AsUnsignedLongMask(ob) | TYPEFLAG_ISPOINTER; /* _argtypes_ is optional... */ - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__argtypes_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_argtypes_)); if (ob) { converters = converters_from_argtypes(ob); if (!converters) return -1; - Py_INCREF(ob); - stgdict->argtypes = ob; + stgdict->argtypes = Py_NewRef(ob); stgdict->converters = converters; } else if (PyErr_Occurred()) { return -1; } - ob = _PyDict_GetItemIdWithError((PyObject *)stgdict, &PyId__restype_); + ob = PyDict_GetItemWithError((PyObject *)stgdict, &_Py_ID(_restype_)); if (ob) { if (ob != Py_None && !PyType_stgdict(ob) && !PyCallable_Check(ob)) { PyErr_SetString(PyExc_TypeError, "_restype_ must be a type, a callable, or None"); return -1; } - Py_INCREF(ob); - stgdict->restype = ob; - if (_PyObject_LookupAttrId(ob, &PyId__check_retval_, + stgdict->restype = Py_NewRef(ob); + if (_PyObject_LookupAttr(ob, &_Py_ID(_check_retval_), &stgdict->checker) < 0) { return -1; @@ -2507,8 +2466,7 @@ make_funcptrtype_dict(StgDictObject *stgdict) "_errcheck_ must be callable"); return -1; } - Py_INCREF(ob); - stgdict->errcheck = ob; + stgdict->errcheck = Py_NewRef(ob); } else if (PyErr_Occurred()) { return -1; @@ -2528,8 +2486,7 @@ PyCFuncPtrType_paramfunc(CDataObject *self) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; - Py_INCREF(self); - parg->obj = (PyObject *)self; + parg->obj = Py_NewRef(self); parg->value.p = *(void **)self->b_ptr; return parg; } @@ -2641,8 +2598,7 @@ PyCData_GetContainer(CDataObject *self) if (self->b_objects == NULL) return NULL; } else { - Py_INCREF(Py_None); - self->b_objects = Py_None; + self->b_objects = Py_NewRef(Py_None); } } return self; @@ -2784,8 +2740,7 @@ static int PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags) if (view == NULL) return 0; view->buf = self->b_ptr; - view->obj = myself; - Py_INCREF(myself); + view->obj = Py_NewRef(myself); view->len = self->b_size; view->readonly = 0; /* use default format character if not set */ @@ -2877,8 +2832,7 @@ PyCData_setstate(PyObject *myself, PyObject *args) static PyObject * PyCData_from_outparam(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyMethodDef PyCData_methods[] = { @@ -2983,8 +2937,7 @@ PyCData_FromBaseObj(PyObject *type, PyObject *base, Py_ssize_t index, char *adr) assert(CDataObject_Check(base)); cmem->b_ptr = adr; cmem->b_needsfree = 0; - Py_INCREF(base); - cmem->b_base = (CDataObject *)base; + cmem->b_base = (CDataObject *)Py_NewRef(base); cmem->b_index = index; } else { /* copy contents of adr */ if (-1 == PyCData_MallocBuffer(cmem, dict)) { @@ -3121,8 +3074,7 @@ _PyCData_set(CDataObject *dst, PyObject *type, SETFUNC setfunc, PyObject *value, if (value == NULL) return NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyCPointerTypeObject_Check(type) @@ -3245,8 +3197,7 @@ static PyObject * PyCFuncPtr_get_errcheck(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { if (self->errcheck) { - Py_INCREF(self->errcheck); - return self->errcheck; 
+ return Py_NewRef(self->errcheck); } Py_RETURN_NONE; } @@ -3254,7 +3205,6 @@ PyCFuncPtr_get_errcheck(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) static int PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ignored)) { - _Py_IDENTIFIER(_check_retval_); PyObject *checker, *oldchecker; if (ob == NULL) { oldchecker = self->checker; @@ -3268,7 +3218,7 @@ PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ign "restype must be a type, a callable, or None"); return -1; } - if (_PyObject_LookupAttrId(ob, &PyId__check_retval_, &checker) < 0) { + if (_PyObject_LookupAttr(ob, &_Py_ID(_check_retval_), &checker) < 0) { return -1; } oldchecker = self->checker; @@ -3284,14 +3234,12 @@ PyCFuncPtr_get_restype(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { StgDictObject *dict; if (self->restype) { - Py_INCREF(self->restype); - return self->restype; + return Py_NewRef(self->restype); } dict = PyObject_stgdict((PyObject *)self); assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ if (dict->restype) { - Py_INCREF(dict->restype); - return dict->restype; + return Py_NewRef(dict->restype); } else { Py_RETURN_NONE; } @@ -3321,14 +3269,12 @@ PyCFuncPtr_get_argtypes(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { StgDictObject *dict; if (self->argtypes) { - Py_INCREF(self->argtypes); - return self->argtypes; + return Py_NewRef(self->argtypes); } dict = PyObject_stgdict((PyObject *)self); assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ if (dict->argtypes) { - Py_INCREF(dict->argtypes); - return dict->argtypes; + return Py_NewRef(dict->argtypes); } else { Py_RETURN_NONE; } @@ -3622,8 +3568,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_XINCREF(paramflags); - self->paramflags = paramflags; + self->paramflags = Py_XNewRef(paramflags); *(void **)self->b_ptr = address; Py_INCREF(dll); @@ -3633,8 +3578,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(self); - self->callable = (PyObject *)self; + self->callable = Py_NewRef(self); return (PyObject *)self; } @@ -3659,8 +3603,7 @@ PyCFuncPtr_FromVtblIndex(PyTypeObject *type, PyObject *args, PyObject *kwds) self = (PyCFuncPtrObject *)GenericPyCData_new(type, args, kwds); self->index = index + 0x1000; - Py_XINCREF(paramflags); - self->paramflags = paramflags; + self->paramflags = Py_XNewRef(paramflags); if (iid_len == sizeof(GUID)) self->iid = iid; return (PyObject *)self; @@ -3758,8 +3701,7 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(callable); - self->callable = callable; + self->callable = Py_NewRef(callable); self->thunk = thunk; *(void **)self->b_ptr = (void *)thunk->pcl_exec; @@ -3807,23 +3749,20 @@ _get_arg(int *pindex, PyObject *name, PyObject *defval, PyObject *inargs, PyObje if (*pindex < PyTuple_GET_SIZE(inargs)) { v = PyTuple_GET_ITEM(inargs, *pindex); ++*pindex; - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (kwds && name) { v = PyDict_GetItemWithError(kwds, name); if (v) { ++*pindex; - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; } } if (defval) { - Py_INCREF(defval); - return defval; + return Py_NewRef(defval); } /* we can't currently emit a better error message */ if (name) @@ -3878,8 +3817,7 @@ _build_callargs(PyCFuncPtrObject *self, PyObject *argtypes, if (self->index) return PyTuple_GetSlice(inargs, 1, PyTuple_GET_SIZE(inargs)); #endif - 
Py_INCREF(inargs); - return inargs; + return Py_NewRef(inargs); } len = PyTuple_GET_SIZE(argtypes); @@ -4062,10 +4000,9 @@ _build_result(PyObject *result, PyObject *callargs, PyTuple_SET_ITEM(tup, index, v); index++; } else if (bit & outmask) { - _Py_IDENTIFIER(__ctypes_from_outparam__); v = PyTuple_GET_ITEM(callargs, i); - v = _PyObject_CallMethodIdNoArgs(v, &PyId___ctypes_from_outparam__); + v = PyObject_CallMethodNoArgs(v, &_Py_ID(__ctypes_from_outparam__)); if (v == NULL || numretvals == 1) { Py_DECREF(callargs); return v; @@ -4348,7 +4285,6 @@ _init_pos_args(PyObject *self, PyTypeObject *type, StgDictObject *dict; PyObject *fields; Py_ssize_t i; - _Py_IDENTIFIER(_fields_); if (PyType_stgdict((PyObject *)type->tp_base)) { index = _init_pos_args(self, type->tp_base, @@ -4359,7 +4295,7 @@ _init_pos_args(PyObject *self, PyTypeObject *type, } dict = PyType_stgdict((PyObject *)type); - fields = _PyDict_GetItemIdWithError((PyObject *)dict, &PyId__fields_); + fields = PyDict_GetItemWithError((PyObject *)dict, &_Py_ID(_fields_)); if (fields == NULL) { if (PyErr_Occurred()) { return -1; @@ -4968,8 +4904,7 @@ static PyObject * Simple_from_outparm(PyObject *self, PyObject *args) { if (_ctypes_simple_instance((PyObject *)Py_TYPE(self))) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* call stgdict->getfunc */ return Simple_get_value((CDataObject *)self, NULL); @@ -5603,8 +5538,7 @@ cast(void *ptr, PyObject *src, PyObject *ctype) if (obj->b_objects == NULL) goto failed; } - Py_XINCREF(obj->b_objects); - result->b_objects = obj->b_objects; + result->b_objects = Py_XNewRef(obj->b_objects); if (result->b_objects && PyDict_CheckExact(result->b_objects)) { PyObject *index; int rc; diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 2a668c0ca0cc97..459632b90907fd 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" // windows.h must be included before pycore internal headers @@ -9,7 +8,9 @@ # include #endif -#include "pycore_call.h" // _PyObject_CallNoArgs() +#include "pycore_call.h" // _PyObject_CallNoArgs() +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() #include @@ -125,9 +126,7 @@ static void TryAddRef(StgDictObject *dict, CDataObject *obj) { IUnknown *punk; - _Py_IDENTIFIER(_needs_com_addref_); - - int r = _PyDict_ContainsId((PyObject *)dict, &PyId__needs_com_addref_); + int r = PyDict_Contains((PyObject *)dict, &_Py_ID(_needs_com_addref_)); if (r <= 0) { if (r < 0) { PrintError("getting _needs_com_addref_"); @@ -375,8 +374,7 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, } p->atypes[i] = NULL; - Py_INCREF(restype); - p->restype = restype; + p->restype = Py_NewRef(restype); if (restype == Py_None) { p->setfunc = NULL; p->ffi_restype = &ffi_type_void; @@ -405,9 +403,15 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, "ffi_prep_cif failed with %d", result); goto error; } + + #if HAVE_FFI_PREP_CLOSURE_LOC # ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE # define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME __builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *) +# else +# define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME (ffi_prep_closure_loc != NULL) +# endif # else # define HAVE_FFI_PREP_CLOSURE_LOC_RUNTIME 1 # endif @@ -447,10 +451,8 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, goto error; } - Py_INCREF(converters); - 
p->converters = converters; - Py_INCREF(callable); - p->callable = callable; + p->converters = Py_NewRef(converters); + p->callable = Py_NewRef(callable); return p; error: diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index fa1dfac6c7d946..28b7cd4069718b 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -57,7 +57,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "structmember.h" // PyMemberDef @@ -97,8 +96,12 @@ #define DONT_USE_SEH #endif +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() + #define CTYPES_CAPSULE_NAME_PYMEM "_ctypes pymem" + static void pymem_destructor(PyObject *ptr) { void *p = PyCapsule_GetPointer(ptr, CTYPES_CAPSULE_NAME_PYMEM); @@ -669,8 +672,7 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) if (PyCArg_CheckExact(obj)) { PyCArgObject *carg = (PyCArgObject *)obj; pa->ffi_type = carg->pffi_type; - Py_INCREF(obj); - pa->keep = obj; + pa->keep = Py_NewRef(obj); memcpy(&pa->value, &carg->value, sizeof(pa->value)); return 0; } @@ -700,8 +702,7 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) if (PyBytes_Check(obj)) { pa->ffi_type = &ffi_type_pointer; pa->value.p = PyBytes_AsString(obj); - Py_INCREF(obj); - pa->keep = obj; + pa->keep = Py_NewRef(obj); return 0; } @@ -719,9 +720,8 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) } { - _Py_IDENTIFIER(_as_parameter_); PyObject *arg; - if (_PyObject_LookupAttrId(obj, &PyId__as_parameter_, &arg) < 0) { + if (_PyObject_LookupAttr(obj, &_Py_ID(_as_parameter_), &arg) < 0) { return -1; } /* Which types should we exactly allow here? @@ -832,7 +832,11 @@ static int _call_function_pointer(int flags, #endif # ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE # define HAVE_FFI_PREP_CIF_VAR_RUNTIME __builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *) +# else +# define HAVE_FFI_PREP_CIF_VAR_RUNTIME (ffi_prep_cif_var != NULL) +# endif # elif HAVE_FFI_PREP_CIF_VAR # define HAVE_FFI_PREP_CIF_VAR_RUNTIME true # else @@ -1019,7 +1023,10 @@ void _ctypes_extend_error(PyObject *exc_class, const char *fmt, ...) 
PyErr_Fetch(&tp, &v, &tb); PyErr_NormalizeException(&tp, &v, &tb); - cls_str = PyObject_Str(tp); + if (PyType_Check(tp)) + cls_str = PyType_GetName((PyTypeObject *)tp); + else + cls_str = PyObject_Str(tp); if (cls_str) { PyUnicode_AppendAndDel(&s, cls_str); PyUnicode_AppendAndDel(&s, PyUnicode_FromString(": ")); @@ -1442,8 +1449,13 @@ copy_com_pointer(PyObject *self, PyObject *args) #else #ifdef __APPLE__ #ifdef HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH -#define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ - __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# ifdef HAVE_BUILTIN_AVAILABLE +# define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ + __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# else +# define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \ + (_dyld_shared_cache_contains_path != NULL) +# endif #else // Support the deprecated case of compiling on an older macOS version static void *libsystem_b_handle; @@ -1729,8 +1741,7 @@ byref(PyObject *self, PyObject *args) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; - Py_INCREF(obj); - parg->obj = obj; + parg->obj = Py_NewRef(obj); parg->value.p = (char *)((CDataObject *)obj)->b_ptr + offset; return (PyObject *)parg; } @@ -1770,8 +1781,7 @@ My_PyObj_FromPtr(PyObject *self, PyObject *args) if (PySys_Audit("ctypes.PyObj_FromPtr", "(O)", ob) < 0) { return NULL; } - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -1848,16 +1858,14 @@ static PyObject * unpickle(PyObject *self, PyObject *args) { PyObject *typ, *state, *meth, *obj, *result; - _Py_IDENTIFIER(__new__); - _Py_IDENTIFIER(__setstate__); if (!PyArg_ParseTuple(args, "OO!", &typ, &PyTuple_Type, &state)) return NULL; - obj = _PyObject_CallMethodIdOneArg(typ, &PyId___new__, typ); + obj = PyObject_CallMethodOneArg(typ, &_Py_ID(__new__), typ); if (obj == NULL) return NULL; - meth = _PyObject_GetAttrId(obj, &PyId___setstate__); + meth = PyObject_GetAttr(obj, &_Py_ID(__setstate__)); if (meth == NULL) { goto error; } @@ -1882,29 +1890,20 @@ POINTER(PyObject *self, PyObject *cls) PyObject *result; PyTypeObject *typ; PyObject *key; - char *buf; result = PyDict_GetItemWithError(_ctypes_ptrtype_cache, cls); if (result) { - Py_INCREF(result); - return result; + return Py_NewRef(result); } else if (PyErr_Occurred()) { return NULL; } if (PyUnicode_CheckExact(cls)) { - const char *name = PyUnicode_AsUTF8(cls); - if (name == NULL) - return NULL; - buf = PyMem_Malloc(strlen(name) + 3 + 1); - if (buf == NULL) - return PyErr_NoMemory(); - sprintf(buf, "LP_%s", name); + PyObject *name = PyUnicode_FromFormat("LP_%U", cls); result = PyObject_CallFunction((PyObject *)Py_TYPE(&PyCPointer_Type), - "s(O){}", - buf, + "N(O){}", + name, &PyCPointer_Type); - PyMem_Free(buf); if (result == NULL) return result; key = PyLong_FromVoidPtr(result); @@ -1914,20 +1913,15 @@ POINTER(PyObject *self, PyObject *cls) } } else if (PyType_Check(cls)) { typ = (PyTypeObject *)cls; - buf = PyMem_Malloc(strlen(typ->tp_name) + 3 + 1); - if (buf == NULL) - return PyErr_NoMemory(); - sprintf(buf, "LP_%s", typ->tp_name); + PyObject *name = PyUnicode_FromFormat("LP_%s", typ->tp_name); result = PyObject_CallFunction((PyObject *)Py_TYPE(&PyCPointer_Type), - "s(O){sO}", - buf, + "N(O){sO}", + name, &PyCPointer_Type, "_type_", cls); - PyMem_Free(buf); if (result == NULL) return result; - Py_INCREF(cls); - key = cls; + key = Py_NewRef(cls); } else { PyErr_SetString(PyExc_TypeError, "must be a ctypes type"); return NULL; diff --git a/Modules/_ctypes/cfield.c 
b/Modules/_ctypes/cfield.c index 13ed8b7eda6555..791aeba66539e9 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -144,8 +144,7 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, self->getfunc = getfunc; self->index = index; - Py_INCREF(proto); - self->proto = proto; + self->proto = Py_NewRef(proto); switch (fieldtype) { case NEW_BITFIELD: @@ -231,8 +230,7 @@ PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) { CDataObject *src; if (inst == NULL) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (!CDataObject_Check(inst)) { PyErr_SetString(PyExc_TypeError, @@ -1097,8 +1095,7 @@ O_get(void *ptr, Py_ssize_t size) "PyObject is NULL"); return NULL; } - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -1106,8 +1103,7 @@ O_set(void *ptr, PyObject *value, Py_ssize_t size) { /* Hm, does the memory block need it's own refcount or not? */ *(PyObject **)ptr = value; - Py_INCREF(value); - return value; + return Py_NewRef(value); } @@ -1233,8 +1229,7 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length) return NULL; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } @@ -1292,13 +1287,11 @@ z_set(void *ptr, PyObject *value, Py_ssize_t size) { if (value == Py_None) { *(char **)ptr = NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyBytes_Check(value)) { *(const char **)ptr = PyBytes_AsString(value); - Py_INCREF(value); - return value; + return Py_NewRef(value); } else if (PyLong_Check(value)) { #if SIZEOF_VOID_P == SIZEOF_LONG_LONG *(char **)ptr = (char *)PyLong_AsUnsignedLongLongMask(value); @@ -1334,8 +1327,7 @@ Z_set(void *ptr, PyObject *value, Py_ssize_t size) if (value == Py_None) { *(wchar_t **)ptr = NULL; - Py_INCREF(value); - return value; + return Py_NewRef(value); } if (PyLong_Check(value)) { #if SIZEOF_VOID_P == SIZEOF_LONG_LONG diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 88eb9f59922a04..a7029b6e6da2b8 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -26,6 +26,12 @@ #endif #endif +#if defined(__has_builtin) +#if __has_builtin(__builtin_available) +#define HAVE_BUILTIN_AVAILABLE 1 +#endif +#endif + typedef struct tagPyCArgObject PyCArgObject; typedef struct tagCDataObject CDataObject; typedef PyObject *(* GETFUNC)(void *, Py_ssize_t size); diff --git a/Modules/_ctypes/malloc_closure.c b/Modules/_ctypes/malloc_closure.c index d47153f1d7f3e8..3a859322772ba7 100644 --- a/Modules/_ctypes/malloc_closure.c +++ b/Modules/_ctypes/malloc_closure.c @@ -23,6 +23,7 @@ /* #define MALLOC_CLOSURE_DEBUG */ /* enable for some debugging output */ + /******************************************************************/ typedef union _tagITEM { @@ -96,7 +97,11 @@ void Py_ffi_closure_free(void *p) { #ifdef HAVE_FFI_CLOSURE_ALLOC #ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE if (__builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *)) { +# else + if (ffi_closure_free != NULL) { +# endif #endif ffi_closure_free(p); return; @@ -114,7 +119,11 @@ void *Py_ffi_closure_alloc(size_t size, void** codeloc) { #ifdef HAVE_FFI_CLOSURE_ALLOC #ifdef USING_APPLE_OS_LIBFFI +# ifdef HAVE_BUILTIN_AVAILABLE if (__builtin_available(macos 10.15, ios 13, watchos 6, tvos 13, *)) { +# else + if (ffi_closure_alloc != NULL) { +# endif #endif return ffi_closure_alloc(size, codeloc); #ifdef USING_APPLE_OS_LIBFFI diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index a819ce910d4b52..099331ca8bdb9c 100644 --- 
a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" // windows.h must be included before pycore internal headers @@ -268,8 +267,7 @@ MakeFields(PyObject *type, CFieldObject *descr, new_descr->size = fdescr->size; new_descr->offset = fdescr->offset + offset; new_descr->index = fdescr->index + index; - new_descr->proto = fdescr->proto; - Py_XINCREF(new_descr->proto); + new_descr->proto = Py_XNewRef(fdescr->proto); new_descr->getfunc = fdescr->getfunc; new_descr->setfunc = fdescr->setfunc; @@ -291,12 +289,11 @@ MakeFields(PyObject *type, CFieldObject *descr, static int MakeAnonFields(PyObject *type) { - _Py_IDENTIFIER(_anonymous_); PyObject *anon; PyObject *anon_names; Py_ssize_t i; - if (_PyObject_LookupAttrId(type, &PyId__anonymous_, &anon) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_anonymous_), &anon) < 0) { return -1; } if (anon == NULL) { @@ -347,9 +344,6 @@ MakeAnonFields(PyObject *type) int PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct) { - _Py_IDENTIFIER(_swappedbytes_); - _Py_IDENTIFIER(_use_broken_old_ctypes_structure_semantics_); - _Py_IDENTIFIER(_pack_); StgDictObject *stgdict, *basedict; Py_ssize_t len, offset, size, align, i; Py_ssize_t union_size, total_align; @@ -362,18 +356,10 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct int big_endian; int arrays_seen = 0; - /* HACK Alert: I cannot be bothered to fix ctypes.com, so there has to - be a way to use the old, broken semantics: _fields_ are not extended - but replaced in subclasses. - - XXX Remove this in ctypes 1.0! - */ - int use_broken_old_ctypes_semantics; - if (fields == NULL) return 0; - if (_PyObject_LookupAttrId(type, &PyId__swappedbytes_, &tmp) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_swappedbytes_), &tmp) < 0) { return -1; } if (tmp) { @@ -384,20 +370,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct big_endian = PY_BIG_ENDIAN; } - if (_PyObject_LookupAttrId(type, - &PyId__use_broken_old_ctypes_structure_semantics_, &tmp) < 0) - { - return -1; - } - if (tmp) { - Py_DECREF(tmp); - use_broken_old_ctypes_semantics = 1; - } - else { - use_broken_old_ctypes_semantics = 0; - } - - if (_PyObject_LookupAttrId(type, &PyId__pack_, &tmp) < 0) { + if (_PyObject_LookupAttr(type, &_Py_ID(_pack_), &tmp) < 0) { return -1; } if (tmp) { @@ -430,8 +403,11 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct } stgdict = PyType_stgdict(type); - if (!stgdict) + if (!stgdict) { + PyErr_SetString(PyExc_TypeError, + "ctypes state is not initialized"); return -1; + } /* If this structure/union is already marked final we cannot assign _fields_ anymore. 
*/ @@ -457,7 +433,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct if (!isStruct) { stgdict->flags |= TYPEFLAG_HASUNION; } - if (basedict && !use_broken_old_ctypes_semantics) { + if (basedict) { size = offset = basedict->size; align = basedict->align; union_size = 0; diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c index 0b328f9665a0a2..2144345de01ba3 100644 --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -261,8 +261,7 @@ PyCursesPanel_New(_curses_panel_state *state, PANEL *pan, Py_DECREF(po); return NULL; } - po->wo = wo; - Py_INCREF(wo); + po->wo = (PyCursesWindowObject*)Py_NewRef(wo); return (PyObject *)po; } @@ -313,8 +312,7 @@ _curses_panel_panel_above_impl(PyCursesPanelObject *self) "panel_above: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /* panel_below(NULL) returns the top panel in the stack. To get @@ -344,8 +342,7 @@ _curses_panel_panel_below_impl(PyCursesPanelObject *self) "panel_below: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -394,8 +391,7 @@ static PyObject * _curses_panel_panel_window_impl(PyCursesPanelObject *self) /*[clinic end generated code: output=5f05940d4106b4cb input=6067353d2c307901]*/ { - Py_INCREF(self->wo); - return (PyObject *)self->wo; + return Py_NewRef(self->wo); } /*[clinic input] @@ -428,8 +424,7 @@ _curses_panel_panel_replace_impl(PyCursesPanelObject *self, PyErr_SetString(state->PyCursesError, "replace_panel() returned ERR"); return NULL; } - Py_INCREF(win); - Py_SETREF(po->wo, win); + Py_SETREF(po->wo, (PyCursesWindowObject*)Py_NewRef(win)); Py_RETURN_NONE; } @@ -486,8 +481,7 @@ _curses_panel_panel_userptr_impl(PyCursesPanelObject *self, return NULL; } - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } @@ -555,8 +549,7 @@ _curses_panel_bottom_panel_impl(PyObject *module) "panel_above: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -614,8 +607,7 @@ _curses_panel_top_panel_impl(PyObject *module) "panel_below: can't find Panel Object"); return NULL; } - Py_INCREF(po); - return (PyObject *)po; + return Py_NewRef(po); } /*[clinic input] @@ -670,8 +662,7 @@ _curses_panel_exec(PyObject *mod) state->PyCursesError = PyErr_NewException( "_curses_panel.error", NULL, NULL); - Py_INCREF(state->PyCursesError); - if (PyModule_AddObject(mod, "error", state->PyCursesError) < 0) { + if (PyModule_AddObject(mod, "error", Py_NewRef(state->PyCursesError)) < 0) { Py_DECREF(state->PyCursesError); return -1; } diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index c10b2b302c6024..5146b4004a141a 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -103,7 +103,6 @@ static const char PyCursesVersion[] = "2.2"; #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #define PY_SSIZE_T_CLEAN @@ -390,8 +389,7 @@ PyCurses_ConvertToString(PyCursesWindowObject *win, PyObject *obj, #endif } else if (PyBytes_Check(obj)) { - Py_INCREF(obj); - *bytes = obj; + *bytes = Py_NewRef(obj); /* check for embedded null bytes */ if (PyBytes_AsStringAndSize(*bytes, &str, NULL) < 0) { Py_DECREF(obj); @@ -2177,12 +2175,11 @@ _curses_window_putwin(PyCursesWindowObject *self, PyObject *file) while (1) { char buf[BUFSIZ]; Py_ssize_t n = fread(buf, 1, BUFSIZ, fp); - _Py_IDENTIFIER(write); if (n <= 0) break; Py_DECREF(res); - res = 
_PyObject_CallMethodId(file, &PyId_write, "y#", buf, n); + res = PyObject_CallMethod(file, "write", "y#", buf, n); if (res == NULL) break; } @@ -3051,7 +3048,6 @@ _curses_getwin(PyObject *module, PyObject *file) PyObject *data; size_t datalen; WINDOW *win; - _Py_IDENTIFIER(read); PyObject *res = NULL; PyCursesInitialised; @@ -3063,7 +3059,7 @@ _curses_getwin(PyObject *module, PyObject *file) if (_Py_set_inheritable(fileno(fp), 0, NULL) < 0) goto error; - data = _PyObject_CallMethodIdNoArgs(file, &PyId_read); + data = PyObject_CallMethod(file, "read", NULL); if (data == NULL) goto error; if (!PyBytes_Check(data)) { @@ -3962,8 +3958,6 @@ update_lines_cols(void) { PyObject *o; PyObject *m = PyImport_ImportModule("curses"); - _Py_IDENTIFIER(LINES); - _Py_IDENTIFIER(COLS); if (!m) return 0; @@ -3973,13 +3967,12 @@ update_lines_cols(void) Py_DECREF(m); return 0; } - if (_PyObject_SetAttrId(m, &PyId_LINES, o)) { + if (PyObject_SetAttrString(m, "LINES", o)) { Py_DECREF(m); Py_DECREF(o); return 0; } - /* PyId_LINES.object will be initialized here. */ - if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) { + if (PyDict_SetItemString(ModDict, "LINES", o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -3990,12 +3983,12 @@ update_lines_cols(void) Py_DECREF(m); return 0; } - if (_PyObject_SetAttrId(m, &PyId_COLS, o)) { + if (PyObject_SetAttrString(m, "COLS", o)) { Py_DECREF(m); Py_DECREF(o); return 0; } - if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) { + if (PyDict_SetItemString(ModDict, "COLS", o)) { Py_DECREF(m); Py_DECREF(o); return 0; diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index d86418af0dc1a8..eda8c5610ba659 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -10,7 +10,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "pycore_long.h" // _PyLong_GetOne() @@ -141,10 +140,6 @@ static PyTypeObject PyDateTime_TimeZoneType; static int check_tzinfo_subclass(PyObject *p); -_Py_IDENTIFIER(as_integer_ratio); -_Py_IDENTIFIER(fromutc); -_Py_IDENTIFIER(isoformat); -_Py_IDENTIFIER(strftime); /* --------------------------------------------------------------------------- * Math utilities. 
@@ -194,8 +189,7 @@ divide_nearest(PyObject *m, PyObject *n) temp = _PyLong_DivmodNear(m, n); if (temp == NULL) return NULL; - result = PyTuple_GET_ITEM(temp, 0); - Py_INCREF(result); + result = Py_NewRef(PyTuple_GET_ITEM(temp, 0)); Py_DECREF(temp); return result; @@ -1010,8 +1004,7 @@ new_datetime_ex2(int year, int month, int day, int hour, int minute, DATE_SET_SECOND(self, second); DATE_SET_MICROSECOND(self, usecond); if (aware) { - Py_INCREF(tzinfo); - self->tzinfo = tzinfo; + self->tzinfo = Py_NewRef(tzinfo); } DATE_SET_FOLD(self, fold); } @@ -1088,8 +1081,7 @@ new_time_ex2(int hour, int minute, int second, int usecond, TIME_SET_SECOND(self, second); TIME_SET_MICROSECOND(self, usecond); if (aware) { - Py_INCREF(tzinfo); - self->tzinfo = tzinfo; + self->tzinfo = Py_NewRef(tzinfo); } TIME_SET_FOLD(self, fold); } @@ -1170,10 +1162,8 @@ create_timezone(PyObject *offset, PyObject *name) if (self == NULL) { return NULL; } - Py_INCREF(offset); - self->offset = offset; - Py_XINCREF(name); - self->name = name; + self->offset = Py_NewRef(offset); + self->name = Py_XNewRef(name); return (PyObject *)self; } @@ -1187,8 +1177,7 @@ new_timezone(PyObject *offset, PyObject *name) assert(name == NULL || PyUnicode_Check(name)); if (name == NULL && delta_bool((PyDateTime_Delta *)offset) == 0) { - Py_INCREF(PyDateTime_TimeZone_UTC); - return PyDateTime_TimeZone_UTC; + return Py_NewRef(PyDateTime_TimeZone_UTC); } if ((GET_TD_DAYS(offset) == -1 && GET_TD_SECONDS(offset) == 0 && @@ -1323,8 +1312,6 @@ static PyObject * call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) { PyObject *result; - _Py_IDENTIFIER(tzname); - assert(tzinfo != NULL); assert(check_tzinfo_subclass(tzinfo) >= 0); assert(tzinfoarg != NULL); @@ -1332,7 +1319,7 @@ call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) if (tzinfo == Py_None) Py_RETURN_NONE; - result = _PyObject_CallMethodIdOneArg(tzinfo, &PyId_tzname, tzinfoarg); + result = PyObject_CallMethodOneArg(tzinfo, &_Py_ID(tzname), tzinfoarg); if (result == NULL || result == Py_None) return result; @@ -1341,8 +1328,7 @@ call_tzname(PyObject *tzinfo, PyObject *tzinfoarg) PyErr_Format(PyExc_TypeError, "tzinfo.tzname() must " "return None or a string, not '%s'", Py_TYPE(result)->tp_name); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } return result; @@ -1404,8 +1390,7 @@ tzinfo_from_isoformat_results(int rv, int tzoffset, int tz_useconds) if (rv == 1) { // Create a timezone from offset in seconds (0 returns UTC) if (tzoffset == 0) { - Py_INCREF(PyDateTime_TimeZone_UTC); - return PyDateTime_TimeZone_UTC; + return Py_NewRef(PyDateTime_TimeZone_UTC); } PyObject *delta = new_delta(0, tzoffset, tz_useconds, 1); @@ -1416,8 +1401,7 @@ tzinfo_from_isoformat_results(int rv, int tzoffset, int tz_useconds) Py_DECREF(delta); } else { - tzinfo = Py_None; - Py_INCREF(Py_None); + tzinfo = Py_NewRef(Py_None); } return tzinfo; @@ -1515,7 +1499,7 @@ make_somezreplacement(PyObject *object, char *sep, PyObject *tzinfoarg) if (tzinfo == Py_None || tzinfo == NULL) { return PyBytes_FromStringAndSize(NULL, 0); } - + assert(tzinfoarg != NULL); if (format_utcoffset(buf, sizeof(buf), @@ -1523,7 +1507,7 @@ make_somezreplacement(PyObject *object, char *sep, PyObject *tzinfoarg) tzinfo, tzinfoarg) < 0) return NULL; - + return PyBytes_FromStringAndSize(buf, strlen(buf)); } @@ -1533,7 +1517,6 @@ make_Zreplacement(PyObject *object, PyObject *tzinfoarg) PyObject *temp; PyObject *tzinfo = get_tzinfo_member(object); PyObject *Zreplacement = PyUnicode_FromStringAndSize(NULL, 0); - _Py_IDENTIFIER(replace); if 
(Zreplacement == NULL) return NULL; @@ -1555,7 +1538,7 @@ make_Zreplacement(PyObject *object, PyObject *tzinfoarg) * strftime doesn't treat them as format codes. */ Py_DECREF(Zreplacement); - Zreplacement = _PyObject_CallMethodId(temp, &PyId_replace, "ss", "%", "%%"); + Zreplacement = PyObject_CallMethod(temp, "replace", "ss", "%", "%%"); Py_DECREF(temp); if (Zreplacement == NULL) return NULL; @@ -1865,8 +1848,7 @@ delta_to_microseconds(PyDateTime_Delta *self) x2 = PyNumber_Multiply(x1, seconds_per_day); /* days in seconds */ if (x2 == NULL) goto Done; - Py_DECREF(x1); - x1 = NULL; + Py_SETREF(x1, NULL); /* x2 has days in seconds */ x1 = PyLong_FromLong(GET_TD_SECONDS(self)); /* seconds */ @@ -1883,8 +1865,7 @@ delta_to_microseconds(PyDateTime_Delta *self) x1 = PyNumber_Multiply(x3, us_per_second); /* us */ if (x1 == NULL) goto Done; - Py_DECREF(x3); - x3 = NULL; + Py_SETREF(x3, NULL); /* x1 has days+seconds in us */ x2 = PyLong_FromLong(GET_TD_MICROSECONDS(self)); @@ -1951,8 +1932,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) goto BadDivmod; } - num = PyTuple_GET_ITEM(tuple, 0); /* leftover seconds */ - Py_INCREF(num); + num = Py_NewRef(PyTuple_GET_ITEM(tuple, 0)); /* leftover seconds */ Py_DECREF(tuple); tuple = checked_divmod(num, seconds_per_day); @@ -1970,8 +1950,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) goto BadDivmod; } - num = PyTuple_GET_ITEM(tuple, 0); /* leftover days */ - Py_INCREF(num); + num = Py_NewRef(PyTuple_GET_ITEM(tuple, 0)); /* leftover days */ d = _PyLong_AsInt(num); if (d == -1 && PyErr_Occurred()) { goto Done; @@ -2019,7 +1998,7 @@ get_float_as_integer_ratio(PyObject *floatobj) PyObject *ratio; assert(floatobj && PyFloat_Check(floatobj)); - ratio = _PyObject_CallMethodIdNoArgs(floatobj, &PyId_as_integer_ratio); + ratio = PyObject_CallMethodNoArgs(floatobj, &_Py_ID(as_integer_ratio)); if (ratio == NULL) { return NULL; } @@ -2056,8 +2035,7 @@ multiply_truedivide_timedelta_float(PyDateTime_Delta *delta, PyObject *floatobj, goto error; } temp = PyNumber_Multiply(pyus_in, PyTuple_GET_ITEM(ratio, op)); - Py_DECREF(pyus_in); - pyus_in = NULL; + Py_SETREF(pyus_in, NULL); if (temp == NULL) goto error; pyus_out = divide_nearest(temp, PyTuple_GET_ITEM(ratio, !op)); @@ -2982,8 +2960,6 @@ date_today(PyObject *cls, PyObject *dummy) { PyObject *time; PyObject *result; - _Py_IDENTIFIER(fromtimestamp); - time = time_time(); if (time == NULL) return NULL; @@ -2994,7 +2970,7 @@ date_today(PyObject *cls, PyObject *dummy) * time.time() delivers; if someone were gonzo about optimization, * date.today() could get away with plain C time(). */ - result = _PyObject_CallMethodIdOneArg(cls, &PyId_fromtimestamp, time); + result = PyObject_CallMethodOneArg(cls, &_Py_ID(fromtimestamp), time); Py_DECREF(time); return result; } @@ -3245,7 +3221,7 @@ date_isoformat(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored)) static PyObject * date_str(PyDateTime_Date *self) { - return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat); + return PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(isoformat)); } @@ -3264,14 +3240,13 @@ date_strftime(PyDateTime_Date *self, PyObject *args, PyObject *kw) PyObject *result; PyObject *tuple; PyObject *format; - _Py_IDENTIFIER(timetuple); static char *keywords[] = {"format", NULL}; if (! 
PyArg_ParseTupleAndKeywords(args, kw, "U:strftime", keywords, &format)) return NULL; - tuple = _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_timetuple); + tuple = PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(timetuple)); if (tuple == NULL) return NULL; result = wrap_strftime((PyObject *)self, format, tuple, @@ -3292,7 +3267,7 @@ date_format(PyDateTime_Date *self, PyObject *args) if (PyUnicode_GetLength(format) == 0) return PyObject_Str((PyObject *)self); - return _PyObject_CallMethodIdOneArg((PyObject *)self, &PyId_strftime, + return PyObject_CallMethodOneArg((PyObject *)self, &_Py_ID(strftime), format); } @@ -3357,8 +3332,7 @@ iso_calendar_date_year(PyDateTime_IsoCalendarDate *self, void *unused) if (year == NULL) { return NULL; } - Py_INCREF(year); - return year; + return Py_NewRef(year); } static PyObject * @@ -3368,8 +3342,7 @@ iso_calendar_date_week(PyDateTime_IsoCalendarDate *self, void *unused) if (week == NULL) { return NULL; } - Py_INCREF(week); - return week; + return Py_NewRef(week); } static PyObject * @@ -3379,8 +3352,7 @@ iso_calendar_date_weekday(PyDateTime_IsoCalendarDate *self, void *unused) if (weekday == NULL) { return NULL; } - Py_INCREF(weekday); - return weekday; + return Py_NewRef(weekday); } static PyGetSetDef iso_calendar_date_getset[] = { @@ -3821,9 +3793,8 @@ tzinfo_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *args, *state; PyObject *getinitargs; - _Py_IDENTIFIER(__getinitargs__); - if (_PyObject_LookupAttrId(self, &PyId___getinitargs__, &getinitargs) < 0) { + if (_PyObject_LookupAttr(self, &_Py_ID(__getinitargs__), &getinitargs) < 0) { return NULL; } if (getinitargs != NULL) { @@ -3992,8 +3963,7 @@ timezone_str(PyDateTime_TimeZone *self) char sign; if (self->name != NULL) { - Py_INCREF(self->name); - return self->name; + return Py_NewRef(self->name); } if ((PyObject *)self == PyDateTime_TimeZone_UTC || (GET_TD_DAYS(self->offset) == 0 && @@ -4009,8 +3979,7 @@ timezone_str(PyDateTime_TimeZone *self) } else { sign = '+'; - offset = self->offset; - Py_INCREF(offset); + offset = Py_NewRef(self->offset); } /* Offset is not negative here. */ microseconds = GET_TD_MICROSECONDS(offset); @@ -4045,8 +4014,7 @@ timezone_utcoffset(PyDateTime_TimeZone *self, PyObject *dt) if (_timezone_check_argument(dt, "utcoffset") == -1) return NULL; - Py_INCREF(self->offset); - return self->offset; + return Py_NewRef(self->offset); } static PyObject * @@ -4183,8 +4151,7 @@ static PyObject * time_tzinfo(PyDateTime_Time *self, void *unused) { PyObject *result = HASTZINFO(self) ? 
self->tzinfo : Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -4229,8 +4196,7 @@ time_from_pickle(PyTypeObject *type, PyObject *state, PyObject *tzinfo) me->hashcode = -1; me->hastzinfo = aware; if (aware) { - Py_INCREF(tzinfo); - me->tzinfo = tzinfo; + me->tzinfo = Py_NewRef(tzinfo); } if (pdata[0] & (1 << 7)) { me->data[0] -= 128; @@ -4368,7 +4334,7 @@ time_repr(PyDateTime_Time *self) static PyObject * time_str(PyDateTime_Time *self) { - return _PyObject_CallMethodIdNoArgs((PyObject *)self, &PyId_isoformat); + return PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(isoformat)); } static PyObject * @@ -4526,12 +4492,10 @@ time_richcompare(PyObject *self, PyObject *other, int op) result = diff_to_bool(diff, op); } else if (op == Py_EQ) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); } else if (op == Py_NE) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); } else { PyErr_SetString(PyExc_TypeError, @@ -4560,8 +4524,7 @@ time_hash(PyDateTime_Time *self) return -1; } else { - self0 = (PyObject *)self; - Py_INCREF(self0); + self0 = Py_NewRef(self); } offset = time_utcoffset(self0, NULL); Py_DECREF(self0); @@ -4858,8 +4821,7 @@ static PyObject * datetime_tzinfo(PyDateTime_DateTime *self, void *unused) { PyObject *result = HASTZINFO(self) ? self->tzinfo : Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -4906,8 +4868,7 @@ datetime_from_pickle(PyTypeObject *type, PyObject *state, PyObject *tzinfo) me->hashcode = -1; me->hastzinfo = aware; if (aware) { - Py_INCREF(tzinfo); - me->tzinfo = tzinfo; + me->tzinfo = Py_NewRef(tzinfo); } if (pdata[2] & (1 << 7)) { me->data[2] -= 128; @@ -5170,7 +5131,9 @@ datetime_datetime_now_impl(PyTypeObject *type, PyObject *tz) tz); if (self != NULL && tz != Py_None) { /* Convert UTC to tzinfo's zone. */ - self = _PyObject_CallMethodId(tz, &PyId_fromutc, "N", self); + PyObject *res = PyObject_CallMethodOneArg(tz, &_Py_ID(fromutc), self); + Py_DECREF(self); + return res; } return self; } @@ -5206,7 +5169,9 @@ datetime_fromtimestamp(PyObject *cls, PyObject *args, PyObject *kw) tzinfo); if (self != NULL && tzinfo != Py_None) { /* Convert UTC to tzinfo's zone. 
*/ - self = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "N", self); + PyObject *res = PyObject_CallMethodOneArg(tzinfo, &_Py_ID(fromutc), self); + Py_DECREF(self); + return res; } return self; } @@ -5230,7 +5195,6 @@ datetime_strptime(PyObject *cls, PyObject *args) { static PyObject *module = NULL; PyObject *string, *format; - _Py_IDENTIFIER(_strptime_datetime); if (!PyArg_ParseTuple(args, "UU:strptime", &string, &format)) return NULL; @@ -5240,7 +5204,7 @@ datetime_strptime(PyObject *cls, PyObject *args) if (module == NULL) return NULL; } - return _PyObject_CallMethodIdObjArgs(module, &PyId__strptime_datetime, + return PyObject_CallMethodObjArgs(module, &_Py_ID(_strptime_datetime), cls, string, format, NULL); } @@ -5316,8 +5280,7 @@ _sanitize_isoformat_str(PyObject *dtstr) } if (surrogate_separator == 0) { - Py_INCREF(dtstr); - return dtstr; + return Py_NewRef(dtstr); } PyObject *str_out = _PyUnicode_Copy(dtstr); @@ -5631,9 +5594,8 @@ datetime_subtract(PyObject *left, PyObject *right) int delta_d, delta_s, delta_us; if (GET_DT_TZINFO(left) == GET_DT_TZINFO(right)) { - offset2 = offset1 = Py_None; - Py_INCREF(offset1); - Py_INCREF(offset2); + offset1 = Py_NewRef(Py_None); + offset2 = Py_NewRef(Py_None); } else { offset1 = datetime_utcoffset(left, NULL); @@ -5747,7 +5709,14 @@ datetime_repr(PyDateTime_DateTime *self) static PyObject * datetime_str(PyDateTime_DateTime *self) { - return _PyObject_CallMethodId((PyObject *)self, &PyId_isoformat, "s", " "); + PyObject *space = PyUnicode_FromString(" "); + if (space == NULL) { + return NULL; + } + PyObject *res = PyObject_CallMethodOneArg((PyObject *)self, + &_Py_ID(isoformat), space); + Py_DECREF(space); + return res; } static PyObject * @@ -5971,12 +5940,10 @@ datetime_richcompare(PyObject *self, PyObject *other, int op) result = diff_to_bool(diff, op); } else if (op == Py_EQ) { - result = Py_False; - Py_INCREF(result); + result = Py_NewRef(Py_False); } else if (op == Py_NE) { - result = Py_True; - Py_INCREF(result); + result = Py_NewRef(Py_True); } else { PyErr_SetString(PyExc_TypeError, @@ -6008,8 +5975,7 @@ datetime_hash(PyDateTime_DateTime *self) return -1; } else { - self0 = (PyObject *)self; - Py_INCREF(self0); + self0 = Py_NewRef(self); } offset = datetime_utcoffset(self0, NULL); Py_DECREF(self0); @@ -6226,15 +6192,13 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) if (self_tzinfo == NULL) return NULL; } else { - self_tzinfo = self->tzinfo; - Py_INCREF(self_tzinfo); + self_tzinfo = Py_NewRef(self->tzinfo); } /* Conversion to self's own time zone is a NOP. */ if (self_tzinfo == tzinfo) { Py_DECREF(self_tzinfo); - Py_INCREF(self); - return self; + return (PyDateTime_DateTime*)Py_NewRef(self); } /* Convert self to UTC. */ @@ -6279,14 +6243,10 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) } else { /* Result is already aware - just replace tzinfo. */ - temp = result->tzinfo; - result->tzinfo = PyDateTime_TimeZone_UTC; - Py_INCREF(result->tzinfo); - Py_DECREF(temp); + Py_SETREF(result->tzinfo, Py_NewRef(PyDateTime_TimeZone_UTC)); } /* Attach new tzinfo and let fromutc() do the rest. 
*/ - temp = result->tzinfo; if (tzinfo == Py_None) { tzinfo = local_timezone(result); if (tzinfo == NULL) { @@ -6296,12 +6256,11 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) } else Py_INCREF(tzinfo); - result->tzinfo = tzinfo; - Py_DECREF(temp); + Py_SETREF(result->tzinfo, tzinfo); temp = (PyObject *)result; result = (PyDateTime_DateTime *) - _PyObject_CallMethodIdOneArg(tzinfo, &PyId_fromutc, temp); + PyObject_CallMethodOneArg(tzinfo, &_Py_ID(fromutc), temp); Py_DECREF(temp); return result; @@ -6451,8 +6410,7 @@ datetime_utctimetuple(PyDateTime_DateTime *self, PyObject *Py_UNUSED(ignored)) tzinfo = GET_DT_TZINFO(self); if (tzinfo == Py_None) { - utcself = self; - Py_INCREF(utcself); + utcself = (PyDateTime_DateTime*)Py_NewRef(self); } else { PyObject *offset; @@ -6461,8 +6419,7 @@ datetime_utctimetuple(PyDateTime_DateTime *self, PyObject *Py_UNUSED(ignored)) return NULL; if (offset == Py_None) { Py_DECREF(offset); - utcself = self; - Py_INCREF(utcself); + utcself = (PyDateTime_DateTime*)Py_NewRef(self); } else { utcself = (PyDateTime_DateTime *)add_datetime_timedelta(self, diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index 9c83e380afd0f3..54376022dcb182 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -358,8 +358,7 @@ _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, return PyBytes_FromStringAndSize(val.dptr, val.dsize); } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } /*[clinic input] @@ -419,8 +418,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, static PyObject * dbm__enter__(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 548bd601b0eb87..2d6e4e4d62b42c 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -116,15 +116,13 @@ static PyTypeObject PyDecContextManager_Type; Py_LOCAL_INLINE(PyObject *) incr_true(void) { - Py_INCREF(Py_True); - return Py_True; + return Py_NewRef(Py_True); } Py_LOCAL_INLINE(PyObject *) incr_false(void) { - Py_INCREF(Py_False); - return Py_False; + return Py_NewRef(Py_False); } @@ -655,8 +653,7 @@ signaldict_richcompare(PyObject *v, PyObject *w, int op) } } - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -754,8 +751,7 @@ context_getround(PyObject *self, void *closure UNUSED) { int i = mpd_getround(CTX(self)); - Py_INCREF(round_map[i]); - return round_map[i]; + return Py_NewRef(round_map[i]); } static PyObject * @@ -1122,13 +1118,11 @@ context_getattr(PyObject *self, PyObject *name) if (PyUnicode_Check(name)) { if (PyUnicode_CompareWithASCIIString(name, "traps") == 0) { retval = ((PyDecContextObject *)self)->traps; - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } if (PyUnicode_CompareWithASCIIString(name, "flags") == 0) { retval = ((PyDecContextObject *)self)->flags; - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } } @@ -1602,8 +1596,7 @@ PyDec_GetCurrentContext(PyObject *self UNUSED, PyObject *args UNUSED) return NULL; } - Py_INCREF(context); - return context; + return Py_NewRef(context); } /* Set the thread local context to a new context, decrement old reference */ @@ -1778,8 +1771,7 @@ ctxmanager_new(PyTypeObject *type UNUSED, PyObject *args, PyObject *kwds) Py_DECREF(self); return NULL; } - self->global = global; - Py_INCREF(self->global); + self->global = 
Py_NewRef(global); int ret = context_setattrs( self->local, prec, rounding, @@ -1814,8 +1806,7 @@ ctxmanager_set_local(PyDecContextManagerObject *self, PyObject *args UNUSED) } Py_DECREF(ret); - Py_INCREF(self->local); - return self->local; + return Py_NewRef(self->local); } static PyObject * @@ -2418,8 +2409,7 @@ PyDecType_FromDecimalExact(PyTypeObject *type, PyObject *v, PyObject *context) uint32_t status = 0; if (type == &PyDec_Type && PyDec_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } dec = PyDecType_New(type); @@ -2440,8 +2430,7 @@ static PyObject * sequence_as_tuple(PyObject *v, PyObject *ex, const char *mesg) { if (PyTuple_Check(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyList_Check(v)) { return PyList_AsTuple(v); @@ -2863,8 +2852,7 @@ convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) { if (PyDec_Check(v)) { - *conv = v; - Py_INCREF(v); + *conv = Py_NewRef(v); return 1; } if (PyLong_Check(v)) { @@ -2881,8 +2869,7 @@ convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) Py_TYPE(v)->tp_name); } else { - Py_INCREF(Py_NotImplemented); - *conv = Py_NotImplemented; + *conv = Py_NewRef(Py_NotImplemented); } return 0; } @@ -3041,8 +3028,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, *vcmp = v; if (PyDec_Check(w)) { - Py_INCREF(w); - *wcmp = w; + *wcmp = Py_NewRef(w); } else if (PyLong_Check(w)) { *wcmp = PyDec_FromLongExact(w, context); @@ -3074,8 +3060,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, } } else { - Py_INCREF(Py_NotImplemented); - *wcmp = Py_NotImplemented; + *wcmp = Py_NewRef(Py_NotImplemented); } } else { @@ -3093,8 +3078,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, } } else { - Py_INCREF(Py_NotImplemented); - *wcmp = Py_NotImplemented; + *wcmp = Py_NewRef(Py_NotImplemented); } } @@ -4329,15 +4313,13 @@ dec_mpd_adjexp(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_canonical(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * dec_conjugate(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -4654,8 +4636,7 @@ dec_complex(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_copy(PyObject *self, PyObject *dummy UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* __floor__ */ @@ -4838,8 +4819,7 @@ dec_trunc(PyObject *self, PyObject *dummy UNUSED) static PyObject * dec_real(PyObject *self, void *closure UNUSED) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -5384,8 +5364,7 @@ ctx_canonical(PyObject *context UNUSED, PyObject *v) return NULL; } - Py_INCREF(v); - return v; + return Py_NewRef(v); } static PyObject * @@ -5916,23 +5895,17 @@ PyInit__decimal(void) /* Create the module */ ASSIGN_PTR(m, PyModule_Create(&_decimal_module)); - /* Add types to the module */ - Py_INCREF(&PyDec_Type); - CHECK_INT(PyModule_AddObject(m, "Decimal", (PyObject *)&PyDec_Type)); - Py_INCREF(&PyDecContext_Type); + CHECK_INT(PyModule_AddObject(m, "Decimal", Py_NewRef(&PyDec_Type))); CHECK_INT(PyModule_AddObject(m, "Context", - (PyObject *)&PyDecContext_Type)); - Py_INCREF(DecimalTuple); - CHECK_INT(PyModule_AddObject(m, "DecimalTuple", (PyObject *)DecimalTuple)); - + Py_NewRef(&PyDecContext_Type))); + CHECK_INT(PyModule_AddObject(m, "DecimalTuple", Py_NewRef(DecimalTuple))); /* Create top 
level exception */ ASSIGN_PTR(DecimalException, PyErr_NewException( "decimal.DecimalException", PyExc_ArithmeticError, NULL)); - Py_INCREF(DecimalException); - CHECK_INT(PyModule_AddObject(m, "DecimalException", DecimalException)); + CHECK_INT(PyModule_AddObject(m, "DecimalException", Py_NewRef(DecimalException))); /* Create signal tuple */ ASSIGN_PTR(SignalTuple, PyTuple_New(SIGNAL_MAP_LEN)); @@ -5972,12 +5945,10 @@ PyInit__decimal(void) Py_DECREF(base); /* add to module */ - Py_INCREF(cm->ex); - CHECK_INT(PyModule_AddObject(m, cm->name, cm->ex)); + CHECK_INT(PyModule_AddObject(m, cm->name, Py_NewRef(cm->ex))); /* add to signal tuple */ - Py_INCREF(cm->ex); - PyTuple_SET_ITEM(SignalTuple, i, cm->ex); + PyTuple_SET_ITEM(SignalTuple, i, Py_NewRef(cm->ex)); } /* @@ -6003,45 +5974,38 @@ PyInit__decimal(void) ASSIGN_PTR(cm->ex, PyErr_NewException(cm->fqname, base, NULL)); Py_DECREF(base); - Py_INCREF(cm->ex); - CHECK_INT(PyModule_AddObject(m, cm->name, cm->ex)); + CHECK_INT(PyModule_AddObject(m, cm->name, Py_NewRef(cm->ex))); } /* Init default context template first */ ASSIGN_PTR(default_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); - Py_INCREF(default_context_template); CHECK_INT(PyModule_AddObject(m, "DefaultContext", - default_context_template)); + Py_NewRef(default_context_template))); #ifndef WITH_DECIMAL_CONTEXTVAR ASSIGN_PTR(tls_context_key, PyUnicode_FromString("___DECIMAL_CTX__")); - Py_INCREF(Py_False); - CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_False)); + CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_NewRef(Py_False))); #else ASSIGN_PTR(current_context_var, PyContextVar_New("decimal_context", NULL)); - Py_INCREF(Py_True); - CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_True)); + CHECK_INT(PyModule_AddObject(m, "HAVE_CONTEXTVAR", Py_NewRef(Py_True))); #endif - Py_INCREF(Py_True); - CHECK_INT(PyModule_AddObject(m, "HAVE_THREADS", Py_True)); + CHECK_INT(PyModule_AddObject(m, "HAVE_THREADS", Py_NewRef(Py_True))); /* Init basic context template */ ASSIGN_PTR(basic_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); init_basic_context(basic_context_template); - Py_INCREF(basic_context_template); CHECK_INT(PyModule_AddObject(m, "BasicContext", - basic_context_template)); + Py_NewRef(basic_context_template))); /* Init extended context template */ ASSIGN_PTR(extended_context_template, PyObject_CallObject((PyObject *)&PyDecContext_Type, NULL)); init_extended_context(extended_context_template); - Py_INCREF(extended_context_template); CHECK_INT(PyModule_AddObject(m, "ExtendedContext", - extended_context_template)); + Py_NewRef(extended_context_template))); /* Init mpd_ssize_t constants */ @@ -6060,8 +6024,7 @@ PyInit__decimal(void) /* Init string constants */ for (i = 0; i < _PY_DEC_ROUND_GUARD; i++) { ASSIGN_PTR(round_map[i], PyUnicode_InternFromString(mpd_round_string[i])); - Py_INCREF(round_map[i]); - CHECK_INT(PyModule_AddObject(m, mpd_round_string[i], round_map[i])); + CHECK_INT(PyModule_AddObject(m, mpd_round_string[i], Py_NewRef(round_map[i]))); } /* Add specification version number */ diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 87f18d6e671a63..0c68ede42ca61d 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -12,7 +12,6 @@ */ #define PY_SSIZE_T_CLEAN -#define NEEDS_PY_IDENTIFIER #include "Python.h" #include "structmember.h" // PyMemberDef @@ -84,6 +83,15 @@ typedef struct { PyObject *elementpath_obj; PyObject *comment_factory; PyObject *pi_factory; + /* Interned 
strings */ + PyObject *str_text; + PyObject *str_tail; + PyObject *str_append; + PyObject *str_find; + PyObject *str_findtext; + PyObject *str_findall; + PyObject *str_iterfind; + PyObject *str_doctype; } elementtreestate; static struct PyModuleDef elementtreemodule; @@ -221,8 +229,7 @@ create_extra(ElementObject* self, PyObject* attrib) return -1; } - Py_XINCREF(attrib); - self->extra->attrib = attrib; + self->extra->attrib = Py_XNewRef(attrib); self->extra->length = 0; self->extra->allocated = STATIC_CHILDREN; @@ -278,16 +285,9 @@ create_new_element(PyObject* tag, PyObject* attrib) if (self == NULL) return NULL; self->extra = NULL; - - Py_INCREF(tag); - self->tag = tag; - - Py_INCREF(Py_None); - self->text = Py_None; - - Py_INCREF(Py_None); - self->tail = Py_None; - + self->tag = Py_NewRef(tag); + self->text = Py_NewRef(Py_None); + self->tail = Py_NewRef(Py_None); self->weakreflist = NULL; PyObject_GC_Track(self); @@ -307,15 +307,9 @@ element_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { ElementObject *e = (ElementObject *)type->tp_alloc(type, 0); if (e != NULL) { - Py_INCREF(Py_None); - e->tag = Py_None; - - Py_INCREF(Py_None); - e->text = Py_None; - - Py_INCREF(Py_None); - e->tail = Py_None; - + e->tag = Py_NewRef(Py_None); + e->text = Py_NewRef(Py_None); + e->tail = Py_NewRef(Py_None); e->extra = NULL; e->weakreflist = NULL; } @@ -351,8 +345,7 @@ get_attrib_from_keywords(PyObject *kwds) } attrib = PyDict_Copy(attrib); if (attrib && PyDict_DelItem(kwds, attrib_str) < 0) { - Py_DECREF(attrib); - attrib = NULL; + Py_SETREF(attrib, NULL); } } else if (!PyErr_Occurred()) { @@ -417,14 +410,10 @@ element_init(PyObject *self, PyObject *args, PyObject *kwds) Py_XDECREF(attrib); /* Replace the objects already pointed to by tag, text and tail. 
*/ - Py_INCREF(tag); - Py_XSETREF(self_elem->tag, tag); - - Py_INCREF(Py_None); - _set_joined_ptr(&self_elem->text, Py_None); + Py_XSETREF(self_elem->tag, Py_NewRef(tag)); - Py_INCREF(Py_None); - _set_joined_ptr(&self_elem->tail, Py_None); + _set_joined_ptr(&self_elem->text, Py_NewRef(Py_None)); + _set_joined_ptr(&self_elem->tail, Py_NewRef(Py_None)); return 0; } @@ -506,8 +495,7 @@ element_add_subelement(ElementObject* self, PyObject* element) if (element_resize(self, 1) < 0) return -1; - Py_INCREF(element); - self->extra->children[self->extra->length] = element; + self->extra->children[self->extra->length] = Py_NewRef(element); self->extra->length++; @@ -544,8 +532,7 @@ element_get_text(ElementObject* self) if (!tmp) return NULL; self->text = tmp; - Py_DECREF(res); - res = tmp; + Py_SETREF(res, tmp); } } @@ -566,8 +553,7 @@ element_get_tail(ElementObject* self) if (!tmp) return NULL; self->tail = tmp; - Py_DECREF(res); - res = tmp; + Py_SETREF(res, tmp); } } @@ -699,11 +685,8 @@ _elementtree_Element_clear_impl(ElementObject *self) { clear_extra(self); - Py_INCREF(Py_None); - _set_joined_ptr(&self->text, Py_None); - - Py_INCREF(Py_None); - _set_joined_ptr(&self->tail, Py_None); + _set_joined_ptr(&self->text, Py_NewRef(Py_None)); + _set_joined_ptr(&self->tail, Py_NewRef(Py_None)); Py_RETURN_NONE; } @@ -739,8 +722,7 @@ _elementtree_Element___copy___impl(ElementObject *self) } for (i = 0; i < self->extra->length; i++) { - Py_INCREF(self->extra->children[i]); - element->extra->children[i] = self->extra->children[i]; + element->extra->children[i] = Py_NewRef(self->extra->children[i]); } assert(!element->extra->length); @@ -855,8 +837,7 @@ deepcopy(PyObject *object, PyObject *memo) /* Fast paths */ if (object == Py_None || PyUnicode_CheckExact(object)) { - Py_INCREF(object); - return object; + return Py_NewRef(object); } if (Py_REFCNT(object) == 1) { @@ -942,14 +923,12 @@ _elementtree_Element___getstate___impl(ElementObject *self) if (!children) return NULL; for (i = 0; i < PyList_GET_SIZE(children); i++) { - PyObject *child = self->extra->children[i]; - Py_INCREF(child); + PyObject *child = Py_NewRef(self->extra->children[i]); PyList_SET_ITEM(children, i, child); } if (self->extra && self->extra->attrib) { - attrib = self->extra->attrib; - Py_INCREF(attrib); + attrib = Py_NewRef(self->extra->attrib); } else { attrib = PyDict_New(); @@ -983,8 +962,7 @@ element_setstate_from_attributes(ElementObject *self, return NULL; } - Py_INCREF(tag); - Py_XSETREF(self->tag, tag); + Py_XSETREF(self->tag, Py_NewRef(tag)); text = text ? JOIN_SET(text, PyList_CheckExact(text)) : Py_None; Py_INCREF(JOIN_OBJ(text)); @@ -1035,8 +1013,7 @@ element_setstate_from_attributes(ElementObject *self, dealloc_extra(oldextra); return NULL; } - Py_INCREF(child); - self->extra->children[i] = child; + self->extra->children[i] = Py_NewRef(child); } assert(!self->extra->length); @@ -1049,8 +1026,7 @@ element_setstate_from_attributes(ElementObject *self, } /* Stash attrib. 
*/ - Py_XINCREF(attrib); - Py_XSETREF(self->extra->attrib, attrib); + Py_XSETREF(self->extra->attrib, Py_XNewRef(attrib)); dealloc_extra(oldextra); Py_RETURN_NONE; @@ -1187,8 +1163,7 @@ _elementtree_Element_extend(ElementObject *self, PyObject *elements) } for (i = 0; i < PySequence_Fast_GET_SIZE(seq); i++) { - PyObject* element = PySequence_Fast_GET_ITEM(seq, i); - Py_INCREF(element); + PyObject* element = Py_NewRef(PySequence_Fast_GET_ITEM(seq, i)); if (element_add_subelement(self, element) < 0) { Py_DECREF(seq); Py_DECREF(element); @@ -1219,9 +1194,8 @@ _elementtree_Element_find_impl(ElementObject *self, PyObject *path, elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) { - _Py_IDENTIFIER(find); - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_find, self, path, namespaces, NULL + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_find, self, path, namespaces, NULL ); } @@ -1260,18 +1234,16 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyObject *path, /*[clinic end generated code: output=83b3ba4535d308d2 input=b53a85aa5aa2a916]*/ { Py_ssize_t i; - _Py_IDENTIFIER(findtext); elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_findtext, + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_findtext, self, path, default_value, namespaces, NULL ); if (!self->extra) { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } for (i = 0; i < self->extra->length; i++) { @@ -1295,8 +1267,7 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyObject *path, return NULL; } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } /*[clinic input] @@ -1317,9 +1288,8 @@ _elementtree_Element_findall_impl(ElementObject *self, PyObject *path, elementtreestate *st = ET_STATE_GLOBAL; if (checkpath(path) || namespaces != Py_None) { - _Py_IDENTIFIER(findall); - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_findall, self, path, namespaces, NULL + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_findall, self, path, namespaces, NULL ); } @@ -1361,11 +1331,10 @@ _elementtree_Element_iterfind_impl(ElementObject *self, PyObject *path, /*[clinic end generated code: output=ecdd56d63b19d40f input=abb974e350fb65c7]*/ { PyObject* tag = path; - _Py_IDENTIFIER(iterfind); elementtreestate *st = ET_STATE_GLOBAL; - return _PyObject_CallMethodIdObjArgs( - st->elementpath_obj, &PyId_iterfind, self, tag, namespaces, NULL); + return PyObject_CallMethodObjArgs( + st->elementpath_obj, st->str_iterfind, self, tag, namespaces, NULL); } /*[clinic input] @@ -1382,18 +1351,15 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key, /*[clinic end generated code: output=523c614142595d75 input=ee153bbf8cdb246e]*/ { if (self->extra && self->extra->attrib) { - PyObject *attrib = self->extra->attrib; - Py_INCREF(attrib); - PyObject *value = PyDict_GetItemWithError(attrib, key); - Py_XINCREF(value); + PyObject *attrib = Py_NewRef(self->extra->attrib); + PyObject *value = Py_XNewRef(PyDict_GetItemWithError(attrib, key)); Py_DECREF(attrib); if (value != NULL || PyErr_Occurred()) { return value; } } - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } static PyObject * @@ -1452,8 +1418,7 @@ element_getitem(PyObject* self_, Py_ssize_t index) return NULL; } - 
Py_INCREF(self->extra->children[index]); - return self->extra->children[index]; + return Py_NewRef(self->extra->children[index]); } /*[clinic input] @@ -1491,8 +1456,7 @@ _elementtree_Element_insert_impl(ElementObject *self, Py_ssize_t index, for (i = self->extra->length; i > index; i--) self->extra->children[i] = self->extra->children[i-1]; - Py_INCREF(subelement); - self->extra->children[index] = subelement; + self->extra->children[index] = Py_NewRef(subelement); self->extra->length++; @@ -1693,8 +1657,7 @@ element_setitem(PyObject* self_, Py_ssize_t index, PyObject* item) raise_type_error(item); return -1; } - Py_INCREF(item); - self->extra->children[index] = item; + self->extra->children[index] = Py_NewRef(item); } else { self->extra->length--; for (i = index; i < self->extra->length; i++) @@ -1744,8 +1707,7 @@ element_subscr(PyObject* self_, PyObject* item) for (cur = start, i = 0; i < slicelen; cur += step, i++) { - PyObject* item = self->extra->children[cur]; - Py_INCREF(item); + PyObject* item = Py_NewRef(self->extra->children[cur]); PyList_SET_ITEM(list, i, item); } @@ -1925,8 +1887,7 @@ element_ass_subscr(PyObject* self_, PyObject* item, PyObject* value) for (cur = start, i = 0; i < newlen; cur += step, i++) { PyObject* element = PySequence_Fast_GET_ITEM(seq, i); - Py_INCREF(element); - self->extra->children[cur] = element; + self->extra->children[cur] = Py_NewRef(element); } self->extra->length += newlen - slicelen; @@ -1949,24 +1910,21 @@ static PyObject* element_tag_getter(ElementObject *self, void *closure) { PyObject *res = self->tag; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject* element_text_getter(ElementObject *self, void *closure) { PyObject *res = element_get_text(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } static PyObject* element_tail_getter(ElementObject *self, void *closure) { PyObject *res = element_get_tail(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } static PyObject* @@ -1978,8 +1936,7 @@ element_attrib_getter(ElementObject *self, void *closure) return NULL; } res = element_get_attrib(self); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } /* macro for setter validation */ @@ -1995,8 +1952,7 @@ static int element_tag_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - Py_SETREF(self->tag, value); + Py_SETREF(self->tag, Py_NewRef(value)); return 0; } @@ -2004,8 +1960,7 @@ static int element_text_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - _set_joined_ptr(&self->text, value); + _set_joined_ptr(&self->text, Py_NewRef(value)); return 0; } @@ -2013,8 +1968,7 @@ static int element_tail_setter(ElementObject *self, PyObject *value, void *closure) { _VALIDATE_ATTR_VALUE(value); - Py_INCREF(value); - _set_joined_ptr(&self->tail, value); + _set_joined_ptr(&self->tail, Py_NewRef(value)); return 0; } @@ -2032,8 +1986,7 @@ element_attrib_setter(ElementObject *self, PyObject *value, void *closure) if (create_extra(self, NULL) < 0) return -1; } - Py_INCREF(value); - Py_XSETREF(self->extra->attrib, value); + Py_XSETREF(self->extra->attrib, Py_NewRef(value)); return 0; } @@ -2119,8 +2072,7 @@ parent_stack_push_new(ElementIterObject *it, ElementObject *parent) it->parent_stack_size = new_size; } item = it->parent_stack + it->parent_stack_used++; - Py_INCREF(parent); - item->parent = parent; + item->parent = (ElementObject*)Py_NewRef(parent); item->child_index = 0; 
return 0; } @@ -2181,9 +2133,8 @@ elementiter_next(ElementIterObject *it) } assert(Element_Check(extra->children[child_index])); - elem = (ElementObject *)extra->children[child_index]; + elem = (ElementObject *)Py_NewRef(extra->children[child_index]); item->child_index++; - Py_INCREF(elem); } if (parent_stack_push_new(it, elem) < 0) { @@ -2287,11 +2238,9 @@ create_elementiter(ElementObject *self, PyObject *tag, int gettext) if (!it) return NULL; - Py_INCREF(tag); - it->sought_tag = tag; + it->sought_tag = Py_NewRef(tag); it->gettext = gettext; - Py_INCREF(self); - it->root_element = self; + it->root_element = (ElementObject*)Py_NewRef(self); it->parent_stack = PyMem_New(ParentLocator, INIT_PARENT_STACK_SIZE); if (it->parent_stack == NULL) { @@ -2353,12 +2302,8 @@ treebuilder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) TreeBuilderObject *t = (TreeBuilderObject *)type->tp_alloc(type, 0); if (t != NULL) { t->root = NULL; - - Py_INCREF(Py_None); - t->this = Py_None; - Py_INCREF(Py_None); - t->last = Py_None; - + t->this = Py_NewRef(Py_None); + t->last = Py_NewRef(Py_None); t->data = NULL; t->element_factory = NULL; t->comment_factory = NULL; @@ -2402,8 +2347,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, /*[clinic end generated code: output=8571d4dcadfdf952 input=ae98a94df20b5cc3]*/ { if (element_factory != Py_None) { - Py_INCREF(element_factory); - Py_XSETREF(self->element_factory, element_factory); + Py_XSETREF(self->element_factory, Py_NewRef(element_factory)); } else { Py_CLEAR(self->element_factory); } @@ -2413,8 +2357,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, comment_factory = st->comment_factory; } if (comment_factory) { - Py_INCREF(comment_factory); - Py_XSETREF(self->comment_factory, comment_factory); + Py_XSETREF(self->comment_factory, Py_NewRef(comment_factory)); self->insert_comments = insert_comments; } else { Py_CLEAR(self->comment_factory); @@ -2426,8 +2369,7 @@ _elementtree_TreeBuilder___init___impl(TreeBuilderObject *self, pi_factory = st->pi_factory; } if (pi_factory) { - Py_INCREF(pi_factory); - Py_XSETREF(self->pi_factory, pi_factory); + Py_XSETREF(self->pi_factory, Py_NewRef(pi_factory)); self->insert_pis = insert_pis; } else { Py_CLEAR(self->pi_factory); @@ -2530,14 +2472,12 @@ _elementtree__set_factories_impl(PyObject *module, PyObject *comment_factory, if (comment_factory == Py_None) { Py_CLEAR(st->comment_factory); } else { - Py_INCREF(comment_factory); - Py_XSETREF(st->comment_factory, comment_factory); + Py_XSETREF(st->comment_factory, Py_NewRef(comment_factory)); } if (pi_factory == Py_None) { Py_CLEAR(st->pi_factory); } else { - Py_INCREF(pi_factory); - Py_XSETREF(st->pi_factory, pi_factory); + Py_XSETREF(st->pi_factory, Py_NewRef(pi_factory)); } return old; @@ -2545,7 +2485,7 @@ _elementtree__set_factories_impl(PyObject *module, PyObject *comment_factory, static int treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, - PyObject **dest, _Py_Identifier *name) + PyObject **dest, PyObject *name) { /* Fast paths for the "almost always" cases. 
*/ if (Element_CheckExact(element)) { @@ -2569,7 +2509,7 @@ treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, { int r; PyObject* joined; - PyObject* previous = _PyObject_GetAttrId(element, name); + PyObject* previous = PyObject_GetAttr(element, name); if (!previous) return -1; joined = list_join(*data); @@ -2588,7 +2528,7 @@ treebuilder_extend_element_text_or_tail(PyObject *element, PyObject **data, Py_DECREF(previous); } - r = _PyObject_SetAttrId(element, name, joined); + r = PyObject_SetAttr(element, name, joined); Py_DECREF(joined); if (r < 0) return -1; @@ -2603,34 +2543,32 @@ treebuilder_flush_data(TreeBuilderObject* self) if (!self->data) { return 0; } - + elementtreestate *st = ET_STATE_GLOBAL; if (!self->last_for_tail) { PyObject *element = self->last; - _Py_IDENTIFIER(text); return treebuilder_extend_element_text_or_tail( element, &self->data, - &((ElementObject *) element)->text, &PyId_text); + &((ElementObject *) element)->text, st->str_text); } else { PyObject *element = self->last_for_tail; - _Py_IDENTIFIER(tail); return treebuilder_extend_element_text_or_tail( element, &self->data, - &((ElementObject *) element)->tail, &PyId_tail); + &((ElementObject *) element)->tail, st->str_tail); } } static int treebuilder_add_subelement(PyObject *element, PyObject *child) { - _Py_IDENTIFIER(append); + elementtreestate *st = ET_STATE_GLOBAL; if (Element_CheckExact(element)) { ElementObject *elem = (ElementObject *) element; return element_add_subelement(elem, child); } else { PyObject *res; - res = _PyObject_CallMethodIdOneArg(element, &PyId_append, child); + res = PyObject_CallMethodOneArg(element, st->str_append, child); if (res == NULL) return -1; Py_DECREF(res); @@ -2703,8 +2641,7 @@ treebuilder_handle_start(TreeBuilderObject* self, PyObject* tag, ); goto error; } - Py_INCREF(node); - self->root = node; + self->root = Py_NewRef(node); } if (self->index < PyList_GET_SIZE(self->stack)) { @@ -2717,10 +2654,8 @@ treebuilder_handle_start(TreeBuilderObject* self, PyObject* tag, } self->index++; - Py_INCREF(node); - Py_SETREF(self->this, node); - Py_INCREF(node); - Py_SETREF(self->last, node); + Py_SETREF(self->this, Py_NewRef(node)); + Py_SETREF(self->last, Py_NewRef(node)); if (treebuilder_append_event(self, self->start_event_obj, node) < 0) goto error; @@ -2741,7 +2676,7 @@ treebuilder_handle_data(TreeBuilderObject* self, PyObject* data) Py_RETURN_NONE; } /* store the first item as is */ - Py_INCREF(data); self->data = data; + self->data = Py_NewRef(data); } else { /* more than one item; use a list to collect items */ if (PyBytes_CheckExact(self->data) && Py_REFCNT(self->data) == 1 && @@ -2760,9 +2695,9 @@ treebuilder_handle_data(TreeBuilderObject* self, PyObject* data) PyObject* list = PyList_New(2); if (!list) return NULL; - PyList_SET_ITEM(list, 0, self->data); - Py_INCREF(data); PyList_SET_ITEM(list, 1, data); - self->data = list; + PyList_SET_ITEM(list, 0, Py_NewRef(self->data)); + PyList_SET_ITEM(list, 1, Py_NewRef(data)); + Py_SETREF(self->data, list); } } @@ -2787,19 +2722,16 @@ treebuilder_handle_end(TreeBuilderObject* self, PyObject* tag) } item = self->last; - self->last = self->this; - Py_INCREF(self->last); + self->last = Py_NewRef(self->this); Py_XSETREF(self->last_for_tail, self->last); self->index--; - self->this = PyList_GET_ITEM(self->stack, self->index); - Py_INCREF(self->this); + self->this = Py_NewRef(PyList_GET_ITEM(self->stack, self->index)); Py_DECREF(item); if (treebuilder_append_event(self, self->end_event_obj, self->last) < 0) return 
NULL; - Py_INCREF(self->last); - return (PyObject*) self->last; + return Py_NewRef(self->last); } LOCAL(PyObject*) @@ -2821,12 +2753,10 @@ treebuilder_handle_comment(TreeBuilderObject* self, PyObject* text) if (self->insert_comments && this != Py_None) { if (treebuilder_add_subelement(this, comment) < 0) goto error; - Py_INCREF(comment); - Py_XSETREF(self->last_for_tail, comment); + Py_XSETREF(self->last_for_tail, Py_NewRef(comment)); } } else { - Py_INCREF(text); - comment = text; + comment = Py_NewRef(text); } if (self->events_append && self->comment_event_obj) { @@ -2862,8 +2792,7 @@ treebuilder_handle_pi(TreeBuilderObject* self, PyObject* target, PyObject* text) if (self->insert_pis && this != Py_None) { if (treebuilder_add_subelement(this, pi) < 0) goto error; - Py_INCREF(pi); - Py_XSETREF(self->last_for_tail, pi); + Py_XSETREF(self->last_for_tail, Py_NewRef(pi)); } } else { pi = PyTuple_Pack(2, target, text); @@ -2994,8 +2923,7 @@ treebuilder_done(TreeBuilderObject* self) else res = Py_None; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -3083,12 +3011,9 @@ makeuniversal(XMLParserObject* self, const char* string) if (!key) return NULL; - value = PyDict_GetItemWithError(self->names, key); + value = Py_XNewRef(PyDict_GetItemWithError(self->names, key)); - if (value) { - Py_INCREF(value); - } - else if (!PyErr_Occurred()) { + if (value == NULL && !PyErr_Occurred()) { /* new name. convert to universal name, and decode as necessary */ @@ -3113,8 +3038,7 @@ makeuniversal(XMLParserObject* self, const char* string) size++; } else { /* plain name; use key as tag */ - Py_INCREF(key); - tag = key; + tag = Py_NewRef(key); } /* decode universal name */ @@ -3486,7 +3410,6 @@ expat_start_doctype_handler(XMLParserObject *self, const XML_Char *pubid, int has_internal_subset) { - _Py_IDENTIFIER(doctype); PyObject *doctype_name_obj, *sysid_obj, *pubid_obj; PyObject *res; @@ -3504,8 +3427,7 @@ expat_start_doctype_handler(XMLParserObject *self, return; } } else { - Py_INCREF(Py_None); - sysid_obj = Py_None; + sysid_obj = Py_NewRef(Py_None); } if (pubid) { @@ -3516,10 +3438,10 @@ expat_start_doctype_handler(XMLParserObject *self, return; } } else { - Py_INCREF(Py_None); - pubid_obj = Py_None; + pubid_obj = Py_NewRef(Py_None); } + elementtreestate *st = ET_STATE_GLOBAL; /* If the target has a handler for doctype, call it. */ if (self->handle_doctype) { res = PyObject_CallFunctionObjArgs(self->handle_doctype, @@ -3527,7 +3449,7 @@ expat_start_doctype_handler(XMLParserObject *self, sysid_obj, NULL); Py_XDECREF(res); } - else if (_PyObject_LookupAttrId((PyObject *)self, &PyId_doctype, &res) > 0) { + else if (_PyObject_LookupAttr((PyObject *)self, st->str_doctype, &res) > 0) { (void)PyErr_WarnEx(PyExc_RuntimeWarning, "The doctype() method of XMLParser is ignored. 
" "Define doctype() method on the TreeBuilder target.", @@ -4077,39 +3999,37 @@ _elementtree_XMLParser__setevents_impl(XMLParserObject *self, return NULL; } - Py_INCREF(event_name_obj); if (strcmp(event_name, "start") == 0) { - Py_XSETREF(target->start_event_obj, event_name_obj); + Py_XSETREF(target->start_event_obj, Py_NewRef(event_name_obj)); } else if (strcmp(event_name, "end") == 0) { - Py_XSETREF(target->end_event_obj, event_name_obj); + Py_XSETREF(target->end_event_obj, Py_NewRef(event_name_obj)); } else if (strcmp(event_name, "start-ns") == 0) { - Py_XSETREF(target->start_ns_event_obj, event_name_obj); + Py_XSETREF(target->start_ns_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetNamespaceDeclHandler)( self->parser, (XML_StartNamespaceDeclHandler) expat_start_ns_handler, (XML_EndNamespaceDeclHandler) expat_end_ns_handler ); } else if (strcmp(event_name, "end-ns") == 0) { - Py_XSETREF(target->end_ns_event_obj, event_name_obj); + Py_XSETREF(target->end_ns_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetNamespaceDeclHandler)( self->parser, (XML_StartNamespaceDeclHandler) expat_start_ns_handler, (XML_EndNamespaceDeclHandler) expat_end_ns_handler ); } else if (strcmp(event_name, "comment") == 0) { - Py_XSETREF(target->comment_event_obj, event_name_obj); + Py_XSETREF(target->comment_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetCommentHandler)( self->parser, (XML_CommentHandler) expat_comment_handler ); } else if (strcmp(event_name, "pi") == 0) { - Py_XSETREF(target->pi_event_obj, event_name_obj); + Py_XSETREF(target->pi_event_obj, Py_NewRef(event_name_obj)); EXPAT(SetProcessingInstructionHandler)( self->parser, (XML_ProcessingInstructionHandler) expat_pi_handler ); } else { - Py_DECREF(event_name_obj); Py_DECREF(events_seq); PyErr_Format(PyExc_ValueError, "unknown event '%s'", event_name); return NULL; @@ -4375,8 +4295,7 @@ PyInit__elementtree(void) m = PyState_FindModule(&elementtreemodule); if (m) { - Py_INCREF(m); - return m; + return Py_NewRef(m); } /* Initialize object types */ @@ -4420,12 +4339,42 @@ PyInit__elementtree(void) return NULL; } + st->str_append = PyUnicode_InternFromString("append"); + if (st->str_append == NULL) { + return NULL; + } + st->str_find = PyUnicode_InternFromString("find"); + if (st->str_find == NULL) { + return NULL; + } + st->str_findall = PyUnicode_InternFromString("findall"); + if (st->str_findall == NULL) { + return NULL; + } + st->str_findtext = PyUnicode_InternFromString("findtext"); + if (st->str_findtext == NULL) { + return NULL; + } + st->str_iterfind = PyUnicode_InternFromString("iterfind"); + if (st->str_iterfind == NULL) { + return NULL; + } + st->str_tail = PyUnicode_InternFromString("tail"); + if (st->str_tail == NULL) { + return NULL; + } + st->str_text = PyUnicode_InternFromString("text"); + if (st->str_text == NULL) { + return NULL; + } + st->str_doctype = PyUnicode_InternFromString("doctype"); + if (st->str_doctype == NULL) { + return NULL; + } st->parseerror_obj = PyErr_NewException( "xml.etree.ElementTree.ParseError", PyExc_SyntaxError, NULL ); - Py_INCREF(st->parseerror_obj); - if (PyModule_AddObject(m, "ParseError", st->parseerror_obj) < 0) { - Py_DECREF(st->parseerror_obj); + if (PyModule_AddObjectRef(m, "ParseError", st->parseerror_obj) < 0) { return NULL; } diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 3abb7fd710aeb4..4032ba79374fa4 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() +#include 
"pycore_dict.h" // _PyDict_Pop_KnownHash() #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_object.h" // _PyObject_GC_TRACK @@ -7,6 +8,13 @@ #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "structmember.h" // PyMemberDef +#include "clinic/_functoolsmodule.c.h" +/*[clinic input] +module _functools +class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bece4053896b09c0]*/ + /* _functools module written and maintained by Hye-Shik Chang with adaptations by Raymond Hettinger @@ -58,6 +66,7 @@ get_functools_state_by_type(PyTypeObject *type) return get_functools_state(module); } +// Not converted to argument clinic, because of `*args, **kwargs` arguments. static PyObject * partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) { @@ -97,8 +106,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) if (pto == NULL) return NULL; - pto->fn = func; - Py_INCREF(func); + pto->fn = Py_NewRef(func); nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX); if (nargs == NULL) { @@ -123,8 +131,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) pto->kw = PyDict_New(); } else if (Py_REFCNT(kw) == 1) { - Py_INCREF(kw); - pto->kw = kw; + pto->kw = Py_NewRef(kw); } else { pto->kw = PyDict_Copy(kw); @@ -282,6 +289,7 @@ partial_setvectorcall(partialobject *pto) } +// Not converted to argument clinic, because of `*args, **kwargs` arguments. static PyObject * partial_call(partialobject *pto, PyObject *args, PyObject *kwargs) { @@ -293,8 +301,7 @@ partial_call(partialobject *pto, PyObject *args, PyObject *kwargs) PyObject *kwargs2; if (PyDict_GET_SIZE(pto->kw) == 0) { /* kwargs can be NULL */ - kwargs2 = kwargs; - Py_XINCREF(kwargs2); + kwargs2 = Py_XNewRef(kwargs); } else { /* bpo-27840, bpo-29318: dictionary of keyword parameters must be @@ -454,8 +461,7 @@ partial_setstate(partialobject *pto, PyObject *state) else Py_INCREF(dict); - Py_INCREF(fn); - Py_SETREF(pto->fn, fn); + Py_SETREF(pto->fn, Py_NewRef(fn)); Py_SETREF(pto->args, fnargs); Py_SETREF(pto->kw, kw); Py_XSETREF(pto->dict, dict); @@ -579,10 +585,8 @@ keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds) if (result == NULL) { return NULL; } - Py_INCREF(ko->cmp); - result->cmp = ko->cmp; - Py_INCREF(object); - result->object = object; + result->cmp = Py_NewRef(ko->cmp); + result->object = Py_NewRef(object); PyObject_GC_Track(result); return (PyObject *)result; } @@ -625,33 +629,36 @@ keyobject_richcompare(PyObject *ko, PyObject *other, int op) return answer; } +/*[clinic input] +_functools.cmp_to_key + + mycmp: object + Function that compares two objects. + +Convert a cmp= function into a key= function. 
+[clinic start generated code]*/ + static PyObject * -functools_cmp_to_key(PyObject *self, PyObject *args, PyObject *kwds) +_functools_cmp_to_key_impl(PyObject *module, PyObject *mycmp) +/*[clinic end generated code: output=71eaad0f4fc81f33 input=d1b76f231c0dfeb3]*/ { - PyObject *cmp; - static char *kwargs[] = {"mycmp", NULL}; keyobject *object; _functools_state *state; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:cmp_to_key", kwargs, &cmp)) - return NULL; - - state = get_functools_state(self); + state = get_functools_state(module); object = PyObject_GC_New(keyobject, state->keyobject_type); if (!object) return NULL; - Py_INCREF(cmp); - object->cmp = cmp; + object->cmp = Py_NewRef(mycmp); object->object = NULL; PyObject_GC_Track(object); return (PyObject *)object; } -PyDoc_STRVAR(functools_cmp_to_key_doc, -"Convert a cmp= function into a key= function."); - /* reduce (used to be a builtin) ********************************************/ +// Not converted to argument clinic, because of `args` in-place modification. +// AC will affect performance. static PyObject * functools_reduce(PyObject *self, PyObject *args) { @@ -824,12 +831,10 @@ lru_cache_make_key(PyObject *kwd_mark, PyObject *args, if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) { /* For common scalar keys, save space by dropping the enclosing args tuple */ - Py_INCREF(key); - return key; + return Py_NewRef(key); } } - Py_INCREF(args); - return args; + return Py_NewRef(args); } key_size = PyTuple_GET_SIZE(args); @@ -845,31 +850,25 @@ lru_cache_make_key(PyObject *kwd_mark, PyObject *args, key_pos = 0; for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) { PyObject *item = PyTuple_GET_ITEM(args, pos); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } if (kwds_size) { - Py_INCREF(kwd_mark); - PyTuple_SET_ITEM(key, key_pos++, kwd_mark); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(kwd_mark)); for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) { - Py_INCREF(keyword); - PyTuple_SET_ITEM(key, key_pos++, keyword); - Py_INCREF(value); - PyTuple_SET_ITEM(key, key_pos++, value); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(keyword)); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(value)); } assert(key_pos == PyTuple_GET_SIZE(args) + kwds_size * 2 + 1); } if (typed) { for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) { PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos)); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } if (kwds_size) { for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) { PyObject *item = (PyObject *)Py_TYPE(value); - Py_INCREF(item); - PyTuple_SET_ITEM(key, key_pos++, item); + PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item)); } } } @@ -1071,8 +1070,7 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds return NULL; } lru_cache_append_link(self, link); - Py_INCREF(result); /* for return */ - return result; + return Py_NewRef(result); } /* Since the cache is full, we need to evict an old key and add a new key. 
Rather than free the old link and allocate a new @@ -1217,16 +1215,12 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw) obj->wrapper = wrapper; obj->typed = typed; obj->cache = cachedict; - Py_INCREF(func); - obj->func = func; + obj->func = Py_NewRef(func); obj->misses = obj->hits = 0; obj->maxsize = maxsize; - Py_INCREF(state->kwd_mark); - obj->kwd_mark = state->kwd_mark; - Py_INCREF(state->lru_list_elem_type); - obj->lru_list_elem_type = state->lru_list_elem_type; - Py_INCREF(cache_info_type); - obj->cache_info_type = cache_info_type; + obj->kwd_mark = Py_NewRef(state->kwd_mark); + obj->lru_list_elem_type = (PyTypeObject*)Py_NewRef(state->lru_list_elem_type); + obj->cache_info_type = Py_NewRef(cache_info_type); obj->dict = NULL; obj->weakreflist = NULL; return (PyObject *)obj; @@ -1249,8 +1243,7 @@ lru_cache_clear_list(lru_list_elem *link) { while (link != NULL) { lru_list_elem *next = link->next; - Py_DECREF(link); - link = next; + Py_SETREF(link, next); } } @@ -1293,31 +1286,46 @@ static PyObject * lru_cache_descr_get(PyObject *self, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } return PyMethod_New(self, obj); } +/*[clinic input] +_functools._lru_cache_wrapper.cache_info + +Report cache statistics +[clinic start generated code]*/ + static PyObject * -lru_cache_cache_info(lru_cache_object *self, PyObject *unused) +_functools__lru_cache_wrapper_cache_info_impl(PyObject *self) +/*[clinic end generated code: output=cc796a0b06dbd717 input=f05e5b6ebfe38645]*/ { - if (self->maxsize == -1) { - return PyObject_CallFunction(self->cache_info_type, "nnOn", - self->hits, self->misses, Py_None, - PyDict_GET_SIZE(self->cache)); - } - return PyObject_CallFunction(self->cache_info_type, "nnnn", - self->hits, self->misses, self->maxsize, - PyDict_GET_SIZE(self->cache)); + lru_cache_object *_self = (lru_cache_object *) self; + if (_self->maxsize == -1) { + return PyObject_CallFunction(_self->cache_info_type, "nnOn", + _self->hits, _self->misses, Py_None, + PyDict_GET_SIZE(_self->cache)); + } + return PyObject_CallFunction(_self->cache_info_type, "nnnn", + _self->hits, _self->misses, _self->maxsize, + PyDict_GET_SIZE(_self->cache)); } +/*[clinic input] +_functools._lru_cache_wrapper.cache_clear + +Clear the cache and cache statistics +[clinic start generated code]*/ + static PyObject * -lru_cache_cache_clear(lru_cache_object *self, PyObject *unused) +_functools__lru_cache_wrapper_cache_clear_impl(PyObject *self) +/*[clinic end generated code: output=58423b35efc3e381 input=6ca59dba09b12584]*/ { - lru_list_elem *list = lru_cache_unlink_list(self); - self->hits = self->misses = 0; - PyDict_Clear(self->cache); + lru_cache_object *_self = (lru_cache_object *) self; + lru_list_elem *list = lru_cache_unlink_list(_self); + _self->hits = _self->misses = 0; + PyDict_Clear(_self->cache); lru_cache_clear_list(list); Py_RETURN_NONE; } @@ -1331,15 +1339,13 @@ lru_cache_reduce(PyObject *self, PyObject *unused) static PyObject * lru_cache_copy(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * lru_cache_deepcopy(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static int @@ -1381,8 +1387,8 @@ cache_info_type: namedtuple class with the fields:\n\ ); static PyMethodDef lru_cache_methods[] = { - {"cache_info", (PyCFunction)lru_cache_cache_info, METH_NOARGS}, - {"cache_clear", (PyCFunction)lru_cache_cache_clear, 
METH_NOARGS}, + _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_INFO_METHODDEF + _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_CLEAR_METHODDEF {"__reduce__", (PyCFunction)lru_cache_reduce, METH_NOARGS}, {"__copy__", (PyCFunction)lru_cache_copy, METH_VARARGS}, {"__deepcopy__", (PyCFunction)lru_cache_deepcopy, METH_VARARGS}, @@ -1432,8 +1438,7 @@ PyDoc_STRVAR(_functools_doc, static PyMethodDef _functools_methods[] = { {"reduce", functools_reduce, METH_VARARGS, functools_reduce_doc}, - {"cmp_to_key", _PyCFunction_CAST(functools_cmp_to_key), - METH_VARARGS | METH_KEYWORDS, functools_cmp_to_key_doc}, + _FUNCTOOLS_CMP_TO_KEY_METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index a96d32306d5550..4e8acdefc722b2 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -256,8 +256,7 @@ _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) res = gdbm_subscript(self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } return res; } @@ -566,8 +565,7 @@ _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) static PyObject * gdbm__enter__(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -677,7 +675,6 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, return NULL; } for (flags++; *flags != '\0'; flags++) { - char buf[40]; switch (*flags) { #ifdef GDBM_FAST case 'f': @@ -695,9 +692,8 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, break; #endif default: - PyOS_snprintf(buf, sizeof(buf), "Flag '%c' is not supported.", - *flags); - PyErr_SetString(state->gdbm_error, buf); + PyErr_Format(state->gdbm_error, + "Flag '%c' is not supported.", (unsigned char)*flags); return NULL; } } diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 3dbaaa0a0da1d6..07ddc7b0851241 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -197,8 +197,7 @@ heapreplace_internal(PyObject *heap, PyObject *item, int siftup_func(PyListObjec } returnitem = PyList_GET_ITEM(heap, 0); - Py_INCREF(item); - PyList_SET_ITEM(heap, 0, item); + PyList_SET_ITEM(heap, 0, Py_NewRef(item)); if (siftup_func((PyListObject *)heap, 0)) { Py_DECREF(returnitem); return NULL; @@ -253,8 +252,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) int cmp; if (PyList_GET_SIZE(heap) == 0) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } PyObject* top = PyList_GET_ITEM(heap, 0); @@ -264,8 +262,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) if (cmp < 0) return NULL; if (cmp == 0) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } if (PyList_GET_SIZE(heap) == 0) { @@ -274,8 +271,7 @@ _heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item) } returnitem = PyList_GET_ITEM(heap, 0); - Py_INCREF(item); - PyList_SET_ITEM(heap, 0, item); + PyList_SET_ITEM(heap, 0, Py_NewRef(item)); if (siftup((PyListObject *)heap, 0)) { Py_DECREF(returnitem); return NULL; @@ -410,8 +406,7 @@ siftdown_max(PyListObject *heap, Py_ssize_t startpos, Py_ssize_t pos) newitem = arr[pos]; while (pos > startpos) { parentpos = (pos - 1) >> 1; - parent = arr[parentpos]; - Py_INCREF(parent); + parent = Py_NewRef(arr[parentpos]); Py_INCREF(newitem); cmp = PyObject_RichCompareBool(parent, newitem, Py_LT); Py_DECREF(parent); diff --git a/Modules/_io/_iomodule.c 
b/Modules/_io/_iomodule.c index 38ef24637b7318..121d9617e1883b 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -212,8 +212,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, is_number = PyNumber_Check(file); if (is_number) { - path_or_fd = file; - Py_INCREF(path_or_fd); + path_or_fd = Py_NewRef(file); } else { path_or_fd = PyOS_FSPath(file); if (path_or_fd == NULL) { @@ -335,8 +334,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, goto error; result = raw; - Py_DECREF(path_or_fd); - path_or_fd = NULL; + Py_SETREF(path_or_fd, NULL); modeobj = PyUnicode_FromString(mode); if (modeobj == NULL) @@ -489,8 +487,7 @@ _io_text_encoding_impl(PyObject *module, PyObject *encoding, int stacklevel) encoding = &_Py_ID(locale); } } - Py_INCREF(encoding); - return encoding; + return Py_NewRef(encoding); } @@ -697,16 +694,15 @@ PyInit__io(void) "UnsupportedOperation", PyExc_OSError, PyExc_ValueError); if (state->unsupported_operation == NULL) goto fail; - Py_INCREF(state->unsupported_operation); if (PyModule_AddObject(m, "UnsupportedOperation", - state->unsupported_operation) < 0) + Py_NewRef(state->unsupported_operation)) < 0) goto fail; /* BlockingIOError, for compatibility */ - Py_INCREF(PyExc_BlockingIOError); - if (PyModule_AddObject(m, "BlockingIOError", - (PyObject *) PyExc_BlockingIOError) < 0) + if (PyModule_AddObjectRef(m, "BlockingIOError", + (PyObject *) PyExc_BlockingIOError) < 0) { goto fail; + } // Set type base classes PyFileIO_Type.tp_base = &PyRawIOBase_Type; diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index 4a4a1992dbbb7a..e2610931caae07 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -481,8 +481,7 @@ buffered_close(buffered *self, PyObject *args) if (r < 0) goto end; if (r > 0) { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); goto end; } @@ -1007,8 +1006,7 @@ _buffered_readinto_generic(buffered *self, Py_buffer *buffer, char readinto1) break; if (n < 0) { if (n == -2) { - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); } goto end; } @@ -1422,8 +1420,7 @@ _io_BufferedReader___init___impl(buffered *self, PyObject *raw, if (_PyIOBase_check_readable(raw, Py_True) == NULL) return -1; - Py_INCREF(raw); - Py_XSETREF(self->raw, raw); + Py_XSETREF(self->raw, Py_NewRef(raw)); self->buffer_size = buffer_size; self->readable = 1; self->writable = 0; diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index 930ef7e29dbcf6..41be3497506d3f 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -324,8 +324,7 @@ _io_BytesIO_getbuffer_impl(bytesio *self) buf = (bytesiobuf *) type->tp_alloc(type, 0); if (buf == NULL) return NULL; - Py_INCREF(self); - buf->source = self; + buf->source = (bytesio*)Py_NewRef(self); view = PyMemoryView_FromObject((PyObject *) buf); Py_DECREF(buf); return view; @@ -356,8 +355,7 @@ _io_BytesIO_getvalue_impl(bytesio *self) return NULL; } } - Py_INCREF(self->buf); - return self->buf; + return Py_NewRef(self->buf); } /*[clinic input] @@ -401,8 +399,7 @@ read_bytes(bytesio *self, Py_ssize_t size) self->pos == 0 && size == PyBytes_GET_SIZE(self->buf) && self->exports == 0) { self->pos += size; - Py_INCREF(self->buf); - return self->buf; + return Py_NewRef(self->buf); } output = PyBytes_AS_STRING(self->buf) + self->pos; @@ -791,8 +788,7 @@ bytesio_getstate(bytesio *self, PyObject *Py_UNUSED(ignored)) if (initvalue == NULL) return NULL; if (self->dict == NULL) { - Py_INCREF(Py_None); - dict = Py_None; + dict = Py_NewRef(Py_None); 
} else { dict = PyDict_Copy(self->dict); @@ -875,8 +871,7 @@ bytesio_setstate(bytesio *self, PyObject *state) return NULL; } else { - Py_INCREF(dict); - self->dict = dict; + self->dict = Py_NewRef(dict); } } @@ -943,8 +938,7 @@ _io_BytesIO___init___impl(bytesio *self, PyObject *initvalue) } if (initvalue && initvalue != Py_None) { if (PyBytes_CheckExact(initvalue)) { - Py_INCREF(initvalue); - Py_XSETREF(self->buf, initvalue); + Py_XSETREF(self->buf, Py_NewRef(initvalue)); self->string_size = PyBytes_GET_SIZE(initvalue); } else { diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index 00859978e8cd6c..659297ef1b1d30 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -485,8 +485,12 @@ _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, ret = -1; if (!fd_is_own) self->fd = -1; - if (self->fd >= 0) + if (self->fd >= 0) { + PyObject *exc, *val, *tb; + PyErr_Fetch(&exc, &val, &tb); internal_close(self); + _PyErr_ChainExceptions(exc, val, tb); + } done: #ifdef MS_WINDOWS diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 6ae43a8b3bd626..7b9391ec54d732 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -464,8 +464,7 @@ iobase_enter(PyObject *self, PyObject *args) if (iobase_check_closed(self)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -642,8 +641,7 @@ iobase_iter(PyObject *self) if (iobase_check_closed(self)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index 0f31c172499ac3..ae6c3125a2d9da 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -188,14 +188,12 @@ write_str(stringio *self, PyObject *obj) self->decoder, obj, 1 /* always final */); } else { - decoded = obj; - Py_INCREF(decoded); + decoded = Py_NewRef(obj); } if (self->writenl) { PyObject *translated = PyUnicode_Replace( decoded, &_Py_STR(newline), self->writenl, -1); - Py_DECREF(decoded); - decoded = translated; + Py_SETREF(decoded, translated); } if (decoded == NULL) return -1; @@ -710,8 +708,7 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, is pointless for StringIO) */ if (newline != NULL && newline[0] == '\r') { - self->writenl = self->readnl; - Py_INCREF(self->writenl); + self->writenl = Py_NewRef(self->readnl); } if (self->readuniversal) { @@ -823,8 +820,7 @@ stringio_getstate(stringio *self, PyObject *Py_UNUSED(ignored)) if (initvalue == NULL) return NULL; if (self->dict == NULL) { - Py_INCREF(Py_None); - dict = Py_None; + dict = Py_NewRef(Py_None); } else { dict = PyDict_Copy(self->dict); @@ -934,8 +930,7 @@ stringio_setstate(stringio *self, PyObject *state) return NULL; } else { - Py_INCREF(dict); - self->dict = dict; + self->dict = Py_NewRef(dict); } } diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 3369694653fd96..ff903e9341de27 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -231,16 +231,14 @@ _io_IncrementalNewlineDecoder___init___impl(nldecoder_object *self, PyObject *errors) /*[clinic end generated code: output=fbd04d443e764ec2 input=89db6b19c6b126bf]*/ { - self->decoder = decoder; - Py_INCREF(decoder); + self->decoder = Py_NewRef(decoder); if (errors == NULL) { - self->errors = &_Py_ID(strict); + self->errors = Py_NewRef(&_Py_ID(strict)); } else { - self->errors = errors; + self->errors = Py_NewRef(errors); } - Py_INCREF(self->errors); self->translate = translate ? 
1 : 0; self->seennl = 0; @@ -301,8 +299,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, &_Py_ID(decode), input, final ? Py_True : Py_False, NULL); } else { - output = input; - Py_INCREF(output); + output = Py_NewRef(input); } if (check_decoded(output) < 0) @@ -323,8 +320,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, out = PyUnicode_DATA(modified); PyUnicode_WRITE(kind, out, 0, '\r'); memcpy(out + kind, PyUnicode_DATA(output), kind * output_len); - Py_DECREF(output); - output = modified; /* output remains ready */ + Py_SETREF(output, modified); /* output remains ready */ self->pendingcr = 0; output_len++; } @@ -339,8 +335,7 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself, PyObject *modified = PyUnicode_Substring(output, 0, output_len -1); if (modified == NULL) goto error; - Py_DECREF(output); - output = modified; + Py_SETREF(output, modified); self->pendingcr = 1; } } @@ -868,8 +863,7 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info, self->decoder, self->readtranslate ? Py_True : Py_False, NULL); if (incrementalDecoder == NULL) return -1; - Py_CLEAR(self->decoder); - self->decoder = incrementalDecoder; + Py_XSETREF(self->decoder, incrementalDecoder); } return 0; @@ -1148,8 +1142,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, * of the partially constructed object (like self->encoding) */ - Py_INCREF(errors); - self->errors = errors; + self->errors = Py_NewRef(errors); self->chunk_size = 8192; self->line_buffering = line_buffering; self->write_through = write_through; @@ -1157,8 +1150,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, goto error; } - self->buffer = buffer; - Py_INCREF(buffer); + self->buffer = Py_NewRef(buffer); /* Build the decoder object */ if (_textiowrapper_set_decoder(self, codec_info, PyUnicode_AsUTF8(errors)) != 0) @@ -1284,9 +1276,8 @@ textiowrapper_change_encoding(textio *self, PyObject *encoding, } Py_DECREF(codec_info); - Py_INCREF(errors); Py_SETREF(self->encoding, encoding); - Py_SETREF(self->errors, errors); + Py_SETREF(self->errors, Py_NewRef(errors)); return _textiowrapper_fix_encoder_state(self); } @@ -1502,8 +1493,7 @@ _textiowrapper_writeflush(textio *self) PyObject *b; if (PyBytes_Check(pending)) { - b = pending; - Py_INCREF(b); + b = Py_NewRef(pending); } else if (PyUnicode_Check(pending)) { assert(PyUnicode_IS_ASCII(pending)); @@ -1618,8 +1608,7 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text) // See bpo-43260 PyUnicode_GET_LENGTH(text) <= self->chunk_size && is_asciicompat_encoding(self->encodefunc)) { - b = text; - Py_INCREF(b); + b = Py_NewRef(text); } else { b = (*self->encodefunc)((PyObject *) self, text); @@ -1741,8 +1730,7 @@ textiowrapper_get_decoded_chars(textio *self, Py_ssize_t n) return NULL; } else { - chars = self->decoded_chars; - Py_INCREF(chars); + chars = Py_NewRef(self->decoded_chars); } self->decoded_chars_used += n; @@ -2139,10 +2127,9 @@ _textiowrapper_readline(textio *self, Py_ssize_t limit) } if (remaining == NULL) { - line = self->decoded_chars; + line = Py_NewRef(self->decoded_chars); start = self->decoded_chars_used; offset_to_buffer = 0; - Py_INCREF(line); } else { assert(self->decoded_chars_used == 0); @@ -3115,8 +3102,7 @@ static PyObject * textiowrapper_errors_get(textio *self, void *context) { CHECK_INITIALIZED(self); - Py_INCREF(self->errors); - return self->errors; + return Py_NewRef(self->errors); } static PyObject * diff --git a/Modules/_json.c b/Modules/_json.c index 1c39b46937d792..81431aa1041c55 100644 --- 
a/Modules/_json.c +++ b/Modules/_json.c @@ -7,12 +7,13 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #include "Python.h" -#include "pycore_ceval.h" // _Py_EnterRecursiveCall() -#include "structmember.h" // PyMemberDef -#include <stdbool.h> // bool +#include "pycore_ceval.h" // _Py_EnterRecursiveCall() +#include "pycore_runtime.h" // _PyRuntime +#include "structmember.h" // PyMemberDef +#include "pycore_global_objects.h" // _Py_ID() +#include <stdbool.h> // bool typedef struct _PyScannerObject { @@ -305,15 +306,9 @@ static void raise_errmsg(const char *msg, PyObject *s, Py_ssize_t end) { /* Use JSONDecodeError exception to raise a nice looking ValueError subclass */ - _Py_static_string(PyId_decoder, "json.decoder"); - PyObject *decoder = _PyImport_GetModuleId(&PyId_decoder); - if (decoder == NULL) { - return; - } - - _Py_IDENTIFIER(JSONDecodeError); - PyObject *JSONDecodeError = _PyObject_GetAttrId(decoder, &PyId_JSONDecodeError); - Py_DECREF(decoder); + _Py_DECLARE_STR(json_decoder, "json.decoder"); + PyObject *JSONDecodeError = + _PyImport_GetModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); if (JSONDecodeError == NULL) { return; } @@ -714,9 +709,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss if (memokey == NULL) { goto bail; } - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; + Py_SETREF(key, Py_NewRef(memokey)); idx = next_idx; /* skip whitespace between key and : delimiter, read :, skip whitespace */ @@ -1248,16 +1241,17 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (s == NULL) return NULL; - s->markers = markers; - s->defaultfn = defaultfn; - s->encoder = encoder; - s->indent = indent; - s->key_separator = key_separator; - s->item_separator = item_separator; + s->markers = Py_NewRef(markers); + s->defaultfn = Py_NewRef(defaultfn); + s->encoder = Py_NewRef(encoder); + s->indent = Py_NewRef(indent); + s->key_separator = Py_NewRef(key_separator); + s->item_separator = Py_NewRef(item_separator); s->sort_keys = sort_keys; s->skipkeys = skipkeys; s->allow_nan = allow_nan; s->fast_encode = NULL; + if (PyCFunction_Check(s->encoder)) { PyCFunction f = PyCFunction_GetFunction(s->encoder); if (f == (PyCFunction)py_encode_basestring_ascii || @@ -1266,12 +1260,6 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } } - Py_INCREF(s->markers); - Py_INCREF(s->defaultfn); - Py_INCREF(s->encoder); - Py_INCREF(s->indent); - Py_INCREF(s->key_separator); - Py_INCREF(s->item_separator); return (PyObject *)s; } @@ -1310,28 +1298,13 @@ _encoded_const(PyObject *obj) { /* Return the JSON string representation of None, True, False */ if (obj == Py_None) { - _Py_static_string(PyId_null, "null"); - PyObject *s_null = _PyUnicode_FromId(&PyId_null); - if (s_null == NULL) { - return NULL; - } - return Py_NewRef(s_null); + return Py_NewRef(&_Py_ID(null)); } else if (obj == Py_True) { - _Py_static_string(PyId_true, "true"); - PyObject *s_true = _PyUnicode_FromId(&PyId_true); - if (s_true == NULL) { - return NULL; - } - return Py_NewRef(s_true); + return Py_NewRef(&_Py_ID(true)); } else if (obj == Py_False) { - _Py_static_string(PyId_false, "false"); - PyObject *s_false = _PyUnicode_FromId(&PyId_false); - if (s_false == NULL) { - return NULL; - } - return Py_NewRef(s_false); + return Py_NewRef(&_Py_ID(false)); } else { PyErr_SetString(PyExc_ValueError, "not a const"); @@ -1500,8 +1473,7 @@ encoder_encode_key_value(PyEncoderObject *s, _PyUnicodeWriter *writer, bool *fir PyObject *encoded; if
(PyUnicode_Check(key)) { - Py_INCREF(key); - keystr = key; + keystr = Py_NewRef(key); } else if (PyFloat_Check(key)) { keystr = encoder_encode_float(s, key); @@ -1530,7 +1502,7 @@ encoder_encode_key_value(PyEncoderObject *s, _PyUnicodeWriter *writer, bool *fir if (*first) { *first = false; - } + } else { if (_PyUnicodeWriter_WriteStr(writer, s->item_separator) < 0) { Py_DECREF(keystr); diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index a0e262bdac26c6..cb80c8d339325b 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -128,8 +128,7 @@ normalizeUserObj(PyObject *obj) { PyCFunctionObject *fn; if (!PyCFunction_Check(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* Replace built-in function objects with a descriptive string because of built-in methods -- keeping a reference to @@ -142,8 +141,7 @@ normalizeUserObj(PyObject *obj) PyObject *modname = NULL; if (mod != NULL) { if (PyUnicode_Check(mod)) { - modname = mod; - Py_INCREF(modname); + modname = Py_NewRef(mod); } else if (PyModule_Check(mod)) { modname = PyModule_GetNameObject(mod); @@ -555,8 +553,7 @@ static int statsForEntry(rotating_node_t *node, void *arg) } } else { - Py_INCREF(Py_None); - collect->sublist = Py_None; + collect->sublist = Py_NewRef(Py_None); } info = PyObject_CallFunction((PyObject*) collect->state->stats_entry_type, @@ -781,8 +778,7 @@ profiler_init(ProfilerObject *pObj, PyObject *args, PyObject *kw) if (setSubcalls(pObj, subcalls) < 0 || setBuiltins(pObj, builtins) < 0) return -1; pObj->externalTimerUnit = timeunit; - Py_XINCREF(timer); - Py_XSETREF(pObj->externalTimer, timer); + Py_XSETREF(pObj->externalTimer, Py_XNewRef(timer)); return 0; } diff --git a/Modules/_operator.c b/Modules/_operator.c index 51ca74eeeaee21..4f2367150eefc4 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -722,8 +722,7 @@ _operator_is_not_impl(PyObject *module, PyObject *a, PyObject *b) { PyObject *result; result = (a != b) ? Py_True : Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* compare_digest **********************************************************/ @@ -731,9 +730,9 @@ _operator_is_not_impl(PyObject *module, PyObject *a, PyObject *b) /* * timing safe compare * - * Returns 1 of the strings are equal. + * Returns 1 if the strings are equal. * In case of len(a) != len(b) the function tries to keep the timing - * dependent on the length of b. CPU cache locally may still alter timing + * dependent on the length of b. CPU cache locality may still alter timing * a bit. */ static int @@ -1010,8 +1009,7 @@ itemgetter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(item); - ig->item = item; + ig->item = Py_NewRef(item); ig->nitems = nitems; ig->index = -1; if (PyLong_CheckExact(item)) { @@ -1095,8 +1093,7 @@ itemgetter_call_impl(itemgetterobject *ig, PyObject *obj) && ig->index < PyTuple_GET_SIZE(obj)) { result = PyTuple_GET_ITEM(obj, ig->index); - Py_INCREF(result); - return result; + return Py_NewRef(result); } return PyObject_GetItem(obj, ig->item); } @@ -1162,8 +1159,7 @@ static PyMemberDef itemgetter_members[] = { }; PyDoc_STRVAR(itemgetter_doc, -"itemgetter(item, ...) 
--> itemgetter object\n\ -\n\ +"itemgetter(item, /, *items)\n--\n\n\ Return a callable object that fetches the given item(s) from its operand.\n\ After f = itemgetter(2), the call f(r) returns r[2].\n\ After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3])"); @@ -1441,8 +1437,7 @@ dotjoinattr(PyObject *attr, PyObject **attrsep) } return PyUnicode_Join(*attrsep, attr); } else { - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } } @@ -1523,8 +1518,7 @@ static PyMemberDef attrgetter_members[] = { }; PyDoc_STRVAR(attrgetter_doc, -"attrgetter(attr, ...) --> attrgetter object\n\ -\n\ +"attrgetter(attr, /, *attrs)\n--\n\n\ Return a callable object that fetches the given attribute(s) from its operand.\n\ After f = attrgetter('name'), the call f(r) returns r.name.\n\ After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date).\n\ @@ -1596,8 +1590,7 @@ methodcaller_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyUnicode_InternInPlace(&name); mc->name = name; - Py_XINCREF(kwds); - mc->kwds = kwds; + mc->kwds = Py_XNewRef(kwds); mc->args = PyTuple_GetSlice(args, 1, PyTuple_GET_SIZE(args)); if (mc->args == NULL) { @@ -1742,12 +1735,10 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) newargs = PyTuple_New(1 + callargcount); if (newargs == NULL) return NULL; - Py_INCREF(mc->name); - PyTuple_SET_ITEM(newargs, 0, mc->name); + PyTuple_SET_ITEM(newargs, 0, Py_NewRef(mc->name)); for (i = 0; i < callargcount; ++i) { PyObject *arg = PyTuple_GET_ITEM(mc->args, i); - Py_INCREF(arg); - PyTuple_SET_ITEM(newargs, i + 1, arg); + PyTuple_SET_ITEM(newargs, i + 1, Py_NewRef(arg)); } return Py_BuildValue("ON", Py_TYPE(mc), newargs); } @@ -1775,8 +1766,7 @@ static PyMethodDef methodcaller_methods[] = { {NULL} }; PyDoc_STRVAR(methodcaller_doc, -"methodcaller(name, ...) 
--> methodcaller object\n\ -\n\ +"methodcaller(name, /, *args, **kwargs)\n--\n\n\ Return a callable object that calls the given method on its operand.\n\ After f = methodcaller('name'), the call f(r) returns r.name().\n\ After g = methodcaller('name', 'date', foo=1), the call g(r) returns\n\ diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 52704b0c59ade1..2078779663a919 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -386,10 +386,9 @@ init_method_ref(PyObject *self, PyObject *name, if (PyMethod_Check(func) && PyMethod_GET_SELF(func) == self) { /* Deconstruct a bound Python method */ - func2 = PyMethod_GET_FUNCTION(func); - Py_INCREF(func2); *method_self = self; /* borrowed */ - Py_XSETREF(*method_func, func2); + func2 = PyMethod_GET_FUNCTION(func); + Py_XSETREF(*method_func, Py_NewRef(func2)); Py_DECREF(func); return 0; } @@ -408,8 +407,7 @@ reconstruct_method(PyObject *func, PyObject *self) return PyMethod_New(func, self); } else { - Py_INCREF(func); - return func; + return Py_NewRef(func); } } @@ -907,8 +905,7 @@ PyMemoTable_Set(PyMemoTable *self, PyObject *key, Py_ssize_t value) entry->me_value = value; return 0; } - Py_INCREF(key); - entry->me_key = key; + entry->me_key = Py_NewRef(key); entry->me_value = value; self->mt_used++; @@ -1196,8 +1193,7 @@ _Pickler_SetBufferCallback(PicklerObject *self, PyObject *buffer_callback) return -1; } - Py_XINCREF(buffer_callback); - self->buffer_callback = buffer_callback; + self->buffer_callback = Py_XNewRef(buffer_callback); return 0; } @@ -1543,9 +1539,8 @@ _Unpickler_MemoPut(UnpicklerObject *self, size_t idx, PyObject *value) return -1; assert(idx < self->memo_size); } - Py_INCREF(value); old_item = self->memo[idx]; - self->memo[idx] = value; + self->memo[idx] = Py_NewRef(value); if (old_item != NULL) { Py_DECREF(old_item); } @@ -1834,8 +1829,7 @@ get_deep_attribute(PyObject *obj, PyObject *names, PyObject **pparent) n = PyList_GET_SIZE(names); for (i = 0; i < n; i++) { PyObject *name = PyList_GET_ITEM(names, i); - Py_XDECREF(parent); - parent = obj; + Py_XSETREF(parent, obj); (void)_PyObject_LookupAttr(parent, name, &obj); if (obj == NULL) { Py_DECREF(parent); @@ -1928,8 +1922,7 @@ whichmodule(PyObject *global, PyObject *dotted_path) i = 0; while (PyDict_Next(modules, &i, &module_name, &module)) { if (_checkmodule(module_name, module, global, dotted_path) == 0) { - Py_INCREF(module_name); - return module_name; + return Py_NewRef(module_name); } if (PyErr_Occurred()) { return NULL; @@ -1965,8 +1958,7 @@ whichmodule(PyObject *global, PyObject *dotted_path) /* If no module is found, use __main__. 
*/ module_name = &_Py_ID(__main__); - Py_INCREF(module_name); - return module_name; + return Py_NewRef(module_name); } /* fast_save_enter() and fast_save_leave() are guards against recursive @@ -3557,10 +3549,8 @@ fix_imports(PyObject **module_name, PyObject **global_name) Py_CLEAR(*module_name); Py_CLEAR(*global_name); - Py_INCREF(fixed_module_name); - Py_INCREF(fixed_global_name); - *module_name = fixed_module_name; - *global_name = fixed_global_name; + *module_name = Py_NewRef(fixed_module_name); + *global_name = Py_NewRef(fixed_global_name); return 0; } else if (PyErr_Occurred()) { @@ -3576,8 +3566,7 @@ fix_imports(PyObject **module_name, PyObject **global_name) Py_TYPE(item)->tp_name); return -1; } - Py_INCREF(item); - Py_XSETREF(*module_name, item); + Py_XSETREF(*module_name, Py_NewRef(item)); } else if (PyErr_Occurred()) { return -1; @@ -3602,8 +3591,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) const char global_op = GLOBAL; if (name) { - Py_INCREF(name); - global_name = name; + global_name = Py_NewRef(name); } else { if (_PyObject_LookupAttr(obj, &_Py_ID(__qualname__), &global_name) < 0) @@ -3637,8 +3625,8 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) obj, module_name); goto error; } - lastname = PyList_GET_ITEM(dotted_path, PyList_GET_SIZE(dotted_path)-1); - Py_INCREF(lastname); + lastname = Py_NewRef(PyList_GET_ITEM(dotted_path, + PyList_GET_SIZE(dotted_path) - 1)); cls = get_deep_attribute(module, dotted_path, &parent); Py_CLEAR(dotted_path); if (cls == NULL) { @@ -3728,9 +3716,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) else { gen_global: if (parent == module) { - Py_INCREF(lastname); - Py_DECREF(global_name); - global_name = lastname; + Py_SETREF(global_name, Py_NewRef(lastname)); } if (self->proto >= 4) { const char stack_global_op = STACK_GLOBAL; @@ -3932,8 +3918,7 @@ get_class(PyObject *obj) PyObject *cls; if (_PyObject_LookupAttr(obj, &_Py_ID(__class__), &cls) == 0) { - cls = (PyObject *) Py_TYPE(obj); - Py_INCREF(cls); + cls = Py_NewRef(Py_TYPE(obj)); } return cls; } @@ -4084,12 +4069,10 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) return -1; } PyTuple_SET_ITEM(newargs, 0, cls_new); - Py_INCREF(cls); - PyTuple_SET_ITEM(newargs, 1, cls); + PyTuple_SET_ITEM(newargs, 1, Py_NewRef(cls)); for (i = 0; i < PyTuple_GET_SIZE(args); i++) { PyObject *item = PyTuple_GET_ITEM(args, i); - Py_INCREF(item); - PyTuple_SET_ITEM(newargs, i + 2, item); + PyTuple_SET_ITEM(newargs, i + 2, Py_NewRef(item)); } callable = PyObject_Call(st->partial, newargs, kwargs); @@ -4361,8 +4344,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) if (reduce_value != Py_NotImplemented) { goto reduce; } - Py_DECREF(reduce_value); - reduce_value = NULL; + Py_SETREF(reduce_value, NULL); } if (type == &PyType_Type) { @@ -4405,8 +4387,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) } } if (reduce_func != NULL) { - Py_INCREF(obj); - reduce_value = _Pickle_FastCall(reduce_func, obj); + reduce_value = _Pickle_FastCall(reduce_func, Py_NewRef(obj)); } else if (PyType_IsSubtype(type, &PyType_Type)) { status = save_global(self, obj, NULL); @@ -4869,8 +4850,7 @@ _pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self) return NULL; } PyTuple_SET_ITEM(dict_args, 0, contents); - Py_INCREF((PyObject *)&PyDict_Type); - PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type); + PyTuple_SET_ITEM(reduce_value, 0, Py_NewRef(&PyDict_Type)); PyTuple_SET_ITEM(reduce_value, 1, dict_args); return 
reduce_value; } @@ -4944,8 +4924,7 @@ PicklerMemoProxy_New(PicklerObject *pickler) self = PyObject_GC_New(PicklerMemoProxyObject, &PicklerMemoProxyType); if (self == NULL) return NULL; - Py_INCREF(pickler); - self->pickler = pickler; + self->pickler = (PicklerObject*)Py_NewRef(pickler); PyObject_GC_Track(self); return (PyObject *)self; } @@ -5045,8 +5024,7 @@ Pickler_set_persid(PicklerObject *self, PyObject *value, void *Py_UNUSED(ignored } self->pers_func_self = NULL; - Py_INCREF(value); - Py_XSETREF(self->pers_func, value); + Py_XSETREF(self->pers_func, Py_NewRef(value)); return 0; } @@ -6073,7 +6051,7 @@ load_persid(UnpicklerObject *self) else { PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, - "A load persistent id instruction was encountered,\n" + "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); return -1; } @@ -6100,7 +6078,7 @@ load_binpersid(UnpicklerObject *self) else { PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, - "A load persistent id instruction was encountered,\n" + "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); return -1; } @@ -7370,8 +7348,7 @@ _pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self) return NULL; } PyTuple_SET_ITEM(constructor_args, 0, contents); - Py_INCREF((PyObject *)&PyDict_Type); - PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type); + PyTuple_SET_ITEM(reduce_value, 0, Py_NewRef(&PyDict_Type)); PyTuple_SET_ITEM(reduce_value, 1, constructor_args); return reduce_value; } @@ -7446,8 +7423,7 @@ UnpicklerMemoProxy_New(UnpicklerObject *unpickler) &UnpicklerMemoProxyType); if (self == NULL) return NULL; - Py_INCREF(unpickler); - self->unpickler = unpickler; + self->unpickler = (UnpicklerObject*)Py_NewRef(unpickler); PyObject_GC_Track(self); return (PyObject *)self; } @@ -7483,8 +7459,7 @@ Unpickler_set_memo(UnpicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored return -1; for (size_t i = 0; i < new_memo_size; i++) { - Py_XINCREF(unpickler->memo[i]); - new_memo[i] = unpickler->memo[i]; + new_memo[i] = Py_XNewRef(unpickler->memo[i]); } } else if (PyDict_Check(obj)) { @@ -7564,8 +7539,7 @@ Unpickler_set_persload(UnpicklerObject *self, PyObject *value, void *Py_UNUSED(i } self->pers_func_self = NULL; - Py_INCREF(value); - Py_XSETREF(self->pers_func, value); + Py_XSETREF(self->pers_func, Py_NewRef(value)); return 0; } @@ -7944,8 +7918,7 @@ PyInit__pickle(void) m = PyState_FindModule(&_picklemodule); if (m) { - Py_INCREF(m); - return m; + return Py_NewRef(m); } if (PyType_Ready(&Pdata_Type) < 0) @@ -7986,16 +7959,15 @@ PyInit__pickle(void) if (st->UnpicklingError == NULL) return NULL; - Py_INCREF(st->PickleError); - if (PyModule_AddObject(m, "PickleError", st->PickleError) < 0) + if (PyModule_AddObjectRef(m, "PickleError", st->PickleError) < 0) { return NULL; - Py_INCREF(st->PicklingError); - if (PyModule_AddObject(m, "PicklingError", st->PicklingError) < 0) + } + if (PyModule_AddObjectRef(m, "PicklingError", st->PicklingError) < 0) { return NULL; - Py_INCREF(st->UnpicklingError); - if (PyModule_AddObject(m, "UnpicklingError", st->UnpicklingError) < 0) + } + if (PyModule_AddObjectRef(m, "UnpicklingError", st->UnpicklingError) < 0) { return NULL; - + } if (_Pickle_InitState(st) < 0) return NULL; diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index 44e60d7c14954d..717b1cf2202105 100644 --- a/Modules/_posixsubprocess.c +++ 
b/Modules/_posixsubprocess.c @@ -825,8 +825,8 @@ subprocess_fork_exec(PyObject *module, PyObject *args) &preexec_fn, &allow_vfork)) return NULL; - if ((preexec_fn != Py_None) && - (PyInterpreterState_Get() != PyInterpreterState_Main())) { + PyInterpreterState *interp = PyInterpreterState_Get(); + if ((preexec_fn != Py_None) && (interp != PyInterpreterState_Main())) { PyErr_SetString(PyExc_RuntimeError, "preexec_fn not supported within subinterpreters"); return NULL; @@ -841,14 +841,6 @@ subprocess_fork_exec(PyObject *module, PyObject *args) return NULL; } - PyInterpreterState *interp = PyInterpreterState_Get(); - const PyConfig *config = _PyInterpreterState_GetConfig(interp); - if (config->_isolated_interpreter) { - PyErr_SetString(PyExc_RuntimeError, - "subprocess not supported for isolated subinterpreters"); - return NULL; - } - /* We need to call gc.disable() when we'll be calling preexec_fn */ if (preexec_fn != Py_None) { need_to_reenable_gc = PyGC_Disable(); diff --git a/Modules/_scproxy.c b/Modules/_scproxy.c index 4c1f1aa300c717..344b66f9aad522 100644 --- a/Modules/_scproxy.c +++ b/Modules/_scproxy.c @@ -84,7 +84,7 @@ get_proxy_settings(PyObject* Py_UNUSED(mod), PyObject *Py_UNUSED(ignored)) if (v == NULL) goto error; r = PyDict_SetItemString(result, "exclude_simple", v); - Py_DECREF(v); v = NULL; + Py_SETREF(v, NULL); if (r == -1) goto error; anArray = CFDictionaryGetValue(proxyDict, diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index e7e78707ee8d5d..1f9841c368b313 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -13,7 +13,8 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, double timeout, int detect_types, const char *isolation_level, int check_same_thread, PyObject *factory, - int cache_size, int uri); + int cache_size, int uri, + enum autocommit_mode autocommit); static int pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) @@ -21,14 +22,14 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) int return_value = -1; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 8 + #define NUM_KEYWORDS 9 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(database), &_Py_ID(timeout), &_Py_ID(detect_types), &_Py_ID(isolation_level), &_Py_ID(check_same_thread), &_Py_ID(factory), &_Py_ID(cached_statements), &_Py_ID(uri), }, + .ob_item = { &_Py_ID(database), &_Py_ID(timeout), &_Py_ID(detect_types), &_Py_ID(isolation_level), &_Py_ID(check_same_thread), &_Py_ID(factory), &_Py_ID(cached_statements), &_Py_ID(uri), &_Py_ID(autocommit), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -37,14 +38,14 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", "uri", NULL}; + static const char * const _keywords[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", "uri", "autocommit", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "Connection", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[8]; + PyObject *argsbuf[9]; PyObject 
* const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1; @@ -56,6 +57,7 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) PyObject *factory = (PyObject*)clinic_state()->ConnectionType; int cache_size = 128; int uri = 0; + enum autocommit_mode autocommit = LEGACY_TRANSACTION_CONTROL; fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 8, 0, argsbuf); if (!fastargs) { @@ -121,12 +123,24 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } } - uri = PyObject_IsTrue(fastargs[7]); - if (uri < 0) { - goto exit; + if (fastargs[7]) { + uri = PyObject_IsTrue(fastargs[7]); + if (uri < 0) { + goto exit; + } + if (!--noptargs) { + goto skip_optional_pos; + } } skip_optional_pos: - return_value = pysqlite_connection_init_impl((pysqlite_Connection *)self, database, timeout, detect_types, isolation_level, check_same_thread, factory, cache_size, uri); + if (!noptargs) { + goto skip_optional_kwonly; + } + if (!autocommit_converter(fastargs[8], &autocommit)) { + goto exit; + } +skip_optional_kwonly: + return_value = pysqlite_connection_init_impl((pysqlite_Connection *)self, database, timeout, detect_types, isolation_level, check_same_thread, factory, cache_size, uri, autocommit); exit: return return_value; @@ -1518,4 +1532,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=beef3eac690a1f88 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=20e929a7a7d62a01 input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index ceb77bbf8420ff..2854c1b5c31b2f 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -92,6 +92,30 @@ isolation_level_converter(PyObject *str_or_none, const char **result) return 1; } +static int +autocommit_converter(PyObject *val, enum autocommit_mode *result) +{ + if (Py_IsTrue(val)) { + *result = AUTOCOMMIT_ENABLED; + return 1; + } + if (Py_IsFalse(val)) { + *result = AUTOCOMMIT_DISABLED; + return 1; + } + if (PyLong_Check(val) && + PyLong_AsLong(val) == LEGACY_TRANSACTION_CONTROL) + { + *result = AUTOCOMMIT_LEGACY; + return 1; + } + + PyErr_SetString(PyExc_ValueError, + "autocommit must be True, False, or " + "sqlite3.LEGACY_TRANSACTION_CONTROL"); + return 0; +} + #define clinic_state() (pysqlite_get_state_by_type(Py_TYPE(self))) #include "clinic/connection.c.h" #undef clinic_state @@ -132,13 +156,38 @@ new_statement_cache(pysqlite_Connection *self, pysqlite_state *state, return res; } +static inline int +connection_exec_stmt(pysqlite_Connection *self, const char *sql) +{ + int rc; + Py_BEGIN_ALLOW_THREADS + int len = (int)strlen(sql) + 1; + sqlite3_stmt *stmt; + rc = sqlite3_prepare_v2(self->db, sql, len, &stmt, NULL); + if (rc == SQLITE_OK) { + (void)sqlite3_step(stmt); + rc = sqlite3_finalize(stmt); + } + Py_END_ALLOW_THREADS + + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return -1; + } + return 0; +} + /*[python input] class IsolationLevel_converter(CConverter): type = "const char *" converter = "isolation_level_converter" +class Autocommit_converter(CConverter): + type = "enum autocommit_mode" + converter = "autocommit_converter" + [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d 
input=cbcfe85b253061c2]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=bc2aa6c7ba0c5f8f]*/ // NB: This needs to be in sync with the sqlite3.connect docstring /*[clinic input] @@ -152,6 +201,8 @@ _sqlite3.Connection.__init__ as pysqlite_connection_init factory: object(c_default='(PyObject*)clinic_state()->ConnectionType') = ConnectionType cached_statements as cache_size: int = 128 uri: bool = False + * + autocommit: Autocommit(c_default='LEGACY_TRANSACTION_CONTROL') = sqlite3.LEGACY_TRANSACTION_CONTROL [clinic start generated code]*/ static int @@ -159,8 +210,9 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, double timeout, int detect_types, const char *isolation_level, int check_same_thread, PyObject *factory, - int cache_size, int uri) -/*[clinic end generated code: output=839eb2fee4293bda input=b8ce63dc6f70a383]*/ + int cache_size, int uri, + enum autocommit_mode autocommit) +/*[clinic end generated code: output=cba057313ea7712f input=b21abce28ebcd304]*/ { if (PySys_Audit("sqlite3.connect", "O", database) < 0) { return -1; @@ -227,6 +279,7 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, self->state = state; self->detect_types = detect_types; self->isolation_level = isolation_level; + self->autocommit = autocommit; self->check_same_thread = check_same_thread; self->thread_ident = PyThread_get_thread_ident(); self->statement_cache = statement_cache; @@ -256,6 +309,10 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database, } self->initialized = 1; + + if (autocommit == AUTOCOMMIT_DISABLED) { + (void)connection_exec_stmt(self, "BEGIN"); + } return 0; error: @@ -321,10 +378,33 @@ free_callback_contexts(pysqlite_Connection *self) set_callback_context(&self->authorizer_ctx, NULL); } +static void +remove_callbacks(sqlite3 *db) +{ +#ifdef HAVE_TRACE_V2 + sqlite3_trace_v2(db, SQLITE_TRACE_STMT, 0, 0); +#else + sqlite3_trace(db, 0, (void*)0); +#endif + sqlite3_progress_handler(db, 0, 0, (void *)0); + (void)sqlite3_set_authorizer(db, NULL, NULL); +} + static void connection_close(pysqlite_Connection *self) { if (self->db) { + if (self->autocommit == AUTOCOMMIT_DISABLED && + !sqlite3_get_autocommit(self->db)) + { + /* If close is implicitly called as a result of interpreter + * tear-down, we must not call back into Python. 
*/ + if (_Py_IsFinalizing()) { + remove_callbacks(self->db); + } + (void)connection_exec_stmt(self, "ROLLBACK"); + } + free_callback_contexts(self); sqlite3 *db = self->db; @@ -538,24 +618,21 @@ pysqlite_connection_commit_impl(pysqlite_Connection *self) return NULL; } - if (!sqlite3_get_autocommit(self->db)) { - int rc; - - Py_BEGIN_ALLOW_THREADS - sqlite3_stmt *statement; - rc = sqlite3_prepare_v2(self->db, "COMMIT", 7, &statement, NULL); - if (rc == SQLITE_OK) { - (void)sqlite3_step(statement); - rc = sqlite3_finalize(statement); + if (self->autocommit == AUTOCOMMIT_LEGACY) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return NULL; + } } - Py_END_ALLOW_THREADS - - if (rc != SQLITE_OK) { - (void)_pysqlite_seterror(self->state, self->db); + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return NULL; + } + if (connection_exec_stmt(self, "BEGIN") < 0) { return NULL; } } - Py_RETURN_NONE; } @@ -575,25 +652,21 @@ pysqlite_connection_rollback_impl(pysqlite_Connection *self) return NULL; } - if (!sqlite3_get_autocommit(self->db)) { - int rc; - - Py_BEGIN_ALLOW_THREADS - sqlite3_stmt *statement; - rc = sqlite3_prepare_v2(self->db, "ROLLBACK", 9, &statement, NULL); - if (rc == SQLITE_OK) { - (void)sqlite3_step(statement); - rc = sqlite3_finalize(statement); + if (self->autocommit == AUTOCOMMIT_LEGACY) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "ROLLBACK") < 0) { + return NULL; + } } - Py_END_ALLOW_THREADS - - if (rc != SQLITE_OK) { - (void)_pysqlite_seterror(self->state, self->db); + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (connection_exec_stmt(self, "ROLLBACK") < 0) { + return NULL; + } + if (connection_exec_stmt(self, "BEGIN") < 0) { return NULL; } - } - Py_RETURN_NONE; } @@ -2270,6 +2343,48 @@ getlimit_impl(pysqlite_Connection *self, int category) } +static PyObject * +get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return NULL; + } + if (self->autocommit == AUTOCOMMIT_ENABLED) { + Py_RETURN_TRUE; + } + if (self->autocommit == AUTOCOMMIT_DISABLED) { + Py_RETURN_FALSE; + } + return PyLong_FromLong(LEGACY_TRANSACTION_CONTROL); +} + +static int +set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return -1; + } + if (!autocommit_converter(val, &self->autocommit)) { + return -1; + } + if (self->autocommit == AUTOCOMMIT_ENABLED) { + if (!sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "COMMIT") < 0) { + return -1; + } + } + } + else if (self->autocommit == AUTOCOMMIT_DISABLED) { + if (sqlite3_get_autocommit(self->db)) { + if (connection_exec_stmt(self, "BEGIN") < 0) { + return -1; + } + } + } + return 0; +} + + static const char connection_doc[] = PyDoc_STR("SQLite database connection object."); @@ -2277,6 +2392,7 @@ static PyGetSetDef connection_getset[] = { {"isolation_level", (getter)pysqlite_connection_get_isolation_level, (setter)pysqlite_connection_set_isolation_level}, {"total_changes", (getter)pysqlite_connection_get_total_changes, (setter)0}, {"in_transaction", (getter)pysqlite_connection_get_in_transaction, (setter)0}, + {"autocommit", (getter)get_autocommit, (setter)set_autocommit}, {NULL} }; diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h index 629fe3d3a95a11..1df92065a587a2 
100644 --- a/Modules/_sqlite/connection.h +++ b/Modules/_sqlite/connection.h @@ -39,6 +39,12 @@ typedef struct _callback_context pysqlite_state *state; } callback_context; +enum autocommit_mode { + AUTOCOMMIT_LEGACY = LEGACY_TRANSACTION_CONTROL, + AUTOCOMMIT_ENABLED = 1, + AUTOCOMMIT_DISABLED = 0, +}; + typedef struct { PyObject_HEAD @@ -51,6 +57,7 @@ typedef struct /* NULL for autocommit, otherwise a string with the isolation level */ const char *isolation_level; + enum autocommit_mode autocommit; /* 1 if a check should be performed for each API call if the connection is * used from the same thread it was created in */ diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index cbf4718365fee6..a4e22bb4a2b58d 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -855,7 +855,8 @@ _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* operation /* We start a transaction implicitly before a DML statement. SELECT is the only exception. See #9924. */ - if (self->connection->isolation_level + if (self->connection->autocommit == AUTOCOMMIT_LEGACY + && self->connection->isolation_level && self->statement->is_dml && sqlite3_get_autocommit(self->connection->db)) { @@ -1033,7 +1034,9 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, // Commit if needed sqlite3 *db = self->connection->db; - if (!sqlite3_get_autocommit(db)) { + if (self->connection->autocommit == AUTOCOMMIT_LEGACY + && !sqlite3_get_autocommit(db)) + { int rc = SQLITE_OK; Py_BEGIN_ALLOW_THREADS @@ -1120,8 +1123,7 @@ pysqlite_cursor_iternext(pysqlite_Cursor *self) PyObject *factory = self->row_factory; PyObject *args[] = { (PyObject *)self, row, }; PyObject *new_row = PyObject_Vectorcall(factory, args, 2, NULL); - Py_DECREF(row); - row = new_row; + Py_SETREF(row, new_row); } return row; } diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 707aae160ab4f3..6db3d51fd20220 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -46,7 +46,8 @@ module _sqlite3 PyDoc_STRVAR(module_connect_doc, "connect($module, /, database, timeout=5.0, detect_types=0,\n" " isolation_level='', check_same_thread=True,\n" -" factory=ConnectionType, cached_statements=128, uri=False)\n" +" factory=ConnectionType, cached_statements=128, uri=False, *,\n" +" autocommit=sqlite3.LEGACY_TRANSACTION_CONTROL)\n" "--\n" "\n" "Opens a connection to the SQLite database file database.\n" @@ -706,6 +707,10 @@ module_exec(PyObject *module) goto error; } + if (PyModule_AddIntMacro(module, LEGACY_TRANSACTION_CONTROL) < 0) { + goto error; + } + int threadsafety = get_threadsafety(state); if (threadsafety < 0) { goto error; diff --git a/Modules/_sqlite/module.h b/Modules/_sqlite/module.h index 7deba22ffec1b6..daa22091d38ad7 100644 --- a/Modules/_sqlite/module.h +++ b/Modules/_sqlite/module.h @@ -26,6 +26,8 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" +#define LEGACY_TRANSACTION_CONTROL -1 + #define PYSQLITE_VERSION "2.6.0" #define MODULE_NAME "sqlite3" diff --git a/Modules/_sre/clinic/sre.c.h b/Modules/_sre/clinic/sre.c.h index 711e16a1190d77..da641081ce9e3c 100644 --- a/Modules/_sre/clinic/sre.c.h +++ b/Modules/_sre/clinic/sre.c.h @@ -1068,6 +1068,45 @@ _sre_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject return return_value; } +PyDoc_STRVAR(_sre_template__doc__, +"template($module, pattern, template, /)\n" +"--\n" +"\n" +"\n" +"\n" +" template\n" +" A list containing interleaved literal strings (str or bytes) and group\n" +" indices (int), as returned by 
re._parser.parse_template():\n" +" [literal1, group1, ..., literalN, groupN]"); + +#define _SRE_TEMPLATE_METHODDEF \ + {"template", _PyCFunction_CAST(_sre_template), METH_FASTCALL, _sre_template__doc__}, + +static PyObject * +_sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template); + +static PyObject * +_sre_template(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *pattern; + PyObject *template; + + if (!_PyArg_CheckPositional("template", nargs, 2, 2)) { + goto exit; + } + pattern = args[0]; + if (!PyList_Check(args[1])) { + _PyArg_BadArgument("template", "argument 2", "list", args[1]); + goto exit; + } + template = args[1]; + return_value = _sre_template_impl(module, pattern, template); + +exit: + return return_value; +} + PyDoc_STRVAR(_sre_SRE_Match_expand__doc__, "expand($self, /, template)\n" "--\n" @@ -1421,4 +1460,4 @@ _sre_SRE_Scanner_search(ScannerObject *self, PyTypeObject *cls, PyObject *const } return _sre_SRE_Scanner_search_impl(self, cls); } -/*[clinic end generated code: output=14ea86f85c130a7b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e3ba72156dd71572 input=a9049054013a1b77]*/ diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index bcb30848d9a592..4b6290a5967932 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -51,13 +51,6 @@ static const char copyright[] = #include -/* name of this module, minus the leading underscore */ -#if !defined(SRE_MODULE) -#define SRE_MODULE "sre" -#endif - -#define SRE_PY_MODULE "re" - /* defining this one enables tracing */ #undef VERBOSE @@ -254,6 +247,8 @@ typedef struct { PyTypeObject *Pattern_Type; PyTypeObject *Match_Type; PyTypeObject *Scanner_Type; + PyTypeObject *Template_Type; + PyObject *compile_template; // reference to re._compile_template } _sremodulestate; static _sremodulestate * @@ -464,8 +459,7 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string, state->start = (void*) ((char*) ptr + start * state->charsize); state->end = (void*) ((char*) ptr + end * state->charsize); - Py_INCREF(string); - state->string = string; + state->string = Py_NewRef(string); state->pos = start; state->endpos = end; @@ -504,8 +498,7 @@ getslice(int isbytes, const void *ptr, if (isbytes) { if (PyBytes_CheckExact(string) && start == 0 && end == PyBytes_GET_SIZE(string)) { - Py_INCREF(string); - return string; + return Py_NewRef(string); } return PyBytes_FromStringAndSize( (const char *)ptr + start, end - start); @@ -757,23 +750,6 @@ _sre_SRE_Pattern_search_impl(PatternObject *self, PyTypeObject *cls, return match; } -static PyObject* -call(const char* module, const char* function, PyObject* args) -{ - PyObject* func; - PyObject* result; - - if (!args) - return NULL; - func = _PyImport_GetModuleAttrString(module, function); - if (!func) - return NULL; - result = PyObject_CallObject(func, args); - Py_DECREF(func); - Py_DECREF(args); - return result; -} - /*[clinic input] _sre.SRE_Pattern.findall @@ -1036,6 +1012,57 @@ _sre_SRE_Pattern_split_impl(PatternObject *self, PyObject *string, } +static PyObject * +compile_template(_sremodulestate *module_state, + PatternObject *pattern, PyObject *template) +{ + /* delegate to Python code */ + PyObject *func = module_state->compile_template; + if (func == NULL) { + func = _PyImport_GetModuleAttrString("re", "_compile_template"); + if (func == NULL) { + return NULL; + } + Py_XSETREF(module_state->compile_template, func); + } + + PyObject *args[] = {(PyObject *)pattern, template}; + 
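    /* A minimal sketch of what this delegation amounts to: without the
     * module-state cache, each substitution would perform roughly the calls
     * below (local names are illustrative, error handling omitted):
     *
     *     PyObject *helper = _PyImport_GetModuleAttrString("re", "_compile_template");
     *     PyObject *callargs[] = {(PyObject *)pattern, template};
     *     PyObject *compiled = PyObject_Vectorcall(helper, callargs, 2, NULL);
     *     Py_DECREF(helper);
     *
     * Caching the callable in module_state->compile_template keeps the module
     * import and attribute lookup out of the per-substitution path. */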
PyObject *result = PyObject_Vectorcall(func, args, 2, NULL); + + if (result == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) { + /* If the replacement string is unhashable (e.g. bytearray), + * convert it to the basic type (str or bytes) and repeat. */ + if (PyUnicode_Check(template) && !PyUnicode_CheckExact(template)) { + PyErr_Clear(); + template = _PyUnicode_Copy(template); + } + else if (PyObject_CheckBuffer(template) && !PyBytes_CheckExact(template)) { + PyErr_Clear(); + template = PyBytes_FromObject(template); + } + else { + return NULL; + } + if (template == NULL) { + return NULL; + } + args[1] = template; + result = PyObject_Vectorcall(func, args, 2, NULL); + Py_DECREF(template); + } + + if (result != NULL && Py_TYPE(result) != module_state->Template_Type) { + PyErr_Format(PyExc_RuntimeError, + "the result of compiling a replacement string is %.200s", + Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; + } + return result; +} + +static PyObject *expand_template(TemplateObject *, MatchObject *); /* Forward */ + static PyObject* pattern_subx(_sremodulestate* module_state, PatternObject* self, @@ -1055,14 +1082,13 @@ pattern_subx(_sremodulestate* module_state, Py_ssize_t n; Py_ssize_t i, b, e; int isbytes, charsize; - int filter_is_callable; + enum {LITERAL, TEMPLATE, CALLABLE} filter_type; Py_buffer view; if (PyCallable_Check(ptemplate)) { /* sub/subn takes either a function or a template */ - filter = ptemplate; - Py_INCREF(filter); - filter_is_callable = 1; + filter = Py_NewRef(ptemplate); + filter_type = CALLABLE; } else { /* if not callable, check if it's a literal string */ int literal; @@ -1080,18 +1106,23 @@ pattern_subx(_sremodulestate* module_state, if (view.buf) PyBuffer_Release(&view); if (literal) { - filter = ptemplate; - Py_INCREF(filter); - filter_is_callable = 0; + filter = Py_NewRef(ptemplate); + filter_type = LITERAL; } else { /* not a literal; hand it over to the template compiler */ - filter = call( - SRE_PY_MODULE, "_subx", - PyTuple_Pack(2, self, ptemplate) - ); + filter = compile_template(module_state, self, ptemplate); if (!filter) return NULL; - filter_is_callable = PyCallable_Check(filter); + + assert(Py_TYPE(filter) == module_state->Template_Type); + if (Py_SIZE(filter) == 0) { + Py_SETREF(filter, + Py_NewRef(((TemplateObject *)filter)->literal)); + filter_type = LITERAL; + } + else { + filter_type = TEMPLATE; + } } } @@ -1142,19 +1173,25 @@ pattern_subx(_sremodulestate* module_state, } - if (filter_is_callable) { + if (filter_type != LITERAL) { /* pass match object through filter */ match = pattern_new_match(module_state, self, &state, 1); if (!match) goto error; - item = PyObject_CallOneArg(filter, match); + if (filter_type == TEMPLATE) { + item = expand_template((TemplateObject *)filter, + (MatchObject *)match); + } + else { + assert(filter_type == CALLABLE); + item = PyObject_CallOneArg(filter, match); + } Py_DECREF(match); if (!item) goto error; } else { /* filter is literal string */ - item = filter; - Py_INCREF(item); + item = Py_NewRef(filter); } /* add to list */ @@ -1275,8 +1312,7 @@ static PyObject * _sre_SRE_Pattern___copy___impl(PatternObject *self) /*[clinic end generated code: output=85dedc2db1bd8694 input=a730a59d863bc9f5]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -1291,8 +1327,7 @@ static PyObject * _sre_SRE_Pattern___deepcopy__(PatternObject *self, PyObject *memo) /*[clinic end generated code: output=2ad25679c1f1204a input=a465b1602f997bed]*/ { - Py_INCREF(self); - 
return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -1458,19 +1493,16 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, PyBuffer_Release(&view); } - Py_INCREF(pattern); - self->pattern = pattern; + self->pattern = Py_NewRef(pattern); self->flags = flags; self->groups = groups; if (PyDict_GET_SIZE(groupindex) > 0) { - Py_INCREF(groupindex); - self->groupindex = groupindex; + self->groupindex = Py_NewRef(groupindex); if (PyTuple_GET_SIZE(indexgroup) > 0) { - Py_INCREF(indexgroup); - self->indexgroup = indexgroup; + self->indexgroup = Py_NewRef(indexgroup); } } @@ -1482,6 +1514,67 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, return (PyObject*) self; } +/*[clinic input] +_sre.template + + pattern: object + template: object(subclass_of="&PyList_Type") + A list containing interleaved literal strings (str or bytes) and group + indices (int), as returned by re._parser.parse_template(): + [literal1, group1, ..., literalN, groupN] + / + +[clinic start generated code]*/ + +static PyObject * +_sre_template_impl(PyObject *module, PyObject *pattern, PyObject *template) +/*[clinic end generated code: output=d51290e596ebca86 input=af55380b27f02942]*/ +{ + /* template is a list containing interleaved literal strings (str or bytes) + * and group indices (int), as returned by _parser.parse_template: + * [literal1, group1, literal2, ..., literalN]. + */ + _sremodulestate *module_state = get_sre_module_state(module); + TemplateObject *self = NULL; + Py_ssize_t n = PyList_GET_SIZE(template); + if ((n & 1) == 0 || n < 1) { + goto bad_template; + } + n /= 2; + self = PyObject_GC_NewVar(TemplateObject, module_state->Template_Type, n); + if (!self) + return NULL; + self->chunks = 1 + 2*n; + self->literal = Py_NewRef(PyList_GET_ITEM(template, 0)); + for (Py_ssize_t i = 0; i < n; i++) { + Py_ssize_t index = PyLong_AsSsize_t(PyList_GET_ITEM(template, 2*i+1)); + if (index == -1 && PyErr_Occurred()) { + Py_DECREF(self); + return NULL; + } + if (index < 0) { + goto bad_template; + } + self->items[i].index = index; + + PyObject *literal = PyList_GET_ITEM(template, 2*i+2); + // Skip empty literals. + if ((PyUnicode_Check(literal) && !PyUnicode_GET_LENGTH(literal)) || + (PyBytes_Check(literal) && !PyBytes_GET_SIZE(literal))) + { + literal = NULL; + self->chunks--; + } + self->items[i].literal = Py_XNewRef(literal); + } + return (PyObject*) self; + +bad_template: + PyErr_SetString(PyExc_TypeError, "invalid template"); + Py_XDECREF(self); + return NULL; +} + /* -------------------------------------------------------------------- */ /* Code validation */ @@ -1518,7 +1611,7 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, #endif /* Report failure */ -#define FAIL do { VTRACE(("FAIL: %d\n", __LINE__)); return 0; } while (0) +#define FAIL do { VTRACE(("FAIL: %d\n", __LINE__)); return -1; } while (0) /* Extract opcode, argument, or skip count from code array */ #define GET_OP \ @@ -1542,7 +1635,7 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, skip = *code; \ VTRACE(("%lu (skip to %p)\n", \ (unsigned long)skip, code+skip)); \ - if (skip-adj > (uintptr_t)(end - code)) \ + if (skip-adj > (uintptr_t)(end - code)) \ FAIL; \ code++; \ } while (0) @@ -1631,9 +1724,10 @@ _validate_charset(SRE_CODE *code, SRE_CODE *end) } } - return 1; + return 0; } +/* Returns 0 on success, -1 on failure, and 1 if the last op is JUMP. 
*/ static int _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) { @@ -1711,7 +1805,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) case SRE_OP_IN_LOC_IGNORE: GET_SKIP; /* Stop 1 before the end; we check the FAILURE below */ - if (!_validate_charset(code, code+skip-2)) + if (_validate_charset(code, code+skip-2)) FAIL; if (code[skip-2] != SRE_OP_FAILURE) FAIL; @@ -1765,7 +1859,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) } /* Validate the charset */ if (flags & SRE_INFO_CHARSET) { - if (!_validate_charset(code, newcode-1)) + if (_validate_charset(code, newcode-1)) FAIL; if (newcode[-1] != SRE_OP_FAILURE) FAIL; @@ -1786,7 +1880,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) if (skip == 0) break; /* Stop 2 before the end; we check the JUMP below */ - if (!_validate_inner(code, code+skip-3, groups)) + if (_validate_inner(code, code+skip-3, groups)) FAIL; code += skip-3; /* Check that it ends with a JUMP, and that each JUMP @@ -1800,6 +1894,8 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) else if (code+skip-1 != target) FAIL; } + if (code != target) + FAIL; } break; @@ -1815,7 +1911,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; if (max > SRE_MAXREPEAT) FAIL; - if (!_validate_inner(code, code+skip-4, groups)) + if (_validate_inner(code, code+skip-4, groups)) FAIL; code += skip-4; GET_OP; @@ -1835,7 +1931,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; if (max > SRE_MAXREPEAT) FAIL; - if (!_validate_inner(code, code+skip-3, groups)) + if (_validate_inner(code, code+skip-3, groups)) FAIL; code += skip-3; GET_OP; @@ -1853,7 +1949,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) case SRE_OP_ATOMIC_GROUP: { GET_SKIP; - if (!_validate_inner(code, code+skip-2, groups)) + if (_validate_inner(code, code+skip-2, groups)) FAIL; code += skip-2; GET_OP; @@ -1905,24 +2001,17 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) to allow arbitrary jumps anywhere in the code; so we just look for a JUMP opcode preceding our skip target. 
*/ - if (skip >= 3 && skip-3 < (uintptr_t)(end - code) && - code[skip-3] == SRE_OP_JUMP) - { - VTRACE(("both then and else parts present\n")); - if (!_validate_inner(code+1, code+skip-3, groups)) - FAIL; + VTRACE(("then part:\n")); + int rc = _validate_inner(code+1, code+skip-1, groups); + if (rc == 1) { + VTRACE(("else part:\n")); code += skip-2; /* Position after JUMP, at */ GET_SKIP; - if (!_validate_inner(code, code+skip-1, groups)) - FAIL; - code += skip-1; - } - else { - VTRACE(("only a then part present\n")); - if (!_validate_inner(code+1, code+skip-1, groups)) - FAIL; - code += skip-1; + rc = _validate_inner(code, code+skip-1, groups); } + if (rc) + FAIL; + code += skip-1; break; case SRE_OP_ASSERT: @@ -1933,7 +2022,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) if (arg & 0x80000000) FAIL; /* Width too large */ /* Stop 1 before the end; we check the SUCCESS below */ - if (!_validate_inner(code+1, code+skip-2, groups)) + if (_validate_inner(code+1, code+skip-2, groups)) FAIL; code += skip-2; GET_OP; @@ -1941,6 +2030,12 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) FAIL; break; + case SRE_OP_JUMP: + if (code + 1 != end) + FAIL; + VTRACE(("JUMP: %d\n", __LINE__)); + return 1; + default: FAIL; @@ -1948,7 +2043,7 @@ _validate_inner(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) } VTRACE(("okay\n")); - return 1; + return 0; } static int @@ -1963,7 +2058,7 @@ _validate_outer(SRE_CODE *code, SRE_CODE *end, Py_ssize_t groups) static int _validate(PatternObject *self) { - if (!_validate_outer(self->code, self->code+self->codesize, self->groups)) + if (_validate_outer(self->code, self->code+self->codesize, self->groups)) { PyErr_SetString(PyExc_RuntimeError, "invalid SRE code"); return 0; @@ -2021,8 +2116,7 @@ match_getslice_by_index(MatchObject* self, Py_ssize_t index, PyObject* def) if (self->string == Py_None || self->mark[index] < 0) { /* return default value if the string or group is undefined */ - Py_INCREF(def); - return def; + return Py_NewRef(def); } ptr = getstring(self->string, &length, &isbytes, &charsize, &view); @@ -2096,11 +2190,14 @@ static PyObject * _sre_SRE_Match_expand_impl(MatchObject *self, PyObject *template) /*[clinic end generated code: output=931b58ccc323c3a1 input=4bfdb22c2f8b146a]*/ { - /* delegate to Python code */ - return call( - SRE_PY_MODULE, "_expand", - PyTuple_Pack(3, self->pattern, self, template) - ); + _sremodulestate *module_state = get_sre_module_state_by_class(Py_TYPE(self)); + PyObject *filter = compile_template(module_state, self->pattern, template); + if (filter == NULL) { + return NULL; + } + PyObject *result = expand_template((TemplateObject *)filter, self); + Py_DECREF(filter); + return result; } static PyObject* @@ -2338,8 +2435,7 @@ match_regs(MatchObject* self) PyTuple_SET_ITEM(regs, index, item); } - Py_INCREF(regs); - self->regs = regs; + self->regs = Py_NewRef(regs); return regs; } @@ -2353,8 +2449,7 @@ static PyObject * _sre_SRE_Match___copy___impl(MatchObject *self) /*[clinic end generated code: output=a779c5fc8b5b4eb4 input=3bb4d30b6baddb5b]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -2369,8 +2464,7 @@ static PyObject * _sre_SRE_Match___deepcopy__(MatchObject *self, PyObject *memo) /*[clinic end generated code: output=ba7cb46d655e4ee2 input=779d12a31c2c325e]*/ { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } PyDoc_STRVAR(match_doc, @@ -2399,8 +2493,7 @@ match_lastgroup_get(MatchObject *self, void 
*Py_UNUSED(ignored)) { PyObject *result = PyTuple_GET_ITEM(self->pattern->indexgroup, self->lastindex); - Py_INCREF(result); - return result; + return Py_NewRef(result); } Py_RETURN_NONE; } @@ -2409,8 +2502,7 @@ static PyObject * match_regs_get(MatchObject *self, void *Py_UNUSED(ignored)) { if (self->regs) { - Py_INCREF(self->regs); - return self->regs; + return Py_NewRef(self->regs); } else return match_regs(self); } @@ -2454,11 +2546,9 @@ pattern_new_match(_sremodulestate* module_state, if (!match) return NULL; - Py_INCREF(pattern); - match->pattern = pattern; + match->pattern = (PatternObject*)Py_NewRef(pattern); - Py_INCREF(state->string); - match->string = state->string; + match->string = Py_NewRef(state->string); match->regs = NULL; match->groups = pattern->groups+1; @@ -2678,13 +2768,114 @@ pattern_scanner(_sremodulestate *module_state, return NULL; } - Py_INCREF(self); - scanner->pattern = (PyObject*) self; + scanner->pattern = Py_NewRef(self); PyObject_GC_Track(scanner); return (PyObject*) scanner; } +/* -------------------------------------------------------------------- */ +/* template methods */ + +static int +template_traverse(TemplateObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->literal); + for (Py_ssize_t i = 0, n = Py_SIZE(self); i < n; i++) { + Py_VISIT(self->items[i].literal); + } + return 0; +} + +static int +template_clear(TemplateObject *self) +{ + Py_CLEAR(self->literal); + for (Py_ssize_t i = 0, n = Py_SIZE(self); i < n; i++) { + Py_CLEAR(self->items[i].literal); + } + return 0; +} + +static void +template_dealloc(TemplateObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + + PyObject_GC_UnTrack(self); + (void)template_clear(self); + tp->tp_free(self); + Py_DECREF(tp); +} + +static PyObject * +expand_template(TemplateObject *self, MatchObject *match) +{ + if (Py_SIZE(self) == 0) { + return Py_NewRef(self->literal); + } + + PyObject *result = NULL; + Py_ssize_t count = 0; // the number of non-empty chunks + /* For small number of strings use a buffer allocated on the stack, + * otherwise use a list object. 
*/ + PyObject *buffer[10]; + PyObject **out = buffer; + PyObject *list = NULL; + if (self->chunks > (int)Py_ARRAY_LENGTH(buffer) || + !PyUnicode_Check(self->literal)) + { + list = PyList_New(self->chunks); + if (!list) { + return NULL; + } + out = &PyList_GET_ITEM(list, 0); + } + + out[count++] = Py_NewRef(self->literal); + for (Py_ssize_t i = 0; i < Py_SIZE(self); i++) { + Py_ssize_t index = self->items[i].index; + if (index >= match->groups) { + PyErr_SetString(PyExc_IndexError, "no such group"); + goto cleanup; + } + PyObject *item = match_getslice_by_index(match, index, Py_None); + if (item == NULL) { + goto cleanup; + } + if (item != Py_None) { + out[count++] = Py_NewRef(item); + } + Py_DECREF(item); + + PyObject *literal = self->items[i].literal; + if (literal != NULL) { + out[count++] = Py_NewRef(literal); + } + } + + if (PyUnicode_Check(self->literal)) { + result = _PyUnicode_JoinArray(&_Py_STR(empty), out, count); + } + else { + Py_SET_SIZE(list, count); + result = _PyBytes_Join((PyObject *)&_Py_SINGLETON(bytes_empty), list); + } + +cleanup: + if (list) { + Py_DECREF(list); + } + else { + for (Py_ssize_t i = 0; i < count; i++) { + Py_DECREF(out[i]); + } + } + return result; +} + + static Py_hash_t pattern_hash(PatternObject *self) { @@ -2907,15 +3098,32 @@ static PyType_Slot scanner_slots[] = { }; static PyType_Spec scanner_spec = { - .name = "_" SRE_MODULE ".SRE_Scanner", + .name = "_sre.SRE_Scanner", .basicsize = sizeof(ScannerObject), .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_HAVE_GC), .slots = scanner_slots, }; +static PyType_Slot template_slots[] = { + {Py_tp_dealloc, template_dealloc}, + {Py_tp_traverse, template_traverse}, + {Py_tp_clear, template_clear}, + {0, NULL}, +}; + +static PyType_Spec template_spec = { + .name = "_sre.SRE_Template", + .basicsize = sizeof(TemplateObject), + .itemsize = sizeof(((TemplateObject *)0)->items[0]), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | + Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_HAVE_GC), + .slots = template_slots, +}; + static PyMethodDef _functions[] = { _SRE_COMPILE_METHODDEF + _SRE_TEMPLATE_METHODDEF _SRE_GETCODESIZE_METHODDEF _SRE_ASCII_ISCASED_METHODDEF _SRE_UNICODE_ISCASED_METHODDEF @@ -2932,6 +3140,8 @@ sre_traverse(PyObject *module, visitproc visit, void *arg) Py_VISIT(state->Pattern_Type); Py_VISIT(state->Match_Type); Py_VISIT(state->Scanner_Type); + Py_VISIT(state->Template_Type); + Py_VISIT(state->compile_template); return 0; } @@ -2944,6 +3154,8 @@ sre_clear(PyObject *module) Py_CLEAR(state->Pattern_Type); Py_CLEAR(state->Match_Type); Py_CLEAR(state->Scanner_Type); + Py_CLEAR(state->Template_Type); + Py_CLEAR(state->compile_template); return 0; } @@ -2984,6 +3196,7 @@ sre_exec(PyObject *m) CREATE_TYPE(m, state->Pattern_Type, &pattern_spec); CREATE_TYPE(m, state->Match_Type, &match_spec); CREATE_TYPE(m, state->Scanner_Type, &scanner_spec); + CREATE_TYPE(m, state->Template_Type, &template_spec); if (PyModule_AddIntConstant(m, "MAGIC", SRE_MAGIC) < 0) { goto error; @@ -3013,7 +3226,7 @@ static PyModuleDef_Slot sre_slots[] = { static struct PyModuleDef sremodule = { .m_base = PyModuleDef_HEAD_INIT, - .m_name = "_" SRE_MODULE, + .m_name = "_sre", .m_size = sizeof(_sremodulestate), .m_methods = _functions, .m_slots = sre_slots, diff --git a/Modules/_sre/sre.h b/Modules/_sre/sre.h index 52ae3e11b5f750..d967d9ea04ba7a 100644 --- a/Modules/_sre/sre.h +++ b/Modules/_sre/sre.h @@ -52,6 +52,17 @@ typedef struct { Py_ssize_t mark[1]; } MatchObject; 
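/* A concrete illustration of the TemplateObject layout introduced below,
 * assuming re._parser.parse_template() turns the replacement "abc\1\2-" into
 * the interleaved list ["abc", 1, "", 2, "-"] (replacement string and group
 * numbers are made up for the example):
 *
 *     Py_SIZE(self)   == 2           two group references
 *     self->literal   == "abc"       the leading literal
 *     self->items[0]  == {1, NULL}   the empty literal after group 1 is dropped
 *     self->items[1]  == {2, "-"}
 *     self->chunks    == 4           1 + 2*2, minus the one dropped literal
 *
 * expand_template() then joins literal, group 1, group 2 and "-" in order,
 * skipping any group that did not participate in the match. */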
+typedef struct { + PyObject_VAR_HEAD + Py_ssize_t chunks; /* the number of group references and non-NULL literals + * self->chunks <= 2*Py_SIZE(self) + 1 */ + PyObject *literal; + struct { + Py_ssize_t index; + PyObject *literal; /* NULL if empty */ + } items[0]; +} TemplateObject; + typedef struct SRE_REPEAT_T { Py_ssize_t count; const SRE_CODE* pattern; /* points to REPEAT operator arguments */ diff --git a/Modules/_sre/sre_constants.h b/Modules/_sre/sre_constants.h index c6335147368626..b5692292f65280 100644 --- a/Modules/_sre/sre_constants.h +++ b/Modules/_sre/sre_constants.h @@ -3,7 +3,7 @@ * * regular expression matching engine * - * Auto-generated by Tools/scripts/generate_sre_constants.py from + * Auto-generated by Tools/build/generate_sre_constants.py from * Lib/re/_constants.py. * * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. @@ -11,7 +11,7 @@ * See the sre.c file for information on usage and redistribution. */ -#define SRE_MAGIC 20220615 +#define SRE_MAGIC 20221023 #define SRE_OP_FAILURE 0 #define SRE_OP_SUCCESS 1 #define SRE_OP_ANY 2 diff --git a/Modules/_sre/sre_targets.h b/Modules/_sre/sre_targets.h index 25b6edd436bb73..62761a0000d836 100644 --- a/Modules/_sre/sre_targets.h +++ b/Modules/_sre/sre_targets.h @@ -3,7 +3,7 @@ * * regular expression matching engine * - * Auto-generated by Tools/scripts/generate_sre_constants.py from + * Auto-generated by Tools/build/generate_sre_constants.py from * Lib/re/_constants.py. * * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. diff --git a/Modules/_ssl.c b/Modules/_ssl.c index bf8bd9dea89b6b..2885774295b065 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -415,8 +415,7 @@ static PyObject * SSLError_str(PyOSErrorObject *self) { if (self->strerror != NULL && PyUnicode_Check(self->strerror)) { - Py_INCREF(self->strerror); - return self->strerror; + return Py_NewRef(self->strerror); } else return PyObject_Str(self->args); @@ -500,8 +499,7 @@ fill_and_set_sslerror(_sslmodulestate *state, if (verify_str != NULL) { verify_obj = PyUnicode_FromString(verify_str); } else { - verify_obj = Py_None; - Py_INCREF(verify_obj); + verify_obj = Py_NewRef(Py_None); } break; } @@ -800,8 +798,7 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, self->ssl = NULL; self->Socket = NULL; - self->ctx = sslctx; - Py_INCREF(sslctx); + self->ctx = (PySSLContext*)Py_NewRef(sslctx); self->shutdown_seen_zero = 0; self->owner = NULL; self->server_hostname = NULL; @@ -1026,8 +1023,7 @@ _asn1obj2py(_sslmodulestate *state, const ASN1_OBJECT *name, int no_name) } } if (!buflen && no_name) { - Py_INCREF(Py_None); - name_obj = Py_None; + name_obj = Py_NewRef(Py_None); } else { name_obj = PyUnicode_FromStringAndSize(namebuf, buflen); @@ -1876,8 +1872,7 @@ _ssl__SSLSocket_get_unverified_chain_impl(PySSLSocket *self) X509 *peer = SSL_get_peer_certificate(self->ssl); if (peer == NULL) { - peerobj = Py_None; - Py_INCREF(peerobj); + peerobj = Py_NewRef(Py_None); } else { /* consume X509 reference on success */ peerobj = _PySSL_CertificateFromX509(self->ctx->state, peer, 0); @@ -1907,8 +1902,7 @@ cipher_to_tuple(const SSL_CIPHER *cipher) cipher_name = SSL_CIPHER_get_name(cipher); if (cipher_name == NULL) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(retval, 0, Py_None); + PyTuple_SET_ITEM(retval, 0, Py_NewRef(Py_None)); } else { v = PyUnicode_FromString(cipher_name); if (v == NULL) @@ -1918,8 +1912,7 @@ cipher_to_tuple(const SSL_CIPHER *cipher) cipher_protocol = SSL_CIPHER_get_version(cipher); if (cipher_protocol == NULL) { - 
Py_INCREF(Py_None); - PyTuple_SET_ITEM(retval, 1, Py_None); + PyTuple_SET_ITEM(retval, 1, Py_NewRef(Py_None)); } else { v = PyUnicode_FromString(cipher_protocol); if (v == NULL) @@ -2103,16 +2096,14 @@ _ssl__SSLSocket_compression_impl(PySSLSocket *self) } static PySSLContext *PySSL_get_context(PySSLSocket *self, void *closure) { - Py_INCREF(self->ctx); - return self->ctx; + return (PySSLContext*)Py_NewRef(self->ctx); } static int PySSL_set_context(PySSLSocket *self, PyObject *value, void *closure) { if (PyObject_TypeCheck(value, self->ctx->state->PySSLContext_Type)) { - Py_INCREF(value); - Py_SETREF(self->ctx, (PySSLContext *)value); + Py_SETREF(self->ctx, (PySSLContext *)Py_NewRef(value)); SSL_set_SSL_CTX(self->ssl, self->ctx->ctx); /* Set SSL* internal msg_callback to state of new context's state */ SSL_set_msg_callback( @@ -2150,8 +2141,7 @@ PySSL_get_server_hostname(PySSLSocket *self, void *c) { if (self->server_hostname == NULL) Py_RETURN_NONE; - Py_INCREF(self->server_hostname); - return self->server_hostname; + return Py_NewRef(self->server_hostname); } PyDoc_STRVAR(PySSL_get_server_hostname_doc, @@ -2166,8 +2156,7 @@ PySSL_get_owner(PySSLSocket *self, void *c) Py_RETURN_NONE; owner = PyWeakref_GetObject(self->owner); - Py_INCREF(owner); - return owner; + return Py_NewRef(owner); } static int @@ -2820,8 +2809,7 @@ PySSL_get_session(PySSLSocket *self, void *closure) { } assert(self->ctx); - pysess->ctx = self->ctx; - Py_INCREF(pysess->ctx); + pysess->ctx = (PySSLContext*)Py_NewRef(self->ctx); pysess->session = session; PyObject_GC_Track(pysess); return (PyObject *)pysess; @@ -4459,8 +4447,7 @@ get_sni_callback(PySSLContext *self, void *c) if (cb == NULL) { Py_RETURN_NONE; } - Py_INCREF(cb); - return cb; + return Py_NewRef(cb); } static int @@ -4482,8 +4469,7 @@ set_sni_callback(PySSLContext *self, PyObject *arg, void *c) "not a callable object"); return -1; } - Py_INCREF(arg); - self->set_sni_cb = arg; + self->set_sni_cb = Py_NewRef(arg); SSL_CTX_set_tlsext_servername_callback(self->ctx, _servername_callback); SSL_CTX_set_tlsext_servername_arg(self->ctx, self); } @@ -5196,7 +5182,7 @@ _ssl_get_default_verify_paths_impl(PyObject *module) #define CONVERT(info, target) { \ const char *tmp = (info); \ target = NULL; \ - if (!tmp) { Py_INCREF(Py_None); target = Py_None; } \ + if (!tmp) { target = Py_NewRef(Py_None); } \ else if ((target = PyUnicode_DecodeFSDefault(tmp)) == NULL) { \ target = PyBytes_FromString(tmp); } \ if (!target) goto error; \ @@ -5311,11 +5297,9 @@ certEncodingType(DWORD encodingType) } switch(encodingType) { case X509_ASN_ENCODING: - Py_INCREF(x509_asn); - return x509_asn; + return Py_NewRef(x509_asn); case PKCS_7_ASN_ENCODING: - Py_INCREF(pkcs_7_asn); - return pkcs_7_asn; + return Py_NewRef(pkcs_7_asn); default: return PyLong_FromLong(encodingType); } @@ -5717,8 +5701,7 @@ sslmodule_init_socketapi(PyObject *module) if ((sockmod == NULL) || (sockmod->Sock_Type == NULL)) { return -1; } - state->Sock_Type = sockmod->Sock_Type; - Py_INCREF(state->Sock_Type); + state->Sock_Type = (PyTypeObject*)Py_NewRef(sockmod->Sock_Type); return 0; } @@ -5881,6 +5864,9 @@ sslmodule_init_constants(PyObject *m) PyModule_AddIntConstant(m, "OP_IGNORE_UNEXPECTED_EOF", SSL_OP_IGNORE_UNEXPECTED_EOF); #endif +#ifdef SSL_OP_ENABLE_KTLS + PyModule_AddIntConstant(m, "OP_ENABLE_KTLS", SSL_OP_ENABLE_KTLS); +#endif #ifdef X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT PyModule_AddIntConstant(m, "HOSTFLAG_ALWAYS_CHECK_SUBJECT", @@ -5925,8 +5911,7 @@ sslmodule_init_constants(PyObject *m) #define addbool(m, 
key, value) \ do { \ PyObject *bool_obj = (value) ? Py_True : Py_False; \ - Py_INCREF(bool_obj); \ - PyModule_AddObject((m), (key), bool_obj); \ + PyModule_AddObject((m), (key), Py_NewRef(bool_obj)); \ } while (0) addbool(m, "HAS_SNI", 1); diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c index 03c125eb44fd3a..08f3457035b90c 100644 --- a/Modules/_ssl/debughelpers.c +++ b/Modules/_ssl/debughelpers.c @@ -87,8 +87,7 @@ _PySSL_msg_callback(int write_p, int version, int content_type, static PyObject * _PySSLContext_get_msg_callback(PySSLContext *self, void *c) { if (self->msg_cb != NULL) { - Py_INCREF(self->msg_cb); - return self->msg_cb; + return Py_NewRef(self->msg_cb); } else { Py_RETURN_NONE; } @@ -107,8 +106,7 @@ _PySSLContext_set_msg_callback(PySSLContext *self, PyObject *arg, void *c) { "not a callable object"); return -1; } - Py_INCREF(arg); - self->msg_cb = arg; + self->msg_cb = Py_NewRef(arg); SSL_CTX_set_msg_callback(self->ctx, _PySSL_msg_callback); } return 0; @@ -166,8 +164,7 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) static PyObject * _PySSLContext_get_keylog_filename(PySSLContext *self, void *c) { if (self->keylog_filename != NULL) { - Py_INCREF(self->keylog_filename); - return self->keylog_filename; + return Py_NewRef(self->keylog_filename); } else { Py_RETURN_NONE; } @@ -203,8 +200,7 @@ _PySSLContext_set_keylog_filename(PySSLContext *self, PyObject *arg, void *c) { "Can't malloc memory for keylog file"); return -1; } - Py_INCREF(arg); - self->keylog_filename = arg; + self->keylog_filename = Py_NewRef(arg); /* Write a header for seekable, empty files (this excludes pipes). */ PySSL_BEGIN_ALLOW_THREADS diff --git a/Modules/_struct.c b/Modules/_struct.c index f9bac34c1e38b8..c960b81b246ece 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1829,8 +1829,7 @@ Struct_iter_unpack(PyStructObject *self, PyObject *buffer) Py_DECREF(iter); return NULL; } - Py_INCREF(self); - iter->so = self; + iter->so = (PyStructObject*)Py_NewRef(self); iter->index = 0; return (PyObject *)iter; } @@ -2165,8 +2164,7 @@ cache_struct_converter(PyObject *module, PyObject *fmt, PyStructObject **ptr) _structmodulestate *state = get_struct_state(module); if (fmt == NULL) { - Py_DECREF(*ptr); - *ptr = NULL; + Py_SETREF(*ptr, NULL); return 1; } @@ -2178,8 +2176,7 @@ cache_struct_converter(PyObject *module, PyObject *fmt, PyStructObject **ptr) s_object = PyDict_GetItemWithError(state->cache, fmt); if (s_object != NULL) { - Py_INCREF(s_object); - *ptr = (PyStructObject *)s_object; + *ptr = (PyStructObject *)Py_NewRef(s_object); return Py_CLEANUP_SUPPORTED; } else if (PyErr_Occurred()) { diff --git a/Modules/_testbuffer.c b/Modules/_testbuffer.c index eea9d217557e21..63ed4dc6ca80e1 100644 --- a/Modules/_testbuffer.c +++ b/Modules/_testbuffer.c @@ -1524,8 +1524,7 @@ ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags) return -1; } - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); self->head->exports++; return 0; @@ -1788,8 +1787,7 @@ ndarray_subscript(NDArrayObject *self, PyObject *key) return unpack_single(base->buf, base->format, base->itemsize); } else if (key == Py_Ellipsis) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar"); @@ -2021,8 +2019,7 @@ ndarray_get_obj(NDArrayObject *self, void *closure) if (base->obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(base->obj); - return base->obj; + return Py_NewRef(base->obj); } static 
PyObject * @@ -2559,8 +2556,7 @@ cmp_contig(PyObject *self, PyObject *args) PyBuffer_Release(&v2); ret = equal ? Py_True : Py_False; - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static PyObject * @@ -2597,8 +2593,7 @@ is_contiguous(PyObject *self, PyObject *args) PyBuffer_Release(&view); } - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static Py_hash_t @@ -2748,8 +2743,7 @@ staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags) view->obj = NULL; /* Don't use this in new code. */ } else { - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); } return 0; diff --git a/Modules/_testcapi/datetime.c b/Modules/_testcapi/datetime.c new file mode 100644 index 00000000000000..88f992915fa8c1 --- /dev/null +++ b/Modules/_testcapi/datetime.c @@ -0,0 +1,451 @@ +#include "parts.h" + +#include "datetime.h" // PyDateTimeAPI + + +static int test_run_counter = 0; + +static PyObject * +test_datetime_capi(PyObject *self, PyObject *args) +{ + if (PyDateTimeAPI) { + if (test_run_counter) { + /* Probably regrtest.py -R */ + Py_RETURN_NONE; + } + else { + PyErr_SetString(PyExc_AssertionError, + "PyDateTime_CAPI somehow initialized"); + return NULL; + } + } + test_run_counter++; + PyDateTime_IMPORT; + + if (PyDateTimeAPI) { + Py_RETURN_NONE; + } + return NULL; +} + +/* Functions exposing the C API type checking for testing */ +#define MAKE_DATETIME_CHECK_FUNC(check_method, exact_method) \ +do { \ + PyObject *obj; \ + int exact = 0; \ + if (!PyArg_ParseTuple(args, "O|p", &obj, &exact)) { \ + return NULL; \ + } \ + int rv = exact?exact_method(obj):check_method(obj); \ + if (rv) { \ + Py_RETURN_TRUE; \ + } \ + Py_RETURN_FALSE; \ +} while (0) \ + +static PyObject * +datetime_check_date(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDate_Check, PyDate_CheckExact); +} + +static PyObject * +datetime_check_time(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyTime_Check, PyTime_CheckExact); +} + +static PyObject * +datetime_check_datetime(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDateTime_Check, PyDateTime_CheckExact); +} + +static PyObject * +datetime_check_delta(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyDelta_Check, PyDelta_CheckExact); +} + +static PyObject * +datetime_check_tzinfo(PyObject *self, PyObject *args) +{ + MAKE_DATETIME_CHECK_FUNC(PyTZInfo_Check, PyTZInfo_CheckExact); +} +#undef MAKE_DATETIME_CHECK_FUNC + + +/* Makes three variations on timezone representing UTC-5: + 1. timezone with offset and name from PyDateTimeAPI + 2. timezone with offset and name from PyTimeZone_FromOffsetAndName + 3. 
timezone with offset (no name) from PyTimeZone_FromOffset */ +static PyObject * +make_timezones_capi(PyObject *self, PyObject *args) +{ + PyObject *offset = PyDelta_FromDSU(0, -18000, 0); + PyObject *name = PyUnicode_FromString("EST"); + + PyObject *est_zone_capi = PyDateTimeAPI->TimeZone_FromTimeZone(offset, name); + PyObject *est_zone_macro = PyTimeZone_FromOffsetAndName(offset, name); + PyObject *est_zone_macro_noname = PyTimeZone_FromOffset(offset); + + Py_DecRef(offset); + Py_DecRef(name); + + PyObject *rv = PyTuple_New(3); + if (rv == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(rv, 0, est_zone_capi); + PyTuple_SET_ITEM(rv, 1, est_zone_macro); + PyTuple_SET_ITEM(rv, 2, est_zone_macro_noname); + + return rv; +} + +static PyObject * +get_timezones_offset_zero(PyObject *self, PyObject *args) +{ + PyObject *offset = PyDelta_FromDSU(0, 0, 0); + PyObject *name = PyUnicode_FromString(""); + + // These two should return the UTC singleton + PyObject *utc_singleton_0 = PyTimeZone_FromOffset(offset); + PyObject *utc_singleton_1 = PyTimeZone_FromOffsetAndName(offset, NULL); + + // This one will return +00:00 zone, but not the UTC singleton + PyObject *non_utc_zone = PyTimeZone_FromOffsetAndName(offset, name); + + Py_DecRef(offset); + Py_DecRef(name); + + PyObject *rv = PyTuple_New(3); + PyTuple_SET_ITEM(rv, 0, utc_singleton_0); + PyTuple_SET_ITEM(rv, 1, utc_singleton_1); + PyTuple_SET_ITEM(rv, 2, non_utc_zone); + + return rv; +} + +static PyObject * +get_timezone_utc_capi(PyObject *self, PyObject *args) +{ + int macro = 0; + if (!PyArg_ParseTuple(args, "|p", &macro)) { + return NULL; + } + if (macro) { + return Py_NewRef(PyDateTime_TimeZone_UTC); + } + return Py_NewRef(PyDateTimeAPI->TimeZone_UTC); +} + +static PyObject * +get_date_fromdate(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + + if (!PyArg_ParseTuple(args, "piii", &macro, &year, &month, &day)) { + return NULL; + } + + if (macro) { + rv = PyDate_FromDate(year, month, day); + } + else { + rv = PyDateTimeAPI->Date_FromDate( + year, month, day, + PyDateTimeAPI->DateType); + } + return rv; +} + +static PyObject * +get_datetime_fromdateandtime(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + int hour, minute, second, microsecond; + + if (!PyArg_ParseTuple(args, "piiiiiii", + &macro, + &year, &month, &day, + &hour, &minute, &second, &microsecond)) { + return NULL; + } + + if (macro) { + rv = PyDateTime_FromDateAndTime( + year, month, day, + hour, minute, second, microsecond); + } + else { + rv = PyDateTimeAPI->DateTime_FromDateAndTime( + year, month, day, + hour, minute, second, microsecond, + Py_None, + PyDateTimeAPI->DateTimeType); + } + return rv; +} + +static PyObject * +get_datetime_fromdateandtimeandfold(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int year, month, day; + int hour, minute, second, microsecond, fold; + + if (!PyArg_ParseTuple(args, "piiiiiiii", + &macro, + &year, &month, &day, + &hour, &minute, &second, &microsecond, + &fold)) { + return NULL; + } + + if (macro) { + rv = PyDateTime_FromDateAndTimeAndFold( + year, month, day, + hour, minute, second, microsecond, + fold); + } + else { + rv = PyDateTimeAPI->DateTime_FromDateAndTimeAndFold( + year, month, day, + hour, minute, second, microsecond, + Py_None, + fold, + PyDateTimeAPI->DateTimeType); + } + return rv; +} + +static PyObject * +get_time_fromtime(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int hour, minute, second, microsecond; + + if
(!PyArg_ParseTuple(args, "piiii", + &macro, + &hour, &minute, &second, &microsecond)) + { + return NULL; + } + + if (macro) { + rv = PyTime_FromTime(hour, minute, second, microsecond); + } + else { + rv = PyDateTimeAPI->Time_FromTime( + hour, minute, second, microsecond, + Py_None, + PyDateTimeAPI->TimeType); + } + return rv; +} + +static PyObject * +get_time_fromtimeandfold(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int hour, minute, second, microsecond, fold; + + if (!PyArg_ParseTuple(args, "piiiii", + &macro, + &hour, &minute, &second, &microsecond, + &fold)) { + return NULL; + } + + if (macro) { + rv = PyTime_FromTimeAndFold(hour, minute, second, microsecond, fold); + } + else { + rv = PyDateTimeAPI->Time_FromTimeAndFold( + hour, minute, second, microsecond, + Py_None, + fold, + PyDateTimeAPI->TimeType); + } + return rv; +} + +static PyObject * +get_delta_fromdsu(PyObject *self, PyObject *args) +{ + PyObject *rv = NULL; + int macro; + int days, seconds, microseconds; + + if (!PyArg_ParseTuple(args, "piii", + &macro, + &days, &seconds, &microseconds)) { + return NULL; + } + + if (macro) { + rv = PyDelta_FromDSU(days, seconds, microseconds); + } + else { + rv = PyDateTimeAPI->Delta_FromDelta( + days, seconds, microseconds, 1, + PyDateTimeAPI->DeltaType); + } + + return rv; +} + +static PyObject * +get_date_fromtimestamp(PyObject *self, PyObject *args) +{ + PyObject *tsargs = NULL, *ts = NULL, *rv = NULL; + int macro = 0; + + if (!PyArg_ParseTuple(args, "O|p", &ts, &macro)) { + return NULL; + } + + // Construct the argument tuple + if ((tsargs = PyTuple_Pack(1, ts)) == NULL) { + return NULL; + } + + // Pass along to the API function + if (macro) { + rv = PyDate_FromTimestamp(tsargs); + } + else { + rv = PyDateTimeAPI->Date_FromTimestamp( + (PyObject *)PyDateTimeAPI->DateType, tsargs + ); + } + + Py_DECREF(tsargs); + return rv; +} + +static PyObject * +get_datetime_fromtimestamp(PyObject *self, PyObject *args) +{ + int macro = 0; + int usetz = 0; + PyObject *tsargs = NULL, *ts = NULL, *tzinfo = Py_None, *rv = NULL; + if (!PyArg_ParseTuple(args, "OO|pp", &ts, &tzinfo, &usetz, &macro)) { + return NULL; + } + + // Construct the argument tuple + if (usetz) { + tsargs = PyTuple_Pack(2, ts, tzinfo); + } + else { + tsargs = PyTuple_Pack(1, ts); + } + + if (tsargs == NULL) { + return NULL; + } + + // Pass along to the API function + if (macro) { + rv = PyDateTime_FromTimestamp(tsargs); + } + else { + rv = PyDateTimeAPI->DateTime_FromTimestamp( + (PyObject *)PyDateTimeAPI->DateTimeType, tsargs, NULL + ); + } + + Py_DECREF(tsargs); + return rv; +} + +static PyObject * +test_PyDateTime_GET(PyObject *self, PyObject *obj) +{ + int year, month, day; + + year = PyDateTime_GET_YEAR(obj); + month = PyDateTime_GET_MONTH(obj); + day = PyDateTime_GET_DAY(obj); + + return Py_BuildValue("(iii)", year, month, day); +} + +static PyObject * +test_PyDateTime_DATE_GET(PyObject *self, PyObject *obj) +{ + int hour = PyDateTime_DATE_GET_HOUR(obj); + int minute = PyDateTime_DATE_GET_MINUTE(obj); + int second = PyDateTime_DATE_GET_SECOND(obj); + int microsecond = PyDateTime_DATE_GET_MICROSECOND(obj); + PyObject *tzinfo = PyDateTime_DATE_GET_TZINFO(obj); + + return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); +} + +static PyObject * +test_PyDateTime_TIME_GET(PyObject *self, PyObject *obj) +{ + int hour = PyDateTime_TIME_GET_HOUR(obj); + int minute = PyDateTime_TIME_GET_MINUTE(obj); + int second = PyDateTime_TIME_GET_SECOND(obj); + int microsecond = PyDateTime_TIME_GET_MICROSECOND(obj); + PyObject
*tzinfo = PyDateTime_TIME_GET_TZINFO(obj); + + return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); +} + +static PyObject * +test_PyDateTime_DELTA_GET(PyObject *self, PyObject *obj) +{ + int days = PyDateTime_DELTA_GET_DAYS(obj); + int seconds = PyDateTime_DELTA_GET_SECONDS(obj); + int microseconds = PyDateTime_DELTA_GET_MICROSECONDS(obj); + + return Py_BuildValue("(iii)", days, seconds, microseconds); +} + +static PyMethodDef test_methods[] = { + {"PyDateTime_DATE_GET", test_PyDateTime_DATE_GET, METH_O}, + {"PyDateTime_DELTA_GET", test_PyDateTime_DELTA_GET, METH_O}, + {"PyDateTime_GET", test_PyDateTime_GET, METH_O}, + {"PyDateTime_TIME_GET", test_PyDateTime_TIME_GET, METH_O}, + {"datetime_check_date", datetime_check_date, METH_VARARGS}, + {"datetime_check_datetime", datetime_check_datetime, METH_VARARGS}, + {"datetime_check_delta", datetime_check_delta, METH_VARARGS}, + {"datetime_check_time", datetime_check_time, METH_VARARGS}, + {"datetime_check_tzinfo", datetime_check_tzinfo, METH_VARARGS}, + {"get_date_fromdate", get_date_fromdate, METH_VARARGS}, + {"get_date_fromtimestamp", get_date_fromtimestamp, METH_VARARGS}, + {"get_datetime_fromdateandtime", get_datetime_fromdateandtime, METH_VARARGS}, + {"get_datetime_fromdateandtimeandfold", get_datetime_fromdateandtimeandfold, METH_VARARGS}, + {"get_datetime_fromtimestamp", get_datetime_fromtimestamp, METH_VARARGS}, + {"get_delta_fromdsu", get_delta_fromdsu, METH_VARARGS}, + {"get_time_fromtime", get_time_fromtime, METH_VARARGS}, + {"get_time_fromtimeandfold", get_time_fromtimeandfold, METH_VARARGS}, + {"get_timezone_utc_capi", get_timezone_utc_capi, METH_VARARGS}, + {"get_timezones_offset_zero", get_timezones_offset_zero, METH_NOARGS}, + {"make_timezones_capi", make_timezones_capi, METH_NOARGS}, + {"test_datetime_capi", test_datetime_capi, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_DateTime(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapi/docstring.c b/Modules/_testcapi/docstring.c new file mode 100644 index 00000000000000..a997c54a8a69c4 --- /dev/null +++ b/Modules/_testcapi/docstring.c @@ -0,0 +1,107 @@ +#include "parts.h" + + +PyDoc_STRVAR(docstring_empty, +"" +); + +PyDoc_STRVAR(docstring_no_signature, +"This docstring has no signature." +); + +PyDoc_STRVAR(docstring_with_invalid_signature, +"docstring_with_invalid_signature($module, /, boo)\n" +"\n" +"This docstring has an invalid signature." +); + +PyDoc_STRVAR(docstring_with_invalid_signature2, +"docstring_with_invalid_signature2($module, /, boo)\n" +"\n" +"--\n" +"\n" +"This docstring also has an invalid signature." +); + +PyDoc_STRVAR(docstring_with_signature, +"docstring_with_signature($module, /, sig)\n" +"--\n" +"\n" +"This docstring has a valid signature." +); + +PyDoc_STRVAR(docstring_with_signature_but_no_doc, +"docstring_with_signature_but_no_doc($module, /, sig)\n" +"--\n" +"\n" +); + +PyDoc_STRVAR(docstring_with_signature_and_extra_newlines, +"docstring_with_signature_and_extra_newlines($module, /, parameter)\n" +"--\n" +"\n" +"\n" +"This docstring has a valid signature and some extra newlines." 
+); + +PyDoc_STRVAR(docstring_with_signature_with_defaults, +"docstring_with_signature_with_defaults(module, s='avocado',\n" +" b=b'bytes', d=3.14, i=35, n=None, t=True, f=False,\n" +" local=the_number_three, sys=sys.maxsize,\n" +" exp=sys.maxsize - 1)\n" +"--\n" +"\n" +"\n" +"\n" +"This docstring has a valid signature with parameters,\n" +"and the parameters take defaults of varying types." +); + +/* This is here to provide a docstring for test_descr. */ +static PyObject * +test_with_docstring(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"docstring_empty", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_empty}, + {"docstring_no_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_no_signature}, + {"docstring_with_invalid_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_invalid_signature}, + {"docstring_with_invalid_signature2", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_invalid_signature2}, + {"docstring_with_signature", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature}, + {"docstring_with_signature_and_extra_newlines", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_and_extra_newlines}, + {"docstring_with_signature_but_no_doc", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_but_no_doc}, + {"docstring_with_signature_with_defaults", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_with_signature_with_defaults}, + {"no_docstring", + (PyCFunction)test_with_docstring, METH_NOARGS}, + {"test_with_docstring", + test_with_docstring, METH_NOARGS, + PyDoc_STR("This is a pretty normal docstring.")}, + {NULL}, +}; + +int +_PyTestCapi_Init_Docstring(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapi/float.c b/Modules/_testcapi/float.c new file mode 100644 index 00000000000000..26d99d990e0042 --- /dev/null +++ b/Modules/_testcapi/float.c @@ -0,0 +1,98 @@ +#define PY_SSIZE_T_CLEAN + +#include "parts.h" + + +// Test PyFloat_Pack2(), PyFloat_Pack4() and PyFloat_Pack8() +static PyObject * +test_float_pack(PyObject *self, PyObject *args) +{ + int size; + double d; + int le; + if (!PyArg_ParseTuple(args, "idi", &size, &d, &le)) { + return NULL; + } + switch (size) + { + case 2: + { + char data[2]; + if (PyFloat_Pack2(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + case 4: + { + char data[4]; + if (PyFloat_Pack4(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + case 8: + { + char data[8]; + if (PyFloat_Pack8(d, data, le) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); + } + default: break; + } + + PyErr_SetString(PyExc_ValueError, "size must 2, 4 or 8"); + return NULL; +} + + +// Test PyFloat_Unpack2(), PyFloat_Unpack4() and PyFloat_Unpack8() +static PyObject * +test_float_unpack(PyObject *self, PyObject *args) +{ + assert(!PyErr_Occurred()); + const char *data; + Py_ssize_t size; + int le; + if (!PyArg_ParseTuple(args, "y#i", &data, &size, &le)) { + return NULL; + } + double d; + switch (size) + { + case 2: + d = PyFloat_Unpack2(data, le); + break; + case 4: + d = PyFloat_Unpack4(data, le); + break; + case 8: + d = PyFloat_Unpack8(data, le); + break; + default: + PyErr_SetString(PyExc_ValueError, "data length must 2, 
4 or 8 bytes"); + return NULL; + } + + if (d == -1.0 && PyErr_Occurred()) { + return NULL; + } + return PyFloat_FromDouble(d); +} + +static PyMethodDef test_methods[] = { + {"float_pack", test_float_pack, METH_VARARGS, NULL}, + {"float_unpack", test_float_unpack, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Float(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/getargs.c b/Modules/_testcapi/getargs.c new file mode 100644 index 00000000000000..25a8e5fe8b605b --- /dev/null +++ b/Modules/_testcapi/getargs.c @@ -0,0 +1,920 @@ +/* + * Tests for Python/getargs.c and Python/modsupport.c; + * APIs that parse and build arguments. + */ + +#define PY_SSIZE_T_CLEAN + +#include "parts.h" + +static PyObject * +parse_tuple_and_keywords(PyObject *self, PyObject *args) +{ + PyObject *sub_args; + PyObject *sub_kwargs; + const char *sub_format; + PyObject *sub_keywords; + + double buffers[8][4]; /* double ensures alignment where necessary */ + PyObject *converted[8]; + char *keywords[8 + 1]; /* space for NULL at end */ + + PyObject *return_value = NULL; + + if (!PyArg_ParseTuple(args, "OOsO:parse_tuple_and_keywords", + &sub_args, &sub_kwargs, &sub_format, &sub_keywords)) + { + return NULL; + } + + if (!(PyList_CheckExact(sub_keywords) || + PyTuple_CheckExact(sub_keywords))) + { + PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: " + "sub_keywords must be either list or tuple"); + return NULL; + } + + memset(buffers, 0, sizeof(buffers)); + memset(converted, 0, sizeof(converted)); + memset(keywords, 0, sizeof(keywords)); + + Py_ssize_t size = PySequence_Fast_GET_SIZE(sub_keywords); + if (size > 8) { + PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: too many keywords in sub_keywords"); + goto exit; + } + + for (Py_ssize_t i = 0; i < size; i++) { + PyObject *o = PySequence_Fast_GET_ITEM(sub_keywords, i); + if (!PyUnicode_FSConverter(o, (void *)(converted + i))) { + PyErr_Format(PyExc_ValueError, + "parse_tuple_and_keywords: " + "could not convert keywords[%zd] to narrow string", i); + goto exit; + } + keywords[i] = PyBytes_AS_STRING(converted[i]); + } + + int result = PyArg_ParseTupleAndKeywords(sub_args, sub_kwargs, + sub_format, keywords, + buffers + 0, buffers + 1, buffers + 2, buffers + 3, + buffers + 4, buffers + 5, buffers + 6, buffers + 7); + + if (result) { + return_value = Py_NewRef(Py_None); + } + +exit: + size = sizeof(converted) / sizeof(converted[0]); + for (Py_ssize_t i = 0; i < size; i++) { + Py_XDECREF(converted[i]); + } + return return_value; +} + +static PyObject * +get_args(PyObject *self, PyObject *args) +{ + if (args == NULL) { + args = Py_None; + } + return Py_NewRef(args); +} + +static PyObject * +get_kwargs(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (kwargs == NULL) { + kwargs = Py_None; + } + return Py_NewRef(kwargs); +} + +static PyObject * +getargs_w_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + + if (!PyArg_ParseTuple(args, "w*:getargs_w_star", &buffer)) { + return NULL; + } + + if (2 <= buffer.len) { + char *str = buffer.buf; + str[0] = '['; + str[buffer.len-1] = ']'; + } + + PyObject *result = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return result; +} + +static PyObject * +test_empty_argparse(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Test that formats can begin with '|'. See issue #4720. 
*/ + PyObject *dict = NULL; + static char *kwlist[] = {NULL}; + PyObject *tuple = PyTuple_New(0); + if (!tuple) { + return NULL; + } + int result; + if (!(result = PyArg_ParseTuple(tuple, "|:test_empty_argparse"))) { + goto done; + } + dict = PyDict_New(); + if (!dict) { + goto done; + } + result = PyArg_ParseTupleAndKeywords(tuple, dict, "|:test_empty_argparse", + kwlist); + done: + Py_DECREF(tuple); + Py_XDECREF(dict); + if (!result) { + return NULL; + } + Py_RETURN_NONE; +} + +/* Test tuple argument processing */ +static PyObject * +getargs_tuple(PyObject *self, PyObject *args) +{ + int a, b, c; + if (!PyArg_ParseTuple(args, "i(ii)", &a, &b, &c)) { + return NULL; + } + return Py_BuildValue("iii", a, b, c); +} + +/* test PyArg_ParseTupleAndKeywords */ +static PyObject * +getargs_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"arg1","arg2","arg3","arg4","arg5", NULL}; + static const char fmt[] = "(ii)i|(i(ii))(iii)i"; + int int_args[10] = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, + &int_args[0], &int_args[1], &int_args[2], &int_args[3], &int_args[4], + &int_args[5], &int_args[6], &int_args[7], &int_args[8], &int_args[9])) + { + return NULL; + } + return Py_BuildValue("iiiiiiiiii", + int_args[0], int_args[1], int_args[2], int_args[3], int_args[4], + int_args[5], int_args[6], int_args[7], int_args[8], int_args[9]); +} + +/* test PyArg_ParseTupleAndKeywords keyword-only arguments */ +static PyObject * +getargs_keyword_only(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"required", "optional", "keyword_only", NULL}; + int required = -1; + int optional = -1; + int keyword_only = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|i$i", keywords, + &required, &optional, &keyword_only)) + { + return NULL; + } + return Py_BuildValue("iii", required, optional, keyword_only); +} + +/* test PyArg_ParseTupleAndKeywords positional-only arguments */ +static PyObject * +getargs_positional_only_and_keywords(PyObject *self, PyObject *args, + PyObject *kwargs) +{ + static char *keywords[] = {"", "", "keyword", NULL}; + int required = -1; + int optional = -1; + int keyword = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|ii", keywords, + &required, &optional, &keyword)) + { + return NULL; + } + return Py_BuildValue("iii", required, optional, keyword); +} + +/* Functions to call PyArg_ParseTuple with integer format codes, + and return the result. 
+*/ +static PyObject * +getargs_b(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "b", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_B(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "B", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_h(PyObject *self, PyObject *args) +{ + short value; + if (!PyArg_ParseTuple(args, "h", &value)) { + return NULL; + } + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_H(PyObject *self, PyObject *args) +{ + unsigned short value; + if (!PyArg_ParseTuple(args, "H", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_I(PyObject *self, PyObject *args) +{ + unsigned int value; + if (!PyArg_ParseTuple(args, "I", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_k(PyObject *self, PyObject *args) +{ + unsigned long value; + if (!PyArg_ParseTuple(args, "k", &value)) { + return NULL; + } + return PyLong_FromUnsignedLong(value); +} + +static PyObject * +getargs_i(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "i", &value)) { + return NULL; + } + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_l(PyObject *self, PyObject *args) +{ + long value; + if (!PyArg_ParseTuple(args, "l", &value)) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +getargs_n(PyObject *self, PyObject *args) +{ + Py_ssize_t value; + if (!PyArg_ParseTuple(args, "n", &value)) { + return NULL; + } + return PyLong_FromSsize_t(value); +} + +static PyObject * +getargs_p(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "p", &value)) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +getargs_L(PyObject *self, PyObject *args) +{ + long long value; + if (!PyArg_ParseTuple(args, "L", &value)) { + return NULL; + } + return PyLong_FromLongLong(value); +} + +static PyObject * +getargs_K(PyObject *self, PyObject *args) +{ + unsigned long long value; + if (!PyArg_ParseTuple(args, "K", &value)) { + return NULL; + } + return PyLong_FromUnsignedLongLong(value); +} + +/* This function not only tests the 'k' getargs code, but also the + PyLong_AsUnsignedLongMask() function. 
*/ +static PyObject * +test_k_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *tuple, *num; + unsigned long value; + + tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + /* a number larger than ULONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) { + return NULL; + } + + value = PyLong_AsUnsignedLongMask(num); + if (value != ULONG_MAX) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: " + "PyLong_AsUnsignedLongMask() returned wrong value for long 0xFFF...FFF"); + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = 0; + if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + return NULL; + } + if (value != ULONG_MAX) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: k code returned wrong value for long 0xFFF...FFF"); + return NULL; + } + + Py_DECREF(num); + num = PyLong_FromString("-FFFFFFFF000000000000000042", NULL, 16); + if (num == NULL) { + return NULL; + } + + value = PyLong_AsUnsignedLongMask(num); + if (value != (unsigned long)-0x42) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: " + "PyLong_AsUnsignedLongMask() returned wrong value for long -0xFFF..000042"); + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = 0; + if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + return NULL; + } + if (value != (unsigned long)-0x42) { + PyErr_SetString(PyExc_AssertionError, + "test_k_code: k code returned wrong value for long -0xFFF..000042"); + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +static PyObject * +getargs_f(PyObject *self, PyObject *args) +{ + float f; + if (!PyArg_ParseTuple(args, "f", &f)) { + return NULL; + } + return PyFloat_FromDouble(f); +} + +static PyObject * +getargs_d(PyObject *self, PyObject *args) +{ + double d; + if (!PyArg_ParseTuple(args, "d", &d)) { + return NULL; + } + return PyFloat_FromDouble(d); +} + +static PyObject * +getargs_D(PyObject *self, PyObject *args) +{ + Py_complex cval; + if (!PyArg_ParseTuple(args, "D", &cval)) { + return NULL; + } + return PyComplex_FromCComplex(cval); +} + +static PyObject * +getargs_S(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "S", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_Y(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "Y", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_U(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "U", &obj)) { + return NULL; + } + return Py_NewRef(obj); +} + +static PyObject * +getargs_c(PyObject *self, PyObject *args) +{ + char c; + if (!PyArg_ParseTuple(args, "c", &c)) { + return NULL; + } + return PyLong_FromLong((unsigned char)c); +} + +static PyObject * +getargs_C(PyObject *self, PyObject *args) +{ + int c; + if (!PyArg_ParseTuple(args, "C", &c)) { + return NULL; + } + return PyLong_FromLong(c); +} + +static PyObject * +getargs_s(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "s", &str)) { + return NULL; + } + return PyBytes_FromString(str); +} + +static PyObject * +getargs_s_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + PyObject *bytes; + if (!PyArg_ParseTuple(args, "s*", &buffer)) { + return NULL; + } + bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_s_hash(PyObject *self, PyObject *args) +{ + 
char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "s#", &str, &size)) { + return NULL; + } + return PyBytes_FromStringAndSize(str, size); +} + +static PyObject * +getargs_z(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "z", &str)) { + return NULL; + } + if (str != NULL) { + return PyBytes_FromString(str); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_z_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + PyObject *bytes; + if (!PyArg_ParseTuple(args, "z*", &buffer)) { + return NULL; + } + if (buffer.buf != NULL) { + bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + } + else { + bytes = Py_NewRef(Py_None); + } + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_z_hash(PyObject *self, PyObject *args) +{ + char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &str, &size)) { + return NULL; + } + if (str != NULL) { + return PyBytes_FromStringAndSize(str, size); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_y(PyObject *self, PyObject *args) +{ + char *str; + if (!PyArg_ParseTuple(args, "y", &str)) { + return NULL; + } + return PyBytes_FromString(str); +} + +static PyObject * +getargs_y_star(PyObject *self, PyObject *args) +{ + Py_buffer buffer; + if (!PyArg_ParseTuple(args, "y*", &buffer)) { + return NULL; + } + PyObject *bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); + PyBuffer_Release(&buffer); + return bytes; +} + +static PyObject * +getargs_y_hash(PyObject *self, PyObject *args) +{ + char *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "y#", &str, &size)) { + return NULL; + } + return PyBytes_FromStringAndSize(str, size); +} + +static PyObject * +getargs_u(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + if (!PyArg_ParseTuple(args, "u", &str)) { + return NULL; + } + return PyUnicode_FromWideChar(str, -1); +} + +static PyObject * +getargs_u_hash(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "u#", &str, &size)) { + return NULL; + } + return PyUnicode_FromWideChar(str, size); +} + +static PyObject * +getargs_Z(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + if (!PyArg_ParseTuple(args, "Z", &str)) { + return NULL; + } + if (str != NULL) { + return PyUnicode_FromWideChar(str, -1); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_Z_hash(PyObject *self, PyObject *args) +{ + Py_UNICODE *str; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Z#", &str, &size)) { + return NULL; + } + if (str != NULL) { + return PyUnicode_FromWideChar(str, size); + } + Py_RETURN_NONE; +} + +static PyObject * +getargs_es(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + char *str; + + if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) { + return NULL; + } + if (!PyArg_Parse(arg, "es", encoding, &str)) { + return NULL; + } + PyObject *result = PyBytes_FromString(str); + PyMem_Free(str); + return result; +} + +static PyObject * +getargs_et(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + char *str; + + if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) { + return NULL; + } + if (!PyArg_Parse(arg, "et", encoding, &str)) { + return NULL; + } + PyObject *result = PyBytes_FromString(str); + PyMem_Free(str); + return result; +} + +static PyObject * +getargs_es_hash(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + PyByteArrayObject *buffer = NULL; + char *str = NULL; + Py_ssize_t size; + + if 
(!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) { + return NULL; + } + if (buffer != NULL) { + str = PyByteArray_AS_STRING(buffer); + size = PyByteArray_GET_SIZE(buffer); + } + if (!PyArg_Parse(arg, "es#", encoding, &str, &size)) { + return NULL; + } + PyObject *result = PyBytes_FromStringAndSize(str, size); + if (buffer == NULL) { + PyMem_Free(str); + } + return result; +} + +static PyObject * +getargs_et_hash(PyObject *self, PyObject *args) +{ + PyObject *arg; + const char *encoding = NULL; + PyByteArrayObject *buffer = NULL; + char *str = NULL; + Py_ssize_t size; + + if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) { + return NULL; + } + if (buffer != NULL) { + str = PyByteArray_AS_STRING(buffer); + size = PyByteArray_GET_SIZE(buffer); + } + if (!PyArg_Parse(arg, "et#", encoding, &str, &size)) { + return NULL; + } + PyObject *result = PyBytes_FromStringAndSize(str, size); + if (buffer == NULL) { + PyMem_Free(str); + } + return result; +} + +/* Test the L code for PyArg_ParseTuple. This should deliver a long long + for both long and int arguments. The test may leak a little memory if + it fails. +*/ +static PyObject * +test_L_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *tuple, *num; + long long value; + + tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + num = PyLong_FromLong(42); + if (num == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { + return NULL; + } + if (value != 42) { + PyErr_SetString(PyExc_AssertionError, + "test_L_code: L code returned wrong value for long 42"); + return NULL; + } + + Py_DECREF(num); + num = PyLong_FromLong(42); + if (num == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { + return NULL; + } + if (value != 42) { + PyErr_SetString(PyExc_AssertionError, + "test_L_code: L code returned wrong value for int 42"); + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +/* Test the s and z codes for PyArg_ParseTuple. 
+*/ +static PyObject * +test_s_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Unicode strings should be accepted */ + PyObject *tuple = PyTuple_New(1); + if (tuple == NULL) { + return NULL; + } + + PyObject *obj = PyUnicode_Decode("t\xeate", strlen("t\xeate"), + "latin-1", NULL); + if (obj == NULL) { + return NULL; + } + + PyTuple_SET_ITEM(tuple, 0, obj); + + /* These two blocks used to raise a TypeError: + * "argument must be string without null bytes, not str" + */ + char *value; + if (!PyArg_ParseTuple(tuple, "s:test_s_code1", &value)) { + return NULL; + } + + if (!PyArg_ParseTuple(tuple, "z:test_s_code2", &value)) { + return NULL; + } + + Py_DECREF(tuple); + Py_RETURN_NONE; +} + +#undef PyArg_ParseTupleAndKeywords +PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, + const char *, char **, ...); + +static PyObject * +getargs_s_hash_int(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"", "", "x", NULL}; + Py_buffer buf = {NULL}; + const char *s; + int len; + int i = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|s#i", keywords, + &buf, &s, &len, &i)) + { + return NULL; + } + PyBuffer_Release(&buf); + Py_RETURN_NONE; +} + +static PyObject * +getargs_s_hash_int2(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"", "", "x", NULL}; + Py_buffer buf = {NULL}; + const char *s; + int len; + int i = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|(s#)i", keywords, + &buf, &s, &len, &i)) + { + return NULL; + } + PyBuffer_Release(&buf); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"get_args", get_args, METH_VARARGS}, + {"get_kwargs", _PyCFunction_CAST(get_kwargs), METH_VARARGS|METH_KEYWORDS}, + {"getargs_B", getargs_B, METH_VARARGS}, + {"getargs_C", getargs_C, METH_VARARGS}, + {"getargs_D", getargs_D, METH_VARARGS}, + {"getargs_H", getargs_H, METH_VARARGS}, + {"getargs_I", getargs_I, METH_VARARGS}, + {"getargs_K", getargs_K, METH_VARARGS}, + {"getargs_L", getargs_L, METH_VARARGS}, + {"getargs_S", getargs_S, METH_VARARGS}, + {"getargs_U", getargs_U, METH_VARARGS}, + {"getargs_Y", getargs_Y, METH_VARARGS}, + {"getargs_Z", getargs_Z, METH_VARARGS}, + {"getargs_Z_hash", getargs_Z_hash, METH_VARARGS}, + {"getargs_b", getargs_b, METH_VARARGS}, + {"getargs_c", getargs_c, METH_VARARGS}, + {"getargs_d", getargs_d, METH_VARARGS}, + {"getargs_es", getargs_es, METH_VARARGS}, + {"getargs_es_hash", getargs_es_hash, METH_VARARGS}, + {"getargs_et", getargs_et, METH_VARARGS}, + {"getargs_et_hash", getargs_et_hash, METH_VARARGS}, + {"getargs_f", getargs_f, METH_VARARGS}, + {"getargs_h", getargs_h, METH_VARARGS}, + {"getargs_i", getargs_i, METH_VARARGS}, + {"getargs_k", getargs_k, METH_VARARGS}, + {"getargs_keyword_only", _PyCFunction_CAST(getargs_keyword_only), METH_VARARGS|METH_KEYWORDS}, + {"getargs_keywords", _PyCFunction_CAST(getargs_keywords), METH_VARARGS|METH_KEYWORDS}, + {"getargs_l", getargs_l, METH_VARARGS}, + {"getargs_n", getargs_n, METH_VARARGS}, + {"getargs_p", getargs_p, METH_VARARGS}, + {"getargs_positional_only_and_keywords", _PyCFunction_CAST(getargs_positional_only_and_keywords), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s", getargs_s, METH_VARARGS}, + {"getargs_s_hash", getargs_s_hash, METH_VARARGS}, + {"getargs_s_hash_int", _PyCFunction_CAST(getargs_s_hash_int), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s_hash_int2", _PyCFunction_CAST(getargs_s_hash_int2), METH_VARARGS|METH_KEYWORDS}, + {"getargs_s_star", getargs_s_star, METH_VARARGS}, + {"getargs_tuple", 
getargs_tuple, METH_VARARGS}, + {"getargs_u", getargs_u, METH_VARARGS}, + {"getargs_u_hash", getargs_u_hash, METH_VARARGS}, + {"getargs_w_star", getargs_w_star, METH_VARARGS}, + {"getargs_y", getargs_y, METH_VARARGS}, + {"getargs_y_hash", getargs_y_hash, METH_VARARGS}, + {"getargs_y_star", getargs_y_star, METH_VARARGS}, + {"getargs_z", getargs_z, METH_VARARGS}, + {"getargs_z_hash", getargs_z_hash, METH_VARARGS}, + {"getargs_z_star", getargs_z_star, METH_VARARGS}, + {"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS}, + {"test_L_code", test_L_code, METH_NOARGS}, + {"test_empty_argparse", test_empty_argparse, METH_NOARGS}, + {"test_k_code", test_k_code, METH_NOARGS}, + {"test_s_code", test_s_code, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_GetArgs(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/long.c b/Modules/_testcapi/long.c new file mode 100644 index 00000000000000..1be8de5e576254 --- /dev/null +++ b/Modules/_testcapi/long.c @@ -0,0 +1,557 @@ +#include "parts.h" + + +static PyObject * +raiseTestError(const char* test_name, const char* msg) +{ + PyErr_Format(PyExc_AssertionError, "%s: %s", test_name, msg); + return NULL; +} + +/* Tests of PyLong_{As, From}{Unsigned,}Long(), and + PyLong_{As, From}{Unsigned,}LongLong(). + + Note that the meat of the test is contained in testcapi_long.h. + This is revolting, but delicate code duplication is worse: "almost + exactly the same" code is needed to test long long, but the ubiquitous + dependence on type names makes it impossible to use a parameterized + function. A giant macro would be even worse than this. A C++ template + would be perfect. + + The "report an error" functions are deliberately not part of the #include + file: if the test fails, you can set a breakpoint in the appropriate + error function directly, and crawl back from there in the debugger. +*/ + +#define UNBIND(X) Py_DECREF(X); (X) = NULL + +static PyObject * +raise_test_long_error(const char* msg) +{ + return raiseTestError("test_long_api", msg); +} + +#define TESTNAME test_long_api_inner +#define TYPENAME long +#define F_S_TO_PY PyLong_FromLong +#define F_PY_TO_S PyLong_AsLong +#define F_U_TO_PY PyLong_FromUnsignedLong +#define F_PY_TO_U PyLong_AsUnsignedLong + +#include "testcapi_long.h" + +static PyObject * +test_long_api(PyObject* self, PyObject *Py_UNUSED(ignored)) +{ + return TESTNAME(raise_test_long_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +static PyObject * +raise_test_longlong_error(const char* msg) +{ + return raiseTestError("test_longlong_api", msg); +} + +#define TESTNAME test_longlong_api_inner +#define TYPENAME long long +#define F_S_TO_PY PyLong_FromLongLong +#define F_PY_TO_S PyLong_AsLongLong +#define F_U_TO_PY PyLong_FromUnsignedLongLong +#define F_PY_TO_U PyLong_AsUnsignedLongLong + +#include "testcapi_long.h" + +static PyObject * +test_longlong_api(PyObject* self, PyObject *args) +{ + return TESTNAME(raise_test_longlong_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +/* Test the PyLong_AsLongAndOverflow API. General conversion to PY_LONG + is tested by test_long_api_inner. This test will concentrate on proper + handling of overflow. 
+*/ + +static PyObject * +test_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *num, *one, *temp; + long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LONG_MAX + 1 */ + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. */ + /* num is a number smaller than LONG_MIN even on 64-bit platforms */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LONG_MIN - 1 */ + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. 
*/ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MAX) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MIN) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/* Test the PyLong_AsLongLongAndOverflow API. General conversion to + long long is tested by test_long_api_inner. This test will + concentrate on proper handling of overflow. +*/ + +static PyObject * +test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *num, *one, *temp; + long long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LLONG_MAX on a typical machine. */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LLONG_MAX + 1 */ + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. 
*/ + /* num is a number smaller than LLONG_MIN on a typical platform */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LLONG_MIN - 1 */ + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_SETREF(num, temp); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. */ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MAX) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MIN) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/* Test the PyLong_As{Size,Ssize}_t API. At present this just tests that + non-integer arguments are handled correctly. It should be extended to + test overflow handling. 
+ */ + +static PyObject * +test_long_as_size_t(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + size_t out_u; + Py_ssize_t out_s; + + Py_INCREF(Py_None); + + out_u = PyLong_AsSize_t(Py_None); + if (out_u != (size_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + out_s = PyLong_AsSsize_t(Py_None); + if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +static PyObject * +test_long_as_unsigned_long_long_mask(PyObject *self, + PyObject *Py_UNUSED(ignored)) +{ + unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); + + if (res != (unsigned long long)-1 || !PyErr_Occurred()) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) didn't " + "complain"); + } + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) raised " + "something other than SystemError"); + } + PyErr_Clear(); + Py_RETURN_NONE; +} + +/* Test the PyLong_AsDouble API. At present this just tests that + non-integer arguments are handled correctly. + */ + +static PyObject * +test_long_as_double(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + double out; + + Py_INCREF(Py_None); + + out = PyLong_AsDouble(Py_None); + if (out != -1.0 || !PyErr_Occurred()) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +/* Simple test of _PyLong_NumBits and _PyLong_Sign. 
*/ +static PyObject * +test_long_numbits(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + struct triple { + long input; + size_t nbits; + int sign; + } testcases[] = {{0, 0, 0}, + {1L, 1, 1}, + {-1L, 1, -1}, + {2L, 2, 1}, + {-2L, 2, -1}, + {3L, 2, 1}, + {-3L, 2, -1}, + {4L, 3, 1}, + {-4L, 3, -1}, + {0x7fffL, 15, 1}, /* one Python int digit */ + {-0x7fffL, 15, -1}, + {0xffffL, 16, 1}, + {-0xffffL, 16, -1}, + {0xfffffffL, 28, 1}, + {-0xfffffffL, 28, -1}}; + size_t i; + + for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { + size_t nbits; + int sign; + PyObject *plong; + + plong = PyLong_FromLong(testcases[i].input); + if (plong == NULL) + return NULL; + nbits = _PyLong_NumBits(plong); + sign = _PyLong_Sign(plong); + + Py_DECREF(plong); + if (nbits != testcases[i].nbits) + return raiseTestError("test_long_numbits", + "wrong result for _PyLong_NumBits"); + if (sign != testcases[i].sign) + return raiseTestError("test_long_numbits", + "wrong result for _PyLong_Sign"); + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_long_and_overflow", test_long_and_overflow, METH_NOARGS}, + {"test_long_api", test_long_api, METH_NOARGS}, + {"test_long_as_double", test_long_as_double, METH_NOARGS}, + {"test_long_as_size_t", test_long_as_size_t, METH_NOARGS}, + {"test_long_as_unsigned_long_long_mask", test_long_as_unsigned_long_long_mask, METH_NOARGS}, + {"test_long_long_and_overflow",test_long_long_and_overflow, METH_NOARGS}, + {"test_long_numbits", test_long_numbits, METH_NOARGS}, + {"test_longlong_api", test_longlong_api, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Long(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c new file mode 100644 index 00000000000000..ef723bf0658533 --- /dev/null +++ b/Modules/_testcapi/mem.c @@ -0,0 +1,643 @@ +#include "parts.h" + +#include + + +typedef struct { + PyMemAllocatorEx alloc; + + size_t malloc_size; + size_t calloc_nelem; + size_t calloc_elsize; + void *realloc_ptr; + size_t realloc_new_size; + void *free_ptr; + void *ctx; +} alloc_hook_t; + +static void * +hook_malloc(void *ctx, size_t size) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->malloc_size = size; + return hook->alloc.malloc(hook->alloc.ctx, size); +} + +static void * +hook_calloc(void *ctx, size_t nelem, size_t elsize) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->calloc_nelem = nelem; + hook->calloc_elsize = elsize; + return hook->alloc.calloc(hook->alloc.ctx, nelem, elsize); +} + +static void * +hook_realloc(void *ctx, void *ptr, size_t new_size) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->realloc_ptr = ptr; + hook->realloc_new_size = new_size; + return hook->alloc.realloc(hook->alloc.ctx, ptr, new_size); +} + +static void +hook_free(void *ctx, void *ptr) +{ + alloc_hook_t *hook = (alloc_hook_t *)ctx; + hook->ctx = ctx; + hook->free_ptr = ptr; + hook->alloc.free(hook->alloc.ctx, ptr); +} + +/* Most part of the following code is inherited from the pyfailmalloc project + * written by Victor Stinner. 
*/ +static struct { + int installed; + PyMemAllocatorEx raw; + PyMemAllocatorEx mem; + PyMemAllocatorEx obj; +} FmHook; + +static struct { + int start; + int stop; + Py_ssize_t count; +} FmData; + +static int +fm_nomemory(void) +{ + FmData.count++; + if (FmData.count > FmData.start && + (FmData.stop <= 0 || FmData.count <= FmData.stop)) + { + return 1; + } + return 0; +} + +static void * +hook_fmalloc(void *ctx, size_t size) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->malloc(alloc->ctx, size); +} + +static void * +hook_fcalloc(void *ctx, size_t nelem, size_t elsize) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->calloc(alloc->ctx, nelem, elsize); +} + +static void * +hook_frealloc(void *ctx, void *ptr, size_t new_size) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + if (fm_nomemory()) { + return NULL; + } + return alloc->realloc(alloc->ctx, ptr, new_size); +} + +static void +hook_ffree(void *ctx, void *ptr) +{ + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + alloc->free(alloc->ctx, ptr); +} + +static void +fm_setup_hooks(void) +{ + if (FmHook.installed) { + return; + } + FmHook.installed = 1; + + PyMemAllocatorEx alloc; + alloc.malloc = hook_fmalloc; + alloc.calloc = hook_fcalloc; + alloc.realloc = hook_frealloc; + alloc.free = hook_ffree; + PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); + PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); + PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + + alloc.ctx = &FmHook.raw; + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + + alloc.ctx = &FmHook.mem; + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + + alloc.ctx = &FmHook.obj; + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); +} + +static void +fm_remove_hooks(void) +{ + if (FmHook.installed) { + FmHook.installed = 0; + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + } +} + +static PyObject * +set_nomemory(PyObject *self, PyObject *args) +{ + /* Memory allocation fails after 'start' allocation requests, and until + * 'stop' allocation requests except when 'stop' is negative or equal + * to 0 (default) in which case allocation failures never stop. 
*/ + FmData.count = 0; + FmData.stop = 0; + if (!PyArg_ParseTuple(args, "i|i", &FmData.start, &FmData.stop)) { + return NULL; + } + fm_setup_hooks(); + Py_RETURN_NONE; +} + +static PyObject * +remove_mem_hooks(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + fm_remove_hooks(); + Py_RETURN_NONE; +} + +static PyObject * +test_setallocators(PyMemAllocatorDomain domain) +{ + PyObject *res = NULL; + const char *error_msg; + alloc_hook_t hook; + + memset(&hook, 0, sizeof(hook)); + + PyMemAllocatorEx alloc; + alloc.ctx = &hook; + alloc.malloc = &hook_malloc; + alloc.calloc = &hook_calloc; + alloc.realloc = &hook_realloc; + alloc.free = &hook_free; + PyMem_GetAllocator(domain, &hook.alloc); + PyMem_SetAllocator(domain, &alloc); + + /* malloc, realloc, free */ + size_t size = 42; + hook.ctx = NULL; + void *ptr; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr = PyMem_RawMalloc(size); + break; + case PYMEM_DOMAIN_MEM: + ptr = PyMem_Malloc(size); + break; + case PYMEM_DOMAIN_OBJ: + ptr = PyObject_Malloc(size); + break; + default: + ptr = NULL; + break; + } + +#define CHECK_CTX(FUNC) \ + if (hook.ctx != &hook) { \ + error_msg = FUNC " wrong context"; \ + goto fail; \ + } \ + hook.ctx = NULL; /* reset for next check */ + + if (ptr == NULL) { + error_msg = "malloc failed"; + goto fail; + } + CHECK_CTX("malloc"); + if (hook.malloc_size != size) { + error_msg = "malloc invalid size"; + goto fail; + } + + size_t size2 = 200; + void *ptr2; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr2 = PyMem_RawRealloc(ptr, size2); + break; + case PYMEM_DOMAIN_MEM: + ptr2 = PyMem_Realloc(ptr, size2); + break; + case PYMEM_DOMAIN_OBJ: + ptr2 = PyObject_Realloc(ptr, size2); + break; + default: + ptr2 = NULL; + break; + } + + if (ptr2 == NULL) { + error_msg = "realloc failed"; + goto fail; + } + CHECK_CTX("realloc"); + if (hook.realloc_ptr != ptr || hook.realloc_new_size != size2) { + error_msg = "realloc invalid parameters"; + goto fail; + } + + switch(domain) { + case PYMEM_DOMAIN_RAW: + PyMem_RawFree(ptr2); + break; + case PYMEM_DOMAIN_MEM: + PyMem_Free(ptr2); + break; + case PYMEM_DOMAIN_OBJ: + PyObject_Free(ptr2); + break; + } + + CHECK_CTX("free"); + if (hook.free_ptr != ptr2) { + error_msg = "free invalid pointer"; + goto fail; + } + + /* calloc, free */ + size_t nelem = 2; + size_t elsize = 5; + switch(domain) { + case PYMEM_DOMAIN_RAW: + ptr = PyMem_RawCalloc(nelem, elsize); + break; + case PYMEM_DOMAIN_MEM: + ptr = PyMem_Calloc(nelem, elsize); + break; + case PYMEM_DOMAIN_OBJ: + ptr = PyObject_Calloc(nelem, elsize); + break; + default: + ptr = NULL; + break; + } + + if (ptr == NULL) { + error_msg = "calloc failed"; + goto fail; + } + CHECK_CTX("calloc"); + if (hook.calloc_nelem != nelem || hook.calloc_elsize != elsize) { + error_msg = "calloc invalid nelem or elsize"; + goto fail; + } + + hook.free_ptr = NULL; + switch(domain) { + case PYMEM_DOMAIN_RAW: + PyMem_RawFree(ptr); + break; + case PYMEM_DOMAIN_MEM: + PyMem_Free(ptr); + break; + case PYMEM_DOMAIN_OBJ: + PyObject_Free(ptr); + break; + } + + CHECK_CTX("calloc free"); + if (hook.free_ptr != ptr) { + error_msg = "calloc free invalid pointer"; + goto fail; + } + + res = Py_NewRef(Py_None); + goto finally; + +fail: + PyErr_SetString(PyExc_RuntimeError, error_msg); + +finally: + PyMem_SetAllocator(domain, &hook.alloc); + return res; + +#undef CHECK_CTX +} + +static PyObject * +test_pyobject_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return test_setallocators(PYMEM_DOMAIN_OBJ); +} + +static PyObject * +test_pyobject_new(PyObject *self, 
PyObject *Py_UNUSED(ignored))
+{
+    PyObject *obj;
+    PyTypeObject *type = &PyBaseObject_Type;
+    PyTypeObject *var_type = &PyLong_Type;
+
+    // PyObject_New()
+    obj = PyObject_New(PyObject, type);
+    if (obj == NULL) {
+        goto alloc_failed;
+    }
+    Py_DECREF(obj);
+
+    // PyObject_NEW()
+    obj = PyObject_NEW(PyObject, type);
+    if (obj == NULL) {
+        goto alloc_failed;
+    }
+    Py_DECREF(obj);
+
+    // PyObject_NewVar()
+    obj = PyObject_NewVar(PyObject, var_type, 3);
+    if (obj == NULL) {
+        goto alloc_failed;
+    }
+    Py_DECREF(obj);
+
+    // PyObject_NEW_VAR()
+    obj = PyObject_NEW_VAR(PyObject, var_type, 3);
+    if (obj == NULL) {
+        goto alloc_failed;
+    }
+    Py_DECREF(obj);
+
+    Py_RETURN_NONE;
+
+alloc_failed:
+    PyErr_NoMemory();
+    return NULL;
+}
+
+static PyObject *
+test_pymem_alloc0(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+    void *ptr;
+
+    ptr = PyMem_RawMalloc(0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyMem_RawMalloc(0) returns NULL");
+        return NULL;
+    }
+    PyMem_RawFree(ptr);
+
+    ptr = PyMem_RawCalloc(0, 0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyMem_RawCalloc(0, 0) returns NULL");
+        return NULL;
+    }
+    PyMem_RawFree(ptr);
+
+    ptr = PyMem_Malloc(0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyMem_Malloc(0) returns NULL");
+        return NULL;
+    }
+    PyMem_Free(ptr);
+
+    ptr = PyMem_Calloc(0, 0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyMem_Calloc(0, 0) returns NULL");
+        return NULL;
+    }
+    PyMem_Free(ptr);
+
+    ptr = PyObject_Malloc(0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyObject_Malloc(0) returns NULL");
+        return NULL;
+    }
+    PyObject_Free(ptr);
+
+    ptr = PyObject_Calloc(0, 0);
+    if (ptr == NULL) {
+        PyErr_SetString(PyExc_RuntimeError,
+                        "PyObject_Calloc(0, 0) returns NULL");
+        return NULL;
+    }
+    PyObject_Free(ptr);
+
+    Py_RETURN_NONE;
+}
+
+static PyObject *
+test_pymem_getallocatorsname(PyObject *self, PyObject *args)
+{
+    const char *name = _PyMem_GetCurrentAllocatorName();
+    if (name == NULL) {
+        PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name");
+        return NULL;
+    }
+    return PyUnicode_FromString(name);
+}
+
+static PyObject *
+test_pymem_setrawallocators(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+    return test_setallocators(PYMEM_DOMAIN_RAW);
+}
+
+static PyObject *
+test_pymem_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+    return test_setallocators(PYMEM_DOMAIN_MEM);
+}
+
+static PyObject *
+pyobject_malloc_without_gil(PyObject *self, PyObject *args)
+{
+    char *buffer;
+
+    /* Deliberate bug to test debug hooks on Python memory allocators:
+       call PyObject_Malloc() without holding the GIL */
+    Py_BEGIN_ALLOW_THREADS
+    buffer = PyObject_Malloc(10);
+    Py_END_ALLOW_THREADS
+
+    PyObject_Free(buffer);
+
+    Py_RETURN_NONE;
+}
+
+static PyObject *
+pymem_buffer_overflow(PyObject *self, PyObject *args)
+{
+    char *buffer;
+
+    /* Deliberate buffer overflow to check that PyMem_Free() detects
+       the overflow when debug hooks are installed. */
+    buffer = PyMem_Malloc(16);
+    if (buffer == NULL) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+    buffer[16] = 'x';
+    PyMem_Free(buffer);
+
+    Py_RETURN_NONE;
+}
+
+static PyObject *
+pymem_api_misuse(PyObject *self, PyObject *args)
+{
+    char *buffer;
+
+    /* Deliberate misuse of Python allocators:
+       allocate with PyMem but release with PyMem_Raw.
*/ + buffer = PyMem_Malloc(16); + PyMem_RawFree(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +pymem_malloc_without_gil(PyObject *self, PyObject *args) +{ + char *buffer; + + /* Deliberate bug to test debug hooks on Python memory allocators: + call PyMem_Malloc() without holding the GIL */ + Py_BEGIN_ALLOW_THREADS + buffer = PyMem_Malloc(10); + Py_END_ALLOW_THREADS + + PyMem_Free(buffer); + + Py_RETURN_NONE; +} + +static PyObject * +test_pyobject_is_freed(const char *test_name, PyObject *op) +{ + if (!_PyObject_IsFreed(op)) { + PyErr_SetString(PyExc_AssertionError, + "object is not seen as freed"); + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *op = NULL; + return test_pyobject_is_freed("check_pyobject_null_is_freed", op); +} + + +static PyObject * +check_pyobject_uninitialized_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object fields like ob_type are uninitialized! */ + return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); +} + + +static PyObject * +check_pyobject_forbidden_bytes_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ + PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* ob_type field is after the memory block: part of "forbidden bytes" + when using debug hooks on memory allocators! */ + return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); +} + + +static PyObject * +check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + /* This test would fail if run with the address sanitizer */ +#ifdef _Py_ADDRESS_SANITIZER + Py_RETURN_NONE; +#else + PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); + if (op == NULL) { + return NULL; + } + Py_TYPE(op)->tp_dealloc(op); + /* Reset reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object memory is freed! 
*/
+    return test_pyobject_is_freed("check_pyobject_freed_is_freed", op);
+#endif
+}
+
+static PyMethodDef test_methods[] = {
+    {"check_pyobject_forbidden_bytes_is_freed",
+     check_pyobject_forbidden_bytes_is_freed, METH_NOARGS},
+    {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS},
+    {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS},
+    {"check_pyobject_uninitialized_is_freed",
+     check_pyobject_uninitialized_is_freed, METH_NOARGS},
+    {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS},
+    {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS},
+    {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS},
+    {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS},
+    {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS},
+    {"remove_mem_hooks", remove_mem_hooks, METH_NOARGS,
+     PyDoc_STR("Remove memory hooks.")},
+    {"set_nomemory", (PyCFunction)set_nomemory, METH_VARARGS,
+     PyDoc_STR("set_nomemory(start:int, stop:int = 0)")},
+    {"test_pymem_alloc0", test_pymem_alloc0, METH_NOARGS},
+    {"test_pymem_setallocators", test_pymem_setallocators, METH_NOARGS},
+    {"test_pymem_setrawallocators", test_pymem_setrawallocators, METH_NOARGS},
+    {"test_pyobject_new", test_pyobject_new, METH_NOARGS},
+    {"test_pyobject_setallocators", test_pyobject_setallocators, METH_NOARGS},
+    {NULL},
+};
+
+int
+_PyTestCapi_Init_Mem(PyObject *mod)
+{
+    if (PyModule_AddFunctions(mod, test_methods) < 0) {
+        return -1;
+    }
+
+    PyObject *v;
+#ifdef WITH_PYMALLOC
+    v = Py_NewRef(Py_True);
+#else
+    v = Py_NewRef(Py_False);
+#endif
+    int rc = PyModule_AddObjectRef(mod, "WITH_PYMALLOC", v);
+    Py_DECREF(v);
+    if (rc < 0) {
+        return -1;
+    }
+
+    return 0;
+}
diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h
index 304e5922c0d50b..7ba3c4ebff8cde 100644
--- a/Modules/_testcapi/parts.h
+++ b/Modules/_testcapi/parts.h
@@ -27,6 +27,15 @@ int _PyTestCapi_Init_Vectorcall(PyObject *module);
 int _PyTestCapi_Init_Heaptype(PyObject *module);
 int _PyTestCapi_Init_Unicode(PyObject *module);
+int _PyTestCapi_Init_GetArgs(PyObject *module);
+int _PyTestCapi_Init_PyTime(PyObject *module);
+int _PyTestCapi_Init_DateTime(PyObject *module);
+int _PyTestCapi_Init_Docstring(PyObject *module);
+int _PyTestCapi_Init_Mem(PyObject *module);
+int _PyTestCapi_Init_Watchers(PyObject *module);
+int _PyTestCapi_Init_Long(PyObject *module);
+int _PyTestCapi_Init_Float(PyObject *module);
+int _PyTestCapi_Init_Structmember(PyObject *module);

 #ifdef LIMITED_API_AVAILABLE
 int _PyTestCapi_Init_VectorcallLimited(PyObject *module);
diff --git a/Modules/_testcapi/pytime.c b/Modules/_testcapi/pytime.c
new file mode 100644
index 00000000000000..7422bafc30193a
--- /dev/null
+++ b/Modules/_testcapi/pytime.c
@@ -0,0 +1,274 @@
+#include "parts.h"
+
+#ifdef MS_WINDOWS
+# include <winsock2.h>            // struct timeval
+#endif
+
+static PyObject *
+test_pytime_fromseconds(PyObject *self, PyObject *args)
+{
+    int seconds;
+    if (!PyArg_ParseTuple(args, "i", &seconds)) {
+        return NULL;
+    }
+    _PyTime_t ts = _PyTime_FromSeconds(seconds);
+    return _PyTime_AsNanosecondsObject(ts);
+}
+
+static int
+check_time_rounding(int round)
+{
+    if (round != _PyTime_ROUND_FLOOR
+        && round != _PyTime_ROUND_CEILING
+        && round != _PyTime_ROUND_HALF_EVEN
+        && round != _PyTime_ROUND_UP)
+    {
+        PyErr_SetString(PyExc_ValueError, "invalid rounding");
+        return -1;
+    }
+    return 0;
+}
+
+static PyObject *
+test_pytime_fromsecondsobject(PyObject *self, PyObject *args)
+{
+    PyObject *obj;
+    int round;
+    if
(!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t ts; + if (_PyTime_FromSecondsObject(&ts, obj, round) == -1) { + return NULL; + } + return _PyTime_AsNanosecondsObject(ts); +} + +static PyObject * +test_pytime_assecondsdouble(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t ts; + if (_PyTime_FromNanosecondsObject(&ts, obj) < 0) { + return NULL; + } + double d = _PyTime_AsSecondsDouble(ts); + return PyFloat_FromDouble(d); +} + +static PyObject * +test_PyTime_AsTimeval(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timeval tv; + if (_PyTime_AsTimeval(t, &tv, round) < 0) { + return NULL; + } + + PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); + if (seconds == NULL) { + return NULL; + } + return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); +} + +static PyObject * +test_PyTime_AsTimeval_clamp(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timeval tv; + _PyTime_AsTimeval_clamp(t, &tv, round); + + PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); + if (seconds == NULL) { + return NULL; + } + return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); +} + +#ifdef HAVE_CLOCK_GETTIME +static PyObject * +test_PyTime_AsTimespec(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timespec ts; + if (_PyTime_AsTimespec(t, &ts) == -1) { + return NULL; + } + return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); +} + +static PyObject * +test_PyTime_AsTimespec_clamp(PyObject *self, PyObject *args) +{ + PyObject *obj; + if (!PyArg_ParseTuple(args, "O", &obj)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + struct timespec ts; + _PyTime_AsTimespec_clamp(t, &ts); + return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); +} +#endif + +static PyObject * +test_PyTime_AsMilliseconds(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t ms = _PyTime_AsMilliseconds(t, round); + _PyTime_t ns = _PyTime_FromNanoseconds(ms); + return _PyTime_AsNanosecondsObject(ns); +} + +static PyObject * +test_PyTime_AsMicroseconds(PyObject *self, PyObject *args) +{ + PyObject *obj; + int round; + if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { + return NULL; + } + _PyTime_t t; + if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { + return NULL; + } + if (check_time_rounding(round) < 0) { + return NULL; + } + _PyTime_t us = _PyTime_AsMicroseconds(t, round); + _PyTime_t ns = _PyTime_FromNanoseconds(us); + return _PyTime_AsNanosecondsObject(ns); +} + +static PyObject * +test_pytime_object_to_time_t(PyObject 
*self, PyObject *args)
+{
+    PyObject *obj;
+    time_t sec;
+    int round;
+    if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_time_t", &obj, &round)) {
+        return NULL;
+    }
+    if (check_time_rounding(round) < 0) {
+        return NULL;
+    }
+    if (_PyTime_ObjectToTime_t(obj, &sec, round) == -1) {
+        return NULL;
+    }
+    return _PyLong_FromTime_t(sec);
+}
+
+static PyObject *
+test_pytime_object_to_timeval(PyObject *self, PyObject *args)
+{
+    PyObject *obj;
+    time_t sec;
+    long usec;
+    int round;
+    if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timeval", &obj, &round)) {
+        return NULL;
+    }
+    if (check_time_rounding(round) < 0) {
+        return NULL;
+    }
+    if (_PyTime_ObjectToTimeval(obj, &sec, &usec, round) == -1) {
+        return NULL;
+    }
+    return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), usec);
+}
+
+static PyObject *
+test_pytime_object_to_timespec(PyObject *self, PyObject *args)
+{
+    PyObject *obj;
+    time_t sec;
+    long nsec;
+    int round;
+    if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timespec", &obj, &round)) {
+        return NULL;
+    }
+    if (check_time_rounding(round) < 0) {
+        return NULL;
+    }
+    if (_PyTime_ObjectToTimespec(obj, &sec, &nsec, round) == -1) {
+        return NULL;
+    }
+    return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), nsec);
+}
+
+static PyMethodDef test_methods[] = {
+    {"PyTime_AsMicroseconds", test_PyTime_AsMicroseconds, METH_VARARGS},
+    {"PyTime_AsMilliseconds", test_PyTime_AsMilliseconds, METH_VARARGS},
+    {"PyTime_AsSecondsDouble", test_pytime_assecondsdouble, METH_VARARGS},
+#ifdef HAVE_CLOCK_GETTIME
+    {"PyTime_AsTimespec", test_PyTime_AsTimespec, METH_VARARGS},
+    {"PyTime_AsTimespec_clamp", test_PyTime_AsTimespec_clamp, METH_VARARGS},
+#endif
+    {"PyTime_AsTimeval", test_PyTime_AsTimeval, METH_VARARGS},
+    {"PyTime_AsTimeval_clamp", test_PyTime_AsTimeval_clamp, METH_VARARGS},
+    {"PyTime_FromSeconds", test_pytime_fromseconds, METH_VARARGS},
+    {"PyTime_FromSecondsObject", test_pytime_fromsecondsobject, METH_VARARGS},
+    {"pytime_object_to_time_t", test_pytime_object_to_time_t, METH_VARARGS},
+    {"pytime_object_to_timespec", test_pytime_object_to_timespec, METH_VARARGS},
+    {"pytime_object_to_timeval", test_pytime_object_to_timeval, METH_VARARGS},
+    {NULL},
+};
+
+int
+_PyTestCapi_Init_PyTime(PyObject *mod)
+{
+    if (PyModule_AddFunctions(mod, test_methods) < 0) {
+        return -1;
+    }
+    return 0;
+}
diff --git a/Modules/_testcapi/structmember.c b/Modules/_testcapi/structmember.c
new file mode 100644
index 00000000000000..0fb872a4328d60
--- /dev/null
+++ b/Modules/_testcapi/structmember.c
@@ -0,0 +1,217 @@
+#define PY_SSIZE_T_CLEAN
+#include "parts.h"
+#include <stddef.h>               // for offsetof()
+
+
+// This defines two classes that contain all the simple member types, one
+// using "new" Py_-prefixed API, and the other using "old" <structmember.h>.
+// They should behave identically in Python.
+ +typedef struct { + char bool_member; + char byte_member; + unsigned char ubyte_member; + short short_member; + unsigned short ushort_member; + int int_member; + unsigned int uint_member; + long long_member; + unsigned long ulong_member; + Py_ssize_t pyssizet_member; + float float_member; + double double_member; + char inplace_member[6]; + long long longlong_member; + unsigned long long ulonglong_member; +} all_structmembers; + +typedef struct { + PyObject_HEAD + all_structmembers structmembers; +} test_structmembers; + + +static struct PyMemberDef test_members_newapi[] = { + {"T_BOOL", Py_T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, + {"T_BYTE", Py_T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, + {"T_UBYTE", Py_T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, + {"T_SHORT", Py_T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, + {"T_USHORT", Py_T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, + {"T_INT", Py_T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, + {"T_UINT", Py_T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, + {"T_LONG", Py_T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, + {"T_ULONG", Py_T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, + {"T_PYSSIZET", Py_T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, + {"T_FLOAT", Py_T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, + {"T_DOUBLE", Py_T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, + {"T_STRING_INPLACE", Py_T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, + {"T_LONGLONG", Py_T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, + {"T_ULONGLONG", Py_T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, + {NULL} +}; + +static PyObject * +test_structmembers_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = { + "T_BOOL", "T_BYTE", "T_UBYTE", "T_SHORT", "T_USHORT", + "T_INT", "T_UINT", "T_LONG", "T_ULONG", "T_PYSSIZET", + "T_FLOAT", "T_DOUBLE", "T_STRING_INPLACE", + "T_LONGLONG", "T_ULONGLONG", + NULL}; + static const char fmt[] = "|bbBhHiIlknfds#LK"; + test_structmembers *ob; + const char *s = NULL; + Py_ssize_t string_len = 0; + ob = PyObject_New(test_structmembers, type); + if (ob == NULL) { + return NULL; + } + memset(&ob->structmembers, 0, sizeof(all_structmembers)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, + &ob->structmembers.bool_member, + &ob->structmembers.byte_member, + &ob->structmembers.ubyte_member, + &ob->structmembers.short_member, + &ob->structmembers.ushort_member, + &ob->structmembers.int_member, + &ob->structmembers.uint_member, + &ob->structmembers.long_member, + &ob->structmembers.ulong_member, + &ob->structmembers.pyssizet_member, + &ob->structmembers.float_member, + &ob->structmembers.double_member, + &s, &string_len, + &ob->structmembers.longlong_member, + &ob->structmembers.ulonglong_member)) + { + Py_DECREF(ob); + return NULL; + } + if (s != NULL) { + if (string_len > 5) { + Py_DECREF(ob); + PyErr_SetString(PyExc_ValueError, "string too long"); + return NULL; + } + strcpy(ob->structmembers.inplace_member, s); + } + else { + strcpy(ob->structmembers.inplace_member, ""); + } + return (PyObject *)ob; +} + +static 
PyType_Slot test_structmembers_slots[] = { + {Py_tp_new, test_structmembers_new}, + {Py_tp_members, test_members_newapi}, + {0}, +}; + +static PyType_Spec test_structmembers_spec = { + .name = "_testcapi._test_structmembersType_NewAPI", + .flags = Py_TPFLAGS_DEFAULT, + .basicsize = sizeof(test_structmembers), + .slots = test_structmembers_slots, +}; + +#include + +static struct PyMemberDef test_members[] = { + {"T_BOOL", T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, + {"T_BYTE", T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, + {"T_UBYTE", T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, + {"T_SHORT", T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, + {"T_USHORT", T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, + {"T_INT", T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, + {"T_UINT", T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, + {"T_LONG", T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, + {"T_ULONG", T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, + {"T_PYSSIZET", T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, + {"T_FLOAT", T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, + {"T_DOUBLE", T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, + {"T_STRING_INPLACE", T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, + {"T_LONGLONG", T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, + {"T_ULONGLONG", T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, + {NULL} +}; + + +static void +test_structmembers_free(PyObject *ob) +{ + PyObject_Free(ob); +} + +/* Designated initializers would work too, but this does test the *old* API */ +static PyTypeObject test_structmembersType_OldAPI= { + PyVarObject_HEAD_INIT(NULL, 0) + "test_structmembersType_OldAPI", + sizeof(test_structmembers), /* tp_basicsize */ + 0, /* tp_itemsize */ + test_structmembers_free, /* destructor tp_dealloc */ + 0, /* tp_vectorcall_offset */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_as_async */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericSetAttr, /* tp_setattro */ + 0, /* tp_as_buffer */ + 0, /* tp_flags */ + "Type containing all structmember types", + 0, /* traverseproc tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + test_members, /* tp_members */ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + test_structmembers_new, /* tp_new */ +}; + + +int +_PyTestCapi_Init_Structmember(PyObject *m) +{ + int res; + res = PyType_Ready(&test_structmembersType_OldAPI); + if (res < 0) { + return -1; + } + res = PyModule_AddObject( + m, + "_test_structmembersType_OldAPI", + (PyObject *)&test_structmembersType_OldAPI); + if (res < 0) { + return -1; + } + + PyObject *test_structmembersType_NewAPI = PyType_FromModuleAndSpec( + m, &test_structmembers_spec, NULL); + if (!test_structmembersType_NewAPI) { + return -1; + } + res = PyModule_AddType(m, (PyTypeObject*)test_structmembersType_NewAPI); + Py_DECREF(test_structmembersType_NewAPI); + 
if (res < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/testcapi_long.h b/Modules/_testcapi/testcapi_long.h similarity index 99% rename from Modules/testcapi_long.h rename to Modules/_testcapi/testcapi_long.h index 6bddad7bb5d249..143258140b4bb4 100644 --- a/Modules/testcapi_long.h +++ b/Modules/_testcapi/testcapi_long.h @@ -202,6 +202,5 @@ TESTNAME(PyObject *error(const char*)) Py_DECREF(Py_None); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c index e9c863a7570458..dcbc973c9fb991 100644 --- a/Modules/_testcapi/vectorcall.c +++ b/Modules/_testcapi/vectorcall.c @@ -297,8 +297,7 @@ static PyObject * func_descr_get(PyObject *func, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } return PyMethod_New(func, obj); } @@ -306,15 +305,13 @@ func_descr_get(PyObject *func, PyObject *obj, PyObject *type) static PyObject * nop_descr_get(PyObject *func, PyObject *obj, PyObject *type) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } static PyObject * call_return_args(PyObject *self, PyObject *args, PyObject *kwargs) { - Py_INCREF(args); - return args; + return Py_NewRef(args); } static PyTypeObject MethodDescriptorBase_Type = { diff --git a/Modules/_testcapi/vectorcall_limited.c b/Modules/_testcapi/vectorcall_limited.c index ee57af84b1bb5f..a69f1d3f2a79b5 100644 --- a/Modules/_testcapi/vectorcall_limited.c +++ b/Modules/_testcapi/vectorcall_limited.c @@ -32,6 +32,105 @@ LimitedVectorCallClass_new(PyTypeObject *tp, PyTypeObject *a, PyTypeObject *kw) return self; } +static PyObject * +call_vectorcall(PyObject* self, PyObject *callable) +{ + PyObject *args[3] = { NULL, NULL, NULL }; + PyObject *kwname = NULL, *kwnames = NULL, *result = NULL; + + args[1] = PyUnicode_FromString("foo"); + if (!args[1]) { + goto leave; + } + + args[2] = PyUnicode_FromString("bar"); + if (!args[2]) { + goto leave; + } + + kwname = PyUnicode_InternFromString("baz"); + if (!kwname) { + goto leave; + } + + kwnames = PyTuple_New(1); + if (!kwnames) { + goto leave; + } + + if (PyTuple_SetItem(kwnames, 0, kwname)) { + goto leave; + } + + result = PyObject_Vectorcall( + callable, + args + 1, + 1 | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames + ); + +leave: + Py_XDECREF(args[1]); + Py_XDECREF(args[2]); + Py_XDECREF(kwnames); + + return result; +} + +static PyObject * +call_vectorcall_method(PyObject* self, PyObject *callable) +{ + PyObject *args[3] = { NULL, NULL, NULL }; + PyObject *name = NULL, *kwname = NULL, + *kwnames = NULL, *result = NULL; + + name = PyUnicode_FromString("f"); + if (!name) { + goto leave; + } + + args[0] = callable; + args[1] = PyUnicode_FromString("foo"); + if (!args[1]) { + goto leave; + } + + args[2] = PyUnicode_FromString("bar"); + if (!args[2]) { + goto leave; + } + + kwname = PyUnicode_InternFromString("baz"); + if (!kwname) { + goto leave; + } + + kwnames = PyTuple_New(1); + if (!kwnames) { + goto leave; + } + + if (PyTuple_SetItem(kwnames, 0, kwname)) { + goto leave; + } + + + result = PyObject_VectorcallMethod( + name, + args, + 2 | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames + ); + +leave: + Py_XDECREF(name); + Py_XDECREF(args[1]); + Py_XDECREF(args[2]); + Py_XDECREF(kwnames); + + return result; +} + static PyMemberDef LimitedVectorCallClass_members[] = { {"__vectorcalloffset__", T_PYSSIZET, sizeof(PyObject), READONLY}, {NULL} @@ -54,10 +153,8 @@ static PyType_Spec LimitedVectorCallClass_spec = { 
}; static PyMethodDef TestMethods[] = { - /* Add module methods here. - * (Empty list left here as template/example, since using - * PyModule_AddFunctions isn't very common.) - */ + {"call_vectorcall", call_vectorcall, METH_O}, + {"call_vectorcall_method", call_vectorcall_method, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c new file mode 100644 index 00000000000000..608cd780d12a26 --- /dev/null +++ b/Modules/_testcapi/watchers.c @@ -0,0 +1,539 @@ +#include "parts.h" + +#define Py_BUILD_CORE +#include "pycore_function.h" // FUNC_MAX_WATCHERS + +// Test dict watching +static PyObject *g_dict_watch_events; +static int g_dict_watchers_installed; + +static int +dict_watch_callback(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + PyObject *new_value) +{ + PyObject *msg; + switch (event) { + case PyDict_EVENT_CLEARED: + msg = PyUnicode_FromString("clear"); + break; + case PyDict_EVENT_DEALLOCATED: + msg = PyUnicode_FromString("dealloc"); + break; + case PyDict_EVENT_CLONED: + msg = PyUnicode_FromString("clone"); + break; + case PyDict_EVENT_ADDED: + msg = PyUnicode_FromFormat("new:%S:%S", key, new_value); + break; + case PyDict_EVENT_MODIFIED: + msg = PyUnicode_FromFormat("mod:%S:%S", key, new_value); + break; + case PyDict_EVENT_DELETED: + msg = PyUnicode_FromFormat("del:%S", key); + break; + default: + msg = PyUnicode_FromString("unknown"); + } + if (msg == NULL) { + return -1; + } + assert(PyList_Check(g_dict_watch_events)); + if (PyList_Append(g_dict_watch_events, msg) < 0) { + Py_DECREF(msg); + return -1; + } + Py_DECREF(msg); + return 0; +} + +static int +dict_watch_callback_second(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + PyObject *new_value) +{ + PyObject *msg = PyUnicode_FromString("second"); + if (msg == NULL) { + return -1; + } + int rc = PyList_Append(g_dict_watch_events, msg); + Py_DECREF(msg); + if (rc < 0) { + return -1; + } + return 0; +} + +static int +dict_watch_callback_error(PyDict_WatchEvent event, + PyObject *dict, + PyObject *key, + PyObject *new_value) +{ + PyErr_SetString(PyExc_RuntimeError, "boom!"); + return -1; +} + +static PyObject * +add_dict_watcher(PyObject *self, PyObject *kind) +{ + int watcher_id; + assert(PyLong_Check(kind)); + long kind_l = PyLong_AsLong(kind); + if (kind_l == 2) { + watcher_id = PyDict_AddWatcher(dict_watch_callback_second); + } + else if (kind_l == 1) { + watcher_id = PyDict_AddWatcher(dict_watch_callback_error); + } + else { + watcher_id = PyDict_AddWatcher(dict_watch_callback); + } + if (watcher_id < 0) { + return NULL; + } + if (!g_dict_watchers_installed) { + assert(!g_dict_watch_events); + if (!(g_dict_watch_events = PyList_New(0))) { + return NULL; + } + } + g_dict_watchers_installed++; + return PyLong_FromLong(watcher_id); +} + +static PyObject * +clear_dict_watcher(PyObject *self, PyObject *watcher_id) +{ + if (PyDict_ClearWatcher(PyLong_AsLong(watcher_id))) { + return NULL; + } + g_dict_watchers_installed--; + if (!g_dict_watchers_installed) { + assert(g_dict_watch_events); + Py_CLEAR(g_dict_watch_events); + } + Py_RETURN_NONE; +} + +static PyObject * +watch_dict(PyObject *self, PyObject *args) +{ + PyObject *dict; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { + return NULL; + } + if (PyDict_Watch(watcher_id, dict)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +unwatch_dict(PyObject *self, PyObject *args) +{ + PyObject *dict; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &dict)) { + 
return NULL; + } + if (PyDict_Unwatch(watcher_id, dict)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +get_dict_watcher_events(PyObject *self, PyObject *Py_UNUSED(args)) +{ + if (!g_dict_watch_events) { + PyErr_SetString(PyExc_RuntimeError, "no watchers active"); + return NULL; + } + return Py_NewRef(g_dict_watch_events); +} + +// Test type watchers +static PyObject *g_type_modified_events; +static int g_type_watchers_installed; + +static int +type_modified_callback(PyTypeObject *type) +{ + assert(PyList_Check(g_type_modified_events)); + if(PyList_Append(g_type_modified_events, (PyObject *)type) < 0) { + return -1; + } + return 0; +} + +static int +type_modified_callback_wrap(PyTypeObject *type) +{ + assert(PyList_Check(g_type_modified_events)); + PyObject *list = PyList_New(0); + if (list == NULL) { + return -1; + } + if (PyList_Append(list, (PyObject *)type) < 0) { + Py_DECREF(list); + return -1; + } + if (PyList_Append(g_type_modified_events, list) < 0) { + Py_DECREF(list); + return -1; + } + Py_DECREF(list); + return 0; +} + +static int +type_modified_callback_error(PyTypeObject *type) +{ + PyErr_SetString(PyExc_RuntimeError, "boom!"); + return -1; +} + +static PyObject * +add_type_watcher(PyObject *self, PyObject *kind) +{ + int watcher_id; + assert(PyLong_Check(kind)); + long kind_l = PyLong_AsLong(kind); + if (kind_l == 2) { + watcher_id = PyType_AddWatcher(type_modified_callback_wrap); + } + else if (kind_l == 1) { + watcher_id = PyType_AddWatcher(type_modified_callback_error); + } + else { + watcher_id = PyType_AddWatcher(type_modified_callback); + } + if (watcher_id < 0) { + return NULL; + } + if (!g_type_watchers_installed) { + assert(!g_type_modified_events); + if (!(g_type_modified_events = PyList_New(0))) { + return NULL; + } + } + g_type_watchers_installed++; + return PyLong_FromLong(watcher_id); +} + +static PyObject * +clear_type_watcher(PyObject *self, PyObject *watcher_id) +{ + if (PyType_ClearWatcher(PyLong_AsLong(watcher_id))) { + return NULL; + } + g_type_watchers_installed--; + if (!g_type_watchers_installed) { + assert(g_type_modified_events); + Py_CLEAR(g_type_modified_events); + } + Py_RETURN_NONE; +} + +static PyObject * +get_type_modified_events(PyObject *self, PyObject *Py_UNUSED(args)) +{ + if (!g_type_modified_events) { + PyErr_SetString(PyExc_RuntimeError, "no watchers active"); + return NULL; + } + return Py_NewRef(g_type_modified_events); +} + +static PyObject * +watch_type(PyObject *self, PyObject *args) +{ + PyObject *type; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { + return NULL; + } + if (PyType_Watch(watcher_id, type)) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +unwatch_type(PyObject *self, PyObject *args) +{ + PyObject *type; + int watcher_id; + if (!PyArg_ParseTuple(args, "iO", &watcher_id, &type)) { + return NULL; + } + if (PyType_Unwatch(watcher_id, type)) { + return NULL; + } + Py_RETURN_NONE; +} + +// Test function watchers + +#define NUM_FUNC_WATCHERS 2 +static PyObject *pyfunc_watchers[NUM_FUNC_WATCHERS]; +static int func_watcher_ids[NUM_FUNC_WATCHERS] = {-1, -1}; + +static PyObject * +get_id(PyObject *obj) +{ + PyObject *builtins = PyEval_GetBuiltins(); // borrowed ref. 
+ if (builtins == NULL) { + return NULL; + } + PyObject *id_str = PyUnicode_FromString("id"); + if (id_str == NULL) { + return NULL; + } + PyObject *id_func = PyObject_GetItem(builtins, id_str); + Py_DECREF(id_str); + if (id_func == NULL) { + return NULL; + } + PyObject *stack[] = {obj}; + PyObject *id = PyObject_Vectorcall(id_func, stack, 1, NULL); + Py_DECREF(id_func); + return id; +} + +static int +call_pyfunc_watcher(PyObject *watcher, PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + PyObject *event_obj = PyLong_FromLong(event); + if (event_obj == NULL) { + return -1; + } + if (new_value == NULL) { + new_value = Py_None; + } + Py_INCREF(new_value); + PyObject *func_or_id = NULL; + if (event == PyFunction_EVENT_DESTROY) { + /* Don't expose a function that's about to be destroyed to managed code */ + func_or_id = get_id((PyObject *) func); + if (func_or_id == NULL) { + Py_DECREF(event_obj); + Py_DECREF(new_value); + return -1; + } + } + else { + Py_INCREF(func); + func_or_id = (PyObject *) func; + } + PyObject *stack[] = {event_obj, func_or_id, new_value}; + PyObject *res = PyObject_Vectorcall(watcher, stack, 3, NULL); + int st = (res == NULL) ? -1 : 0; + Py_XDECREF(res); + Py_DECREF(new_value); + Py_DECREF(event_obj); + Py_DECREF(func_or_id); + return st; +} + +static int +first_func_watcher_callback(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject *new_value) +{ + return call_pyfunc_watcher(pyfunc_watchers[0], event, func, new_value); +} + +static int +second_func_watcher_callback(PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + return call_pyfunc_watcher(pyfunc_watchers[1], event, func, new_value); +} + +static PyFunction_WatchCallback func_watcher_callbacks[NUM_FUNC_WATCHERS] = { + first_func_watcher_callback, + second_func_watcher_callback +}; + +static int +add_func_event(PyObject *module, const char *name, PyFunction_WatchEvent event) +{ + PyObject *value = PyLong_FromLong(event); + if (value == NULL) { + return -1; + } + int ok = PyModule_AddObjectRef(module, name, value); + Py_DECREF(value); + return ok; +} + +static PyObject * +add_func_watcher(PyObject *self, PyObject *func) +{ + if (!PyFunction_Check(func)) { + PyErr_SetString(PyExc_TypeError, "'func' must be a function"); + return NULL; + } + int idx = -1; + for (int i = 0; i < NUM_FUNC_WATCHERS; i++) { + if (func_watcher_ids[i] == -1) { + idx = i; + break; + } + } + if (idx == -1) { + PyErr_SetString(PyExc_RuntimeError, "no free watchers"); + return NULL; + } + PyObject *result = PyLong_FromLong(idx); + if (result == NULL) { + return NULL; + } + func_watcher_ids[idx] = PyFunction_AddWatcher(func_watcher_callbacks[idx]); + if (func_watcher_ids[idx] < 0) { + Py_DECREF(result); + return NULL; + } + pyfunc_watchers[idx] = Py_NewRef(func); + return result; +} + +static PyObject * +clear_func_watcher(PyObject *self, PyObject *watcher_id_obj) +{ + long watcher_id = PyLong_AsLong(watcher_id_obj); + if ((watcher_id < INT_MIN) || (watcher_id > INT_MAX)) { + PyErr_SetString(PyExc_ValueError, "invalid watcher ID"); + return NULL; + } + int wid = (int) watcher_id; + if (PyFunction_ClearWatcher(wid) < 0) { + return NULL; + } + int idx = -1; + for (int i = 0; i < NUM_FUNC_WATCHERS; i++) { + if (func_watcher_ids[i] == wid) { + idx = i; + break; + } + } + assert(idx != -1); + Py_CLEAR(pyfunc_watchers[idx]); + func_watcher_ids[idx] = -1; + Py_RETURN_NONE; +} + +static int +noop_func_event_handler(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject 
*new_value) +{ + return 0; +} + +static PyObject * +allocate_too_many_func_watchers(PyObject *self, PyObject *args) +{ + int watcher_ids[FUNC_MAX_WATCHERS + 1]; + int num_watchers = 0; + for (unsigned long i = 0; i < sizeof(watcher_ids) / sizeof(int); i++) { + int watcher_id = PyFunction_AddWatcher(noop_func_event_handler); + if (watcher_id == -1) { + break; + } + watcher_ids[i] = watcher_id; + num_watchers++; + } + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + for (int i = 0; i < num_watchers; i++) { + if (PyFunction_ClearWatcher(watcher_ids[i]) < 0) { + PyErr_WriteUnraisable(Py_None); + break; + } + } + if (type) { + PyErr_Restore(type, value, traceback); + return NULL; + } + else if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +set_func_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL; + PyObject *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + if (PyFunction_SetDefaults(func, defaults) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +set_func_kwdefaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL; + PyObject *kwdefaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &kwdefaults)) { + return NULL; + } + if (PyFunction_SetKwDefaults(func, kwdefaults) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + // Dict watchers. + {"add_dict_watcher", add_dict_watcher, METH_O, NULL}, + {"clear_dict_watcher", clear_dict_watcher, METH_O, NULL}, + {"watch_dict", watch_dict, METH_VARARGS, NULL}, + {"unwatch_dict", unwatch_dict, METH_VARARGS, NULL}, + {"get_dict_watcher_events", get_dict_watcher_events, METH_NOARGS, NULL}, + + // Type watchers. + {"add_type_watcher", add_type_watcher, METH_O, NULL}, + {"clear_type_watcher", clear_type_watcher, METH_O, NULL}, + {"watch_type", watch_type, METH_VARARGS, NULL}, + {"unwatch_type", unwatch_type, METH_VARARGS, NULL}, + {"get_type_modified_events", get_type_modified_events, METH_NOARGS, NULL}, + + // Function watchers. 
+ {"add_func_watcher", add_func_watcher, METH_O, NULL}, + {"clear_func_watcher", clear_func_watcher, METH_O, NULL}, + {"set_func_defaults_via_capi", set_func_defaults, METH_VARARGS, NULL}, + {"set_func_kwdefaults_via_capi", set_func_kwdefaults, METH_VARARGS, NULL}, + {"allocate_too_many_func_watchers", allocate_too_many_func_watchers, + METH_NOARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Watchers(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + /* Expose each event as an attribute on the module */ +#define ADD_EVENT(event) \ + if (add_func_event(mod, "PYFUNC_EVENT_" #event, \ + PyFunction_EVENT_##event)) { \ + return -1; \ + } + FOREACH_FUNC_EVENT(ADD_EVENT); +#undef ADD_EVENT + + return 0; +} diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 3d6535f50be957..3617fafe9b4fdd 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -13,7 +13,6 @@ #undef Py_BUILD_CORE_MODULE #undef Py_BUILD_CORE_BUILTIN -#define NEEDS_PY_IDENTIFIER /* Always enable assertions */ #undef NDEBUG @@ -21,16 +20,11 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" -#include "datetime.h" // PyDateTimeAPI #include "marshal.h" // PyMarshal_WriteLongToFile -#include "structmember.h" // PyMemberDef +#include "structmember.h" // for offsetof(), T_OBJECT #include // FLT_MAX #include -#ifdef MS_WINDOWS -# include // struct timeval -#endif - #ifdef HAVE_SYS_WAIT_H #include // W_STOPCODE #endif @@ -343,8 +337,7 @@ dict_getitem_knownhash(PyObject *self, PyObject *args) return NULL; } - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } /* Issue #4701: Check that PyObject_Hash implicitly calls @@ -467,537 +460,6 @@ test_lazy_hash_inheritance(PyObject* self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -/* Tests of PyLong_{As, From}{Unsigned,}Long(), and - PyLong_{As, From}{Unsigned,}LongLong(). - - Note that the meat of the test is contained in testcapi_long.h. - This is revolting, but delicate code duplication is worse: "almost - exactly the same" code is needed to test long long, but the ubiquitous - dependence on type names makes it impossible to use a parameterized - function. A giant macro would be even worse than this. A C++ template - would be perfect. - - The "report an error" functions are deliberately not part of the #include - file: if the test fails, you can set a breakpoint in the appropriate - error function directly, and crawl back from there in the debugger. 
-*/ - -#define UNBIND(X) Py_DECREF(X); (X) = NULL - -static PyObject * -raise_test_long_error(const char* msg) -{ - return raiseTestError("test_long_api", msg); -} - -#define TESTNAME test_long_api_inner -#define TYPENAME long -#define F_S_TO_PY PyLong_FromLong -#define F_PY_TO_S PyLong_AsLong -#define F_U_TO_PY PyLong_FromUnsignedLong -#define F_PY_TO_U PyLong_AsUnsignedLong - -#include "testcapi_long.h" - -static PyObject * -test_long_api(PyObject* self, PyObject *Py_UNUSED(ignored)) -{ - return TESTNAME(raise_test_long_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - -static PyObject * -raise_test_longlong_error(const char* msg) -{ - return raiseTestError("test_longlong_api", msg); -} - -#define TESTNAME test_longlong_api_inner -#define TYPENAME long long -#define F_S_TO_PY PyLong_FromLongLong -#define F_PY_TO_S PyLong_AsLongLong -#define F_U_TO_PY PyLong_FromUnsignedLongLong -#define F_PY_TO_U PyLong_AsUnsignedLongLong - -#include "testcapi_long.h" - -static PyObject * -test_longlong_api(PyObject* self, PyObject *args) -{ - return TESTNAME(raise_test_longlong_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - -/* Test the PyLong_AsLongAndOverflow API. General conversion to PY_LONG - is tested by test_long_api_inner. This test will concentrate on proper - handling of overflow. -*/ - -static PyObject * -test_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *num, *one, *temp; - long value; - int overflow; - - /* Test that overflow is set properly for a large value. */ - /* num is a number larger than LONG_MAX even on 64-bit platforms */ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LONG_MAX + 1 */ - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. 
*/ - /* num is a number smaller than LONG_MIN even on 64-bit platforms */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LONG_MIN - 1 */ - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. */ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MAX) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MIN) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/* Test the PyLong_AsLongLongAndOverflow API. General conversion to - long long is tested by test_long_api_inner. This test will - concentrate on proper handling of overflow. -*/ - -static PyObject * -test_long_long_and_overflow(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *num, *one, *temp; - long long value; - int overflow; - - /* Test that overflow is set properly for a large value. */ - /* num is a number larger than LLONG_MAX on a typical machine. 
*/ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LLONG_MAX + 1 */ - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. */ - /* num is a number smaller than LLONG_MIN on a typical platform */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LLONG_MIN - 1 */ - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_DECREF(num); - num = temp; - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. 
*/ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MAX) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MIN) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/* Test the PyLong_As{Size,Ssize}_t API. At present this just tests that - non-integer arguments are handled correctly. It should be extended to - test overflow handling. - */ - -static PyObject * -test_long_as_size_t(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - size_t out_u; - Py_ssize_t out_s; - - Py_INCREF(Py_None); - - out_u = PyLong_AsSize_t(Py_None); - if (out_u != (size_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - out_s = PyLong_AsSsize_t(Py_None); - if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. 
*/ - return Py_None; -} - -static PyObject * -test_long_as_unsigned_long_long_mask(PyObject *self, - PyObject *Py_UNUSED(ignored)) -{ - unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); - - if (res != (unsigned long long)-1 || !PyErr_Occurred()) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) didn't " - "complain"); - } - if (!PyErr_ExceptionMatches(PyExc_SystemError)) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) raised " - "something other than SystemError"); - } - PyErr_Clear(); - Py_RETURN_NONE; -} - -/* Test the PyLong_AsDouble API. At present this just tests that - non-integer arguments are handled correctly. - */ - -static PyObject * -test_long_as_double(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - double out; - - Py_INCREF(Py_None); - - out = PyLong_AsDouble(Py_None); - if (out != -1.0 || !PyErr_Occurred()) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ - return Py_None; -} - -/* Test the L code for PyArg_ParseTuple. This should deliver a long long - for both long and int arguments. The test may leak a little memory if - it fails. -*/ -static PyObject * -test_L_code(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *tuple, *num; - long long value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; - - num = PyLong_FromLong(42); - if (num == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, num); - - value = -1; - if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { - return NULL; - } - if (value != 42) - return raiseTestError("test_L_code", - "L code returned wrong value for long 42"); - - Py_DECREF(num); - num = PyLong_FromLong(42); - if (num == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, num); - - value = -1; - if (!PyArg_ParseTuple(tuple, "L:test_L_code", &value)) { - return NULL; - } - if (value != 42) - return raiseTestError("test_L_code", - "L code returned wrong value for int 42"); - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - static PyObject * return_none(void *unused) { @@ -1236,1312 +698,158 @@ test_get_type_qualname(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -static PyObject * -get_args(PyObject *self, PyObject *args) -{ - if (args == NULL) { - args = Py_None; - } - Py_INCREF(args); - return args; -} - static PyObject * -get_kwargs(PyObject *self, PyObject *args, PyObject *kwargs) +pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - if (kwargs == NULL) { - kwargs = Py_None; - } - Py_INCREF(kwargs); - return kwargs; + return PyObject_Repr(NULL); } -/* Test tuple argument processing */ static PyObject * -getargs_tuple(PyObject *self, PyObject *args) +pyobject_str_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - int a, b, c; - if (!PyArg_ParseTuple(args, "i(ii)", &a, &b, &c)) - return NULL; - return Py_BuildValue("iii", a, b, c); + return PyObject_Str(NULL); } -/* test PyArg_ParseTupleAndKeywords */ static PyObject * -getargs_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +pyobject_bytes_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { - static char *keywords[] = {"arg1","arg2","arg3","arg4","arg5", NULL}; - static const char fmt[] = 
"(ii)i|(i(ii))(iii)i"; - int int_args[10]={-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; - - if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, - &int_args[0], &int_args[1], &int_args[2], &int_args[3], &int_args[4], - &int_args[5], &int_args[6], &int_args[7], &int_args[8], &int_args[9])) - return NULL; - return Py_BuildValue("iiiiiiiiii", - int_args[0], int_args[1], int_args[2], int_args[3], int_args[4], - int_args[5], int_args[6], int_args[7], int_args[8], int_args[9]); + return PyObject_Bytes(NULL); } -/* test PyArg_ParseTupleAndKeywords keyword-only arguments */ static PyObject * -getargs_keyword_only(PyObject *self, PyObject *args, PyObject *kwargs) +raise_exception(PyObject *self, PyObject *args) { - static char *keywords[] = {"required", "optional", "keyword_only", NULL}; - int required = -1; - int optional = -1; - int keyword_only = -1; + PyObject *exc; + PyObject *exc_args, *v; + int num_args, i; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|i$i", keywords, - &required, &optional, &keyword_only)) + if (!PyArg_ParseTuple(args, "Oi:raise_exception", + &exc, &num_args)) return NULL; - return Py_BuildValue("iii", required, optional, keyword_only); -} - -/* test PyArg_ParseTupleAndKeywords positional-only arguments */ -static PyObject * -getargs_positional_only_and_keywords(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "keyword", NULL}; - int required = -1; - int optional = -1; - int keyword = -1; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|ii", keywords, - &required, &optional, &keyword)) + exc_args = PyTuple_New(num_args); + if (exc_args == NULL) return NULL; - return Py_BuildValue("iii", required, optional, keyword); + for (i = 0; i < num_args; ++i) { + v = PyLong_FromLong(i); + if (v == NULL) { + Py_DECREF(exc_args); + return NULL; + } + PyTuple_SET_ITEM(exc_args, i, v); + } + PyErr_SetObject(exc, exc_args); + Py_DECREF(exc_args); + return NULL; } -/* Functions to call PyArg_ParseTuple with integer format codes, - and return the result. 
-*/ static PyObject * -getargs_b(PyObject *self, PyObject *args) +set_errno(PyObject *self, PyObject *args) { - unsigned char value; - if (!PyArg_ParseTuple(args, "b", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} + int new_errno; -static PyObject * -getargs_B(PyObject *self, PyObject *args) -{ - unsigned char value; - if (!PyArg_ParseTuple(args, "B", &value)) + if (!PyArg_ParseTuple(args, "i:set_errno", &new_errno)) return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} -static PyObject * -getargs_h(PyObject *self, PyObject *args) -{ - short value; - if (!PyArg_ParseTuple(args, "h", &value)) - return NULL; - return PyLong_FromLong((long)value); + errno = new_errno; + Py_RETURN_NONE; } static PyObject * -getargs_H(PyObject *self, PyObject *args) +test_set_exception(PyObject *self, PyObject *new_exc) { - unsigned short value; - if (!PyArg_ParseTuple(args, "H", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); -} + PyObject *exc = PyErr_GetHandledException(); + assert(PyExceptionInstance_Check(exc) || exc == NULL); -static PyObject * -getargs_I(PyObject *self, PyObject *args) -{ - unsigned int value; - if (!PyArg_ParseTuple(args, "I", &value)) - return NULL; - return PyLong_FromUnsignedLong((unsigned long)value); + PyErr_SetHandledException(new_exc); + return exc; } static PyObject * -getargs_k(PyObject *self, PyObject *args) +test_set_exc_info(PyObject *self, PyObject *args) { - unsigned long value; - if (!PyArg_ParseTuple(args, "k", &value)) + PyObject *orig_exc; + PyObject *new_type, *new_value, *new_tb; + PyObject *type, *value, *tb; + if (!PyArg_ParseTuple(args, "OOO:test_set_exc_info", + &new_type, &new_value, &new_tb)) return NULL; - return PyLong_FromUnsignedLong(value); -} -static PyObject * -getargs_i(PyObject *self, PyObject *args) -{ - int value; - if (!PyArg_ParseTuple(args, "i", &value)) - return NULL; - return PyLong_FromLong((long)value); -} + PyErr_GetExcInfo(&type, &value, &tb); -static PyObject * -getargs_l(PyObject *self, PyObject *args) -{ - long value; - if (!PyArg_ParseTuple(args, "l", &value)) - return NULL; - return PyLong_FromLong(value); -} + Py_INCREF(new_type); + Py_INCREF(new_value); + Py_INCREF(new_tb); + PyErr_SetExcInfo(new_type, new_value, new_tb); -static PyObject * -getargs_n(PyObject *self, PyObject *args) -{ - Py_ssize_t value; - if (!PyArg_ParseTuple(args, "n", &value)) - return NULL; - return PyLong_FromSsize_t(value); + orig_exc = PyTuple_Pack(3, type ? type : Py_None, value ? value : Py_None, tb ? tb : Py_None); + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(tb); + return orig_exc; } -static PyObject * -getargs_p(PyObject *self, PyObject *args) -{ - int value; - if (!PyArg_ParseTuple(args, "p", &value)) - return NULL; - return PyLong_FromLong(value); -} +/* test_thread_state spawns a thread of its own, and that thread releases + * `thread_done` when it's finished. The driver code has to know when the + * thread finishes, because the thread uses a PyObject (the callable) that + * may go away when the driver finishes. The former lack of this explicit + * synchronization caused rare segfaults, so rare that they were seen only + * on a Mac buildbot (although they were possible on any box). 
+ */ +static PyThread_type_lock thread_done = NULL; -static PyObject * -getargs_L(PyObject *self, PyObject *args) +static int +_make_call(void *callable) { - long long value; - if (!PyArg_ParseTuple(args, "L", &value)) - return NULL; - return PyLong_FromLongLong(value); + PyObject *rc; + int success; + PyGILState_STATE s = PyGILState_Ensure(); + rc = PyObject_CallNoArgs((PyObject *)callable); + success = (rc != NULL); + Py_XDECREF(rc); + PyGILState_Release(s); + return success; } -static PyObject * -getargs_K(PyObject *self, PyObject *args) +/* Same thing, but releases `thread_done` when it returns. This variant + * should be called only from threads spawned by test_thread_state(). + */ +static void +_make_call_from_thread(void *callable) { - unsigned long long value; - if (!PyArg_ParseTuple(args, "K", &value)) - return NULL; - return PyLong_FromUnsignedLongLong(value); + _make_call(callable); + PyThread_release_lock(thread_done); } -/* This function not only tests the 'k' getargs code, but also the - PyLong_AsUnsignedLongMask() function. */ static PyObject * -test_k_code(PyObject *self, PyObject *Py_UNUSED(ignored)) +test_thread_state(PyObject *self, PyObject *args) { - PyObject *tuple, *num; - unsigned long value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; + PyObject *fn; + int success = 1; - /* a number larger than ULONG_MAX even on 64-bit platforms */ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) + if (!PyArg_ParseTuple(args, "O:test_thread_state", &fn)) return NULL; - value = PyLong_AsUnsignedLongMask(num); - if (value != ULONG_MAX) - return raiseTestError("test_k_code", - "PyLong_AsUnsignedLongMask() returned wrong value for long 0xFFF...FFF"); - - PyTuple_SET_ITEM(tuple, 0, num); - - value = 0; - if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { + if (!PyCallable_Check(fn)) { + PyErr_Format(PyExc_TypeError, "'%s' object is not callable", + Py_TYPE(fn)->tp_name); return NULL; } - if (value != ULONG_MAX) - return raiseTestError("test_k_code", - "k code returned wrong value for long 0xFFF...FFF"); - - Py_DECREF(num); - num = PyLong_FromString("-FFFFFFFF000000000000000042", NULL, 16); - if (num == NULL) - return NULL; - value = PyLong_AsUnsignedLongMask(num); - if (value != (unsigned long)-0x42) - return raiseTestError("test_k_code", - "PyLong_AsUnsignedLongMask() returned wrong " - "value for long -0xFFF..000042"); - - PyTuple_SET_ITEM(tuple, 0, num); + thread_done = PyThread_allocate_lock(); + if (thread_done == NULL) + return PyErr_NoMemory(); + PyThread_acquire_lock(thread_done, 1); - value = 0; - if (!PyArg_ParseTuple(tuple, "k:test_k_code", &value)) { - return NULL; - } - if (value != (unsigned long)-0x42) - return raiseTestError("test_k_code", - "k code returned wrong value for long -0xFFF..000042"); - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - -static PyObject * -getargs_f(PyObject *self, PyObject *args) -{ - float f; - if (!PyArg_ParseTuple(args, "f", &f)) - return NULL; - return PyFloat_FromDouble(f); -} - -static PyObject * -getargs_d(PyObject *self, PyObject *args) -{ - double d; - if (!PyArg_ParseTuple(args, "d", &d)) - return NULL; - return PyFloat_FromDouble(d); -} - -static PyObject * -getargs_D(PyObject *self, PyObject *args) -{ - Py_complex cval; - if (!PyArg_ParseTuple(args, "D", &cval)) - return NULL; - return PyComplex_FromCComplex(cval); -} - -static PyObject * -getargs_S(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "S", &obj)) - return NULL; - Py_INCREF(obj); - 
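The test_thread_state() machinery above uses a plain PyThread lock as a one-shot completion event: the driver acquires the freshly allocated lock, the spawned thread releases it when finished, and a second acquire by the driver therefore blocks until the worker is done. A stripped-down sketch of that pattern, with invented names and no Python work in the worker (illustrative only):

/* Illustrative sketch of the lock-as-event pattern (not part of the patch). */
static PyThread_type_lock done_event;

static void
worker(void *arg)
{
    /* ... do the work ... */
    PyThread_release_lock(done_event);    /* signal completion */
}

static void
wait_for_worker(void)
{
    done_event = PyThread_allocate_lock();
    if (done_event == NULL) {
        return;                            /* allocation failed */
    }
    PyThread_acquire_lock(done_event, 1);  /* lock starts out "held" */
    PyThread_start_new_thread(worker, NULL);
    PyThread_acquire_lock(done_event, 1);  /* blocks until worker releases */
    PyThread_release_lock(done_event);
    PyThread_free_lock(done_event);
}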
return obj; -} - -static PyObject * -getargs_Y(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "Y", &obj)) - return NULL; - Py_INCREF(obj); - return obj; -} - -static PyObject * -getargs_U(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "U", &obj)) - return NULL; - Py_INCREF(obj); - return obj; -} - -static PyObject * -getargs_c(PyObject *self, PyObject *args) -{ - char c; - if (!PyArg_ParseTuple(args, "c", &c)) - return NULL; - return PyLong_FromLong((unsigned char)c); -} - -static PyObject * -getargs_C(PyObject *self, PyObject *args) -{ - int c; - if (!PyArg_ParseTuple(args, "C", &c)) - return NULL; - return PyLong_FromLong(c); -} - -static PyObject * -getargs_s(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "s", &str)) - return NULL; - return PyBytes_FromString(str); -} - -static PyObject * -getargs_s_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "s*", &buffer)) - return NULL; - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_s_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "s#", &str, &size)) - return NULL; - return PyBytes_FromStringAndSize(str, size); -} - -static PyObject * -getargs_z(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "z", &str)) - return NULL; - if (str != NULL) - return PyBytes_FromString(str); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_z_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "z*", &buffer)) - return NULL; - if (buffer.buf != NULL) - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - else { - Py_INCREF(Py_None); - bytes = Py_None; - } - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_z_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "z#", &str, &size)) - return NULL; - if (str != NULL) - return PyBytes_FromStringAndSize(str, size); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_y(PyObject *self, PyObject *args) -{ - char *str; - if (!PyArg_ParseTuple(args, "y", &str)) - return NULL; - return PyBytes_FromString(str); -} - -static PyObject * -getargs_y_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *bytes; - if (!PyArg_ParseTuple(args, "y*", &buffer)) - return NULL; - bytes = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return bytes; -} - -static PyObject * -getargs_y_hash(PyObject *self, PyObject *args) -{ - char *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "y#", &str, &size)) - return NULL; - return PyBytes_FromStringAndSize(str, size); -} - -static PyObject * -getargs_u(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - if (!PyArg_ParseTuple(args, "u", &str)) - return NULL; - return PyUnicode_FromWideChar(str, -1); -} - -static PyObject * -getargs_u_hash(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "u#", &str, &size)) - return NULL; - return PyUnicode_FromWideChar(str, size); -} - -static PyObject * -getargs_Z(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - if (!PyArg_ParseTuple(args, "Z", &str)) - return NULL; - if (str != NULL) { - return PyUnicode_FromWideChar(str, -1); - } else - Py_RETURN_NONE; -} - -static 
PyObject * -getargs_Z_hash(PyObject *self, PyObject *args) -{ - Py_UNICODE *str; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Z#", &str, &size)) - return NULL; - if (str != NULL) - return PyUnicode_FromWideChar(str, size); - else - Py_RETURN_NONE; -} - -static PyObject * -getargs_es(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - char *str; - - if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) - return NULL; - if (!PyArg_Parse(arg, "es", encoding, &str)) - return NULL; - result = PyBytes_FromString(str); - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_et(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - char *str; - - if (!PyArg_ParseTuple(args, "O|s", &arg, &encoding)) - return NULL; - if (!PyArg_Parse(arg, "et", encoding, &str)) - return NULL; - result = PyBytes_FromString(str); - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_es_hash(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - PyByteArrayObject *buffer = NULL; - char *str = NULL; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) - return NULL; - if (buffer != NULL) { - str = PyByteArray_AS_STRING(buffer); - size = PyByteArray_GET_SIZE(buffer); - } - if (!PyArg_Parse(arg, "es#", encoding, &str, &size)) - return NULL; - result = PyBytes_FromStringAndSize(str, size); - if (buffer == NULL) - PyMem_Free(str); - return result; -} - -static PyObject * -getargs_et_hash(PyObject *self, PyObject *args) -{ - PyObject *arg, *result; - const char *encoding = NULL; - PyByteArrayObject *buffer = NULL; - char *str = NULL; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "O|sY", &arg, &encoding, &buffer)) - return NULL; - if (buffer != NULL) { - str = PyByteArray_AS_STRING(buffer); - size = PyByteArray_GET_SIZE(buffer); - } - if (!PyArg_Parse(arg, "et#", encoding, &str, &size)) - return NULL; - result = PyBytes_FromStringAndSize(str, size); - if (buffer == NULL) - PyMem_Free(str); - return result; -} - -/* Test the s and z codes for PyArg_ParseTuple. 
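A note on the "es"/"et" converters used just above: on success they allocate a new encoded buffer that the caller owns and must release with PyMem_Free(), which is why each helper frees str after copying it into a bytes object. A minimal sketch, with "utf-8" and the helper name chosen purely for illustration:

/* Illustrative sketch: encode a str to UTF-8 via the "es" converter and
   release the converter-allocated buffer afterwards. */
static PyObject *
encode_to_utf8(PyObject *arg)
{
    char *str = NULL;
    PyObject *result;

    if (!PyArg_Parse(arg, "es", "utf-8", &str)) {
        return NULL;
    }
    result = PyBytes_FromString(str);
    PyMem_Free(str);                 /* buffer allocated by the converter */
    return result;
}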
-*/ -static PyObject * -test_s_code(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Unicode strings should be accepted */ - PyObject *tuple, *obj; - char *value; - - tuple = PyTuple_New(1); - if (tuple == NULL) - return NULL; - - obj = PyUnicode_Decode("t\xeate", strlen("t\xeate"), - "latin-1", NULL); - if (obj == NULL) - return NULL; - - PyTuple_SET_ITEM(tuple, 0, obj); - - /* These two blocks used to raise a TypeError: - * "argument must be string without null bytes, not str" - */ - if (!PyArg_ParseTuple(tuple, "s:test_s_code1", &value)) { - return NULL; - } - - if (!PyArg_ParseTuple(tuple, "z:test_s_code2", &value)) { - return NULL; - } - - Py_DECREF(tuple); - Py_RETURN_NONE; -} - -static PyObject * -parse_tuple_and_keywords(PyObject *self, PyObject *args) -{ - PyObject *sub_args; - PyObject *sub_kwargs; - const char *sub_format; - PyObject *sub_keywords; - - Py_ssize_t i, size; - char *keywords[8 + 1]; /* space for NULL at end */ - PyObject *o; - PyObject *converted[8]; - - int result; - PyObject *return_value = NULL; - - double buffers[8][4]; /* double ensures alignment where necessary */ - - if (!PyArg_ParseTuple(args, "OOsO:parse_tuple_and_keywords", - &sub_args, &sub_kwargs, - &sub_format, &sub_keywords)) - return NULL; - - if (!(PyList_CheckExact(sub_keywords) || PyTuple_CheckExact(sub_keywords))) { - PyErr_SetString(PyExc_ValueError, - "parse_tuple_and_keywords: sub_keywords must be either list or tuple"); - return NULL; - } - - memset(buffers, 0, sizeof(buffers)); - memset(converted, 0, sizeof(converted)); - memset(keywords, 0, sizeof(keywords)); - - size = PySequence_Fast_GET_SIZE(sub_keywords); - if (size > 8) { - PyErr_SetString(PyExc_ValueError, - "parse_tuple_and_keywords: too many keywords in sub_keywords"); - goto exit; - } - - for (i = 0; i < size; i++) { - o = PySequence_Fast_GET_ITEM(sub_keywords, i); - if (!PyUnicode_FSConverter(o, (void *)(converted + i))) { - PyErr_Format(PyExc_ValueError, - "parse_tuple_and_keywords: could not convert keywords[%zd] to narrow string", i); - goto exit; - } - keywords[i] = PyBytes_AS_STRING(converted[i]); - } - - result = PyArg_ParseTupleAndKeywords(sub_args, sub_kwargs, - sub_format, keywords, - buffers + 0, buffers + 1, buffers + 2, buffers + 3, - buffers + 4, buffers + 5, buffers + 6, buffers + 7); - - if (result) { - return_value = Py_None; - Py_INCREF(Py_None); - } - -exit: - size = sizeof(converted) / sizeof(converted[0]); - for (i = 0; i < size; i++) { - Py_XDECREF(converted[i]); - } - return return_value; -} - -static PyObject * -getargs_w_star(PyObject *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *result; - char *str; - - if (!PyArg_ParseTuple(args, "w*:getargs_w_star", &buffer)) - return NULL; - - if (2 <= buffer.len) { - str = buffer.buf; - str[0] = '['; - str[buffer.len-1] = ']'; - } - - result = PyBytes_FromStringAndSize(buffer.buf, buffer.len); - PyBuffer_Release(&buffer); - return result; -} - - -static PyObject * -test_empty_argparse(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Test that formats can begin with '|'. See issue #4720. 
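The "w*" code exercised by getargs_w_star() above fills a Py_buffer over a writable object such as a bytearray; once filled, the view must be released on every code path. A small illustrative sketch (helper name invented):

/* Illustrative sketch (not part of the patch): mutate the first byte of a
   writable buffer obtained with "w*", then release the view. */
static PyObject *
zero_first_byte(PyObject *self, PyObject *args)
{
    Py_buffer view;

    if (!PyArg_ParseTuple(args, "w*", &view)) {
        return NULL;
    }
    if (view.len > 0) {
        ((char *)view.buf)[0] = 0;   /* writes through to the caller's object */
    }
    PyBuffer_Release(&view);
    Py_RETURN_NONE;
}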
*/ - PyObject *tuple, *dict = NULL; - static char *kwlist[] = {NULL}; - int result; - tuple = PyTuple_New(0); - if (!tuple) - return NULL; - if (!(result = PyArg_ParseTuple(tuple, "|:test_empty_argparse"))) { - goto done; - } - dict = PyDict_New(); - if (!dict) - goto done; - result = PyArg_ParseTupleAndKeywords(tuple, dict, "|:test_empty_argparse", kwlist); - done: - Py_DECREF(tuple); - Py_XDECREF(dict); - if (!result) { - return NULL; - } - else { - Py_RETURN_NONE; - } -} - -/* Simple test of _PyLong_NumBits and _PyLong_Sign. */ -static PyObject * -test_long_numbits(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - struct triple { - long input; - size_t nbits; - int sign; - } testcases[] = {{0, 0, 0}, - {1L, 1, 1}, - {-1L, 1, -1}, - {2L, 2, 1}, - {-2L, 2, -1}, - {3L, 2, 1}, - {-3L, 2, -1}, - {4L, 3, 1}, - {-4L, 3, -1}, - {0x7fffL, 15, 1}, /* one Python int digit */ - {-0x7fffL, 15, -1}, - {0xffffL, 16, 1}, - {-0xffffL, 16, -1}, - {0xfffffffL, 28, 1}, - {-0xfffffffL, 28, -1}}; - size_t i; - - for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { - size_t nbits; - int sign; - PyObject *plong; - - plong = PyLong_FromLong(testcases[i].input); - if (plong == NULL) - return NULL; - nbits = _PyLong_NumBits(plong); - sign = _PyLong_Sign(plong); - - Py_DECREF(plong); - if (nbits != testcases[i].nbits) - return raiseTestError("test_long_numbits", - "wrong result for _PyLong_NumBits"); - if (sign != testcases[i].sign) - return raiseTestError("test_long_numbits", - "wrong result for _PyLong_Sign"); - } - Py_RETURN_NONE; -} - -static PyObject * -pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Repr(NULL); -} - -static PyObject * -pyobject_str_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Str(NULL); -} - -static PyObject * -pyobject_bytes_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyObject_Bytes(NULL); -} - -static PyObject * -raise_exception(PyObject *self, PyObject *args) -{ - PyObject *exc; - PyObject *exc_args, *v; - int num_args, i; - - if (!PyArg_ParseTuple(args, "Oi:raise_exception", - &exc, &num_args)) - return NULL; - - exc_args = PyTuple_New(num_args); - if (exc_args == NULL) - return NULL; - for (i = 0; i < num_args; ++i) { - v = PyLong_FromLong(i); - if (v == NULL) { - Py_DECREF(exc_args); - return NULL; - } - PyTuple_SET_ITEM(exc_args, i, v); - } - PyErr_SetObject(exc, exc_args); - Py_DECREF(exc_args); - return NULL; -} - -static PyObject * -set_errno(PyObject *self, PyObject *args) -{ - int new_errno; - - if (!PyArg_ParseTuple(args, "i:set_errno", &new_errno)) - return NULL; - - errno = new_errno; - Py_RETURN_NONE; -} - -static PyObject * -test_set_exception(PyObject *self, PyObject *new_exc) -{ - PyObject *exc = PyErr_GetHandledException(); - assert(PyExceptionInstance_Check(exc) || exc == NULL); - - PyErr_SetHandledException(new_exc); - return exc; -} - -static PyObject * -test_set_exc_info(PyObject *self, PyObject *args) -{ - PyObject *orig_exc; - PyObject *new_type, *new_value, *new_tb; - PyObject *type, *value, *tb; - if (!PyArg_ParseTuple(args, "OOO:test_set_exc_info", - &new_type, &new_value, &new_tb)) - return NULL; - - PyErr_GetExcInfo(&type, &value, &tb); - - Py_INCREF(new_type); - Py_INCREF(new_value); - Py_INCREF(new_tb); - PyErr_SetExcInfo(new_type, new_value, new_tb); - - orig_exc = PyTuple_Pack(3, type ? type : Py_None, value ? value : Py_None, tb ? 
tb : Py_None); - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); - return orig_exc; -} - -static int test_run_counter = 0; - -static PyObject * -test_datetime_capi(PyObject *self, PyObject *args) { - if (PyDateTimeAPI) { - if (test_run_counter) { - /* Probably regrtest.py -R */ - Py_RETURN_NONE; - } - else { - PyErr_SetString(PyExc_AssertionError, - "PyDateTime_CAPI somehow initialized"); - return NULL; - } - } - test_run_counter++; - PyDateTime_IMPORT; - - if (PyDateTimeAPI) - Py_RETURN_NONE; - else - return NULL; -} - -/* Functions exposing the C API type checking for testing */ -#define MAKE_DATETIME_CHECK_FUNC(check_method, exact_method) \ - PyObject *obj; \ - int exact = 0; \ - if (!PyArg_ParseTuple(args, "O|p", &obj, &exact)) { \ - return NULL; \ - } \ - int rv = exact?exact_method(obj):check_method(obj); \ - if (rv) { \ - Py_RETURN_TRUE; \ - } else { \ - Py_RETURN_FALSE; \ - } - -static PyObject * -datetime_check_date(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDate_Check, PyDate_CheckExact) -} - -static PyObject * -datetime_check_time(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyTime_Check, PyTime_CheckExact) -} - -static PyObject * -datetime_check_datetime(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDateTime_Check, PyDateTime_CheckExact) -} - -static PyObject * -datetime_check_delta(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyDelta_Check, PyDelta_CheckExact) -} - -static PyObject * -datetime_check_tzinfo(PyObject *self, PyObject *args) { - MAKE_DATETIME_CHECK_FUNC(PyTZInfo_Check, PyTZInfo_CheckExact) -} - - -/* Makes three variations on timezone representing UTC-5: - 1. timezone with offset and name from PyDateTimeAPI - 2. timezone with offset and name from PyTimeZone_FromOffsetAndName - 3. 
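As test_datetime_capi() above relies on, the datetime C API is exposed through a capsule: PyDateTime_IMPORT must run once before PyDateTimeAPI or any PyDate_*/PyDateTime_* macro can be used, and on failure it sets an exception and leaves PyDateTimeAPI NULL. A minimal illustrative sketch, assuming the datetime.h header is included and with an invented helper name and arbitrary date:

/* Illustrative sketch: lazily import the datetime C API, then build a date. */
static PyObject *
make_new_year(void)
{
    if (PyDateTimeAPI == NULL) {
        PyDateTime_IMPORT;
        if (PyDateTimeAPI == NULL) {
            return NULL;             /* import failed, exception already set */
        }
    }
    return PyDate_FromDate(2023, 1, 1);
}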
timezone with offset (no name) from PyTimeZone_FromOffset -*/ -static PyObject * -make_timezones_capi(PyObject *self, PyObject *args) { - PyObject *offset = PyDelta_FromDSU(0, -18000, 0); - PyObject *name = PyUnicode_FromString("EST"); - - PyObject *est_zone_capi = PyDateTimeAPI->TimeZone_FromTimeZone(offset, name); - PyObject *est_zone_macro = PyTimeZone_FromOffsetAndName(offset, name); - PyObject *est_zone_macro_noname = PyTimeZone_FromOffset(offset); - - Py_DecRef(offset); - Py_DecRef(name); - - PyObject *rv = PyTuple_New(3); - - PyTuple_SET_ITEM(rv, 0, est_zone_capi); - PyTuple_SET_ITEM(rv, 1, est_zone_macro); - PyTuple_SET_ITEM(rv, 2, est_zone_macro_noname); - - return rv; -} - -static PyObject * -get_timezones_offset_zero(PyObject *self, PyObject *args) { - PyObject *offset = PyDelta_FromDSU(0, 0, 0); - PyObject *name = PyUnicode_FromString(""); - - // These two should return the UTC singleton - PyObject *utc_singleton_0 = PyTimeZone_FromOffset(offset); - PyObject *utc_singleton_1 = PyTimeZone_FromOffsetAndName(offset, NULL); - - // This one will return +00:00 zone, but not the UTC singleton - PyObject *non_utc_zone = PyTimeZone_FromOffsetAndName(offset, name); - - Py_DecRef(offset); - Py_DecRef(name); - - PyObject *rv = PyTuple_New(3); - PyTuple_SET_ITEM(rv, 0, utc_singleton_0); - PyTuple_SET_ITEM(rv, 1, utc_singleton_1); - PyTuple_SET_ITEM(rv, 2, non_utc_zone); - - return rv; -} - -static PyObject * -get_timezone_utc_capi(PyObject* self, PyObject *args) { - int macro = 0; - if (!PyArg_ParseTuple(args, "|p", ¯o)) { - return NULL; - } - if (macro) { - Py_INCREF(PyDateTime_TimeZone_UTC); - return PyDateTime_TimeZone_UTC; - } else { - Py_INCREF(PyDateTimeAPI->TimeZone_UTC); - return PyDateTimeAPI->TimeZone_UTC; - } -} - -static PyObject * -get_date_fromdate(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int year, month, day; - - if (!PyArg_ParseTuple(args, "piii", ¯o, &year, &month, &day)) { - return NULL; - } - - if (macro) { - rv = PyDate_FromDate(year, month, day); - } - else { - rv = PyDateTimeAPI->Date_FromDate( - year, month, day, - PyDateTimeAPI->DateType); - } - return rv; -} - -static PyObject * -get_datetime_fromdateandtime(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int year, month, day; - int hour, minute, second, microsecond; - - if (!PyArg_ParseTuple(args, "piiiiiii", - ¯o, - &year, &month, &day, - &hour, &minute, &second, µsecond)) { - return NULL; - } - - if (macro) { - rv = PyDateTime_FromDateAndTime( - year, month, day, - hour, minute, second, microsecond); - } - else { - rv = PyDateTimeAPI->DateTime_FromDateAndTime( - year, month, day, - hour, minute, second, microsecond, - Py_None, - PyDateTimeAPI->DateTimeType); - } - return rv; -} - -static PyObject * -get_datetime_fromdateandtimeandfold(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int year, month, day; - int hour, minute, second, microsecond, fold; - - if (!PyArg_ParseTuple(args, "piiiiiiii", - ¯o, - &year, &month, &day, - &hour, &minute, &second, µsecond, - &fold)) { - return NULL; - } - - if (macro) { - rv = PyDateTime_FromDateAndTimeAndFold( - year, month, day, - hour, minute, second, microsecond, - fold); - } - else { - rv = PyDateTimeAPI->DateTime_FromDateAndTimeAndFold( - year, month, day, - hour, minute, second, microsecond, - Py_None, - fold, - PyDateTimeAPI->DateTimeType); - } - return rv; -} - -static PyObject * -get_time_fromtime(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int hour, minute, 
second, microsecond; - - if (!PyArg_ParseTuple(args, "piiii", - ¯o, - &hour, &minute, &second, µsecond)) { - return NULL; - } - - if (macro) { - rv = PyTime_FromTime(hour, minute, second, microsecond); - } - else { - rv = PyDateTimeAPI->Time_FromTime( - hour, minute, second, microsecond, - Py_None, - PyDateTimeAPI->TimeType); - } - return rv; -} - -static PyObject * -get_time_fromtimeandfold(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int hour, minute, second, microsecond, fold; - - if (!PyArg_ParseTuple(args, "piiiii", - ¯o, - &hour, &minute, &second, µsecond, - &fold)) { - return NULL; - } - - if (macro) { - rv = PyTime_FromTimeAndFold(hour, minute, second, microsecond, fold); - } - else { - rv = PyDateTimeAPI->Time_FromTimeAndFold( - hour, minute, second, microsecond, - Py_None, - fold, - PyDateTimeAPI->TimeType); - } - return rv; -} - -static PyObject * -get_delta_fromdsu(PyObject *self, PyObject *args) -{ - PyObject *rv = NULL; - int macro; - int days, seconds, microseconds; - - if (!PyArg_ParseTuple(args, "piii", - ¯o, - &days, &seconds, µseconds)) { - return NULL; - } - - if (macro) { - rv = PyDelta_FromDSU(days, seconds, microseconds); - } - else { - rv = PyDateTimeAPI->Delta_FromDelta( - days, seconds, microseconds, 1, - PyDateTimeAPI->DeltaType); - } - - return rv; -} - -static PyObject * -get_date_fromtimestamp(PyObject* self, PyObject *args) -{ - PyObject *tsargs = NULL, *ts = NULL, *rv = NULL; - int macro = 0; - - if (!PyArg_ParseTuple(args, "O|p", &ts, ¯o)) { - return NULL; - } - - // Construct the argument tuple - if ((tsargs = PyTuple_Pack(1, ts)) == NULL) { - return NULL; - } - - // Pass along to the API function - if (macro) { - rv = PyDate_FromTimestamp(tsargs); - } - else { - rv = PyDateTimeAPI->Date_FromTimestamp( - (PyObject *)PyDateTimeAPI->DateType, tsargs - ); - } - - Py_DECREF(tsargs); - return rv; -} - -static PyObject * -get_datetime_fromtimestamp(PyObject* self, PyObject *args) -{ - int macro = 0; - int usetz = 0; - PyObject *tsargs = NULL, *ts = NULL, *tzinfo = Py_None, *rv = NULL; - if (!PyArg_ParseTuple(args, "OO|pp", &ts, &tzinfo, &usetz, ¯o)) { - return NULL; - } - - // Construct the argument tuple - if (usetz) { - tsargs = PyTuple_Pack(2, ts, tzinfo); - } - else { - tsargs = PyTuple_Pack(1, ts); - } - - if (tsargs == NULL) { - return NULL; - } - - // Pass along to the API function - if (macro) { - rv = PyDateTime_FromTimestamp(tsargs); - } - else { - rv = PyDateTimeAPI->DateTime_FromTimestamp( - (PyObject *)PyDateTimeAPI->DateTimeType, tsargs, NULL - ); - } - - Py_DECREF(tsargs); - return rv; -} - -static PyObject * -test_PyDateTime_GET(PyObject *self, PyObject *obj) -{ - int year, month, day; - - year = PyDateTime_GET_YEAR(obj); - month = PyDateTime_GET_MONTH(obj); - day = PyDateTime_GET_DAY(obj); - - return Py_BuildValue("(iii)", year, month, day); -} - -static PyObject * -test_PyDateTime_DATE_GET(PyObject *self, PyObject *obj) -{ - int hour, minute, second, microsecond; - - hour = PyDateTime_DATE_GET_HOUR(obj); - minute = PyDateTime_DATE_GET_MINUTE(obj); - second = PyDateTime_DATE_GET_SECOND(obj); - microsecond = PyDateTime_DATE_GET_MICROSECOND(obj); - PyObject *tzinfo = PyDateTime_DATE_GET_TZINFO(obj); - - return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); -} - -static PyObject * -test_PyDateTime_TIME_GET(PyObject *self, PyObject *obj) -{ - int hour, minute, second, microsecond; - - hour = PyDateTime_TIME_GET_HOUR(obj); - minute = PyDateTime_TIME_GET_MINUTE(obj); - second = 
PyDateTime_TIME_GET_SECOND(obj); - microsecond = PyDateTime_TIME_GET_MICROSECOND(obj); - PyObject *tzinfo = PyDateTime_TIME_GET_TZINFO(obj); - - return Py_BuildValue("(iiiiO)", hour, minute, second, microsecond, tzinfo); -} - -static PyObject * -test_PyDateTime_DELTA_GET(PyObject *self, PyObject *obj) -{ - int days, seconds, microseconds; - - days = PyDateTime_DELTA_GET_DAYS(obj); - seconds = PyDateTime_DELTA_GET_SECONDS(obj); - microseconds = PyDateTime_DELTA_GET_MICROSECONDS(obj); - - return Py_BuildValue("(iii)", days, seconds, microseconds); -} - -/* test_thread_state spawns a thread of its own, and that thread releases - * `thread_done` when it's finished. The driver code has to know when the - * thread finishes, because the thread uses a PyObject (the callable) that - * may go away when the driver finishes. The former lack of this explicit - * synchronization caused rare segfaults, so rare that they were seen only - * on a Mac buildbot (although they were possible on any box). - */ -static PyThread_type_lock thread_done = NULL; - -static int -_make_call(void *callable) -{ - PyObject *rc; - int success; - PyGILState_STATE s = PyGILState_Ensure(); - rc = PyObject_CallNoArgs((PyObject *)callable); - success = (rc != NULL); - Py_XDECREF(rc); - PyGILState_Release(s); - return success; -} - -/* Same thing, but releases `thread_done` when it returns. This variant - * should be called only from threads spawned by test_thread_state(). - */ -static void -_make_call_from_thread(void *callable) -{ - _make_call(callable); - PyThread_release_lock(thread_done); -} - -static PyObject * -test_thread_state(PyObject *self, PyObject *args) -{ - PyObject *fn; - int success = 1; - - if (!PyArg_ParseTuple(args, "O:test_thread_state", &fn)) - return NULL; - - if (!PyCallable_Check(fn)) { - PyErr_Format(PyExc_TypeError, "'%s' object is not callable", - Py_TYPE(fn)->tp_name); - return NULL; - } - - thread_done = PyThread_allocate_lock(); - if (thread_done == NULL) - return PyErr_NoMemory(); - PyThread_acquire_lock(thread_done, 1); - - /* Start a new thread with our callback. */ - PyThread_start_new_thread(_make_call_from_thread, fn); - /* Make the callback with the thread lock held by this thread */ - success &= _make_call(fn); - /* Do it all again, but this time with the thread-lock released */ - Py_BEGIN_ALLOW_THREADS - success &= _make_call(fn); - PyThread_acquire_lock(thread_done, 1); /* wait for thread to finish */ - Py_END_ALLOW_THREADS + /* Start a new thread with our callback. */ + PyThread_start_new_thread(_make_call_from_thread, fn); + /* Make the callback with the thread lock held by this thread */ + success &= _make_call(fn); + /* Do it all again, but this time with the thread-lock released */ + Py_BEGIN_ALLOW_THREADS + success &= _make_call(fn); + PyThread_acquire_lock(thread_done, 1); /* wait for thread to finish */ + Py_END_ALLOW_THREADS /* And once more with and without a thread XXX - should use a lock and work out exactly what we are trying @@ -2598,13 +906,6 @@ pending_threadfunc(PyObject *self, PyObject *arg) Py_RETURN_TRUE; } -/* This is here to provide a docstring for test_descr. */ -static PyObject * -test_with_docstring(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - Py_RETURN_NONE; -} - /* Test PyOS_string_to_double. */ static PyObject * test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -2985,8 +1286,7 @@ failing_converter(PyObject *obj, void *arg) { /* Clone str1, then let the conversion fail. 
*/ assert(str1); - str2 = str1; - Py_INCREF(str2); + str2 = Py_NewRef(str1); return 0; } static PyObject* @@ -3225,70 +1525,79 @@ run_in_subinterp(PyObject *self, PyObject *args) return PyLong_FromLong(r); } -static int -check_time_rounding(int round) -{ - if (round != _PyTime_ROUND_FLOOR - && round != _PyTime_ROUND_CEILING - && round != _PyTime_ROUND_HALF_EVEN - && round != _PyTime_ROUND_UP) { - PyErr_SetString(PyExc_ValueError, "invalid rounding"); - return -1; - } - return 0; -} - +/* To run some code in a sub-interpreter. */ static PyObject * -test_pytime_object_to_time_t(PyObject *self, PyObject *args) +run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) { - PyObject *obj; - time_t sec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_time_t", &obj, &round)) - return NULL; - if (check_time_rounding(round) < 0) - return NULL; - if (_PyTime_ObjectToTime_t(obj, &sec, round) == -1) - return NULL; - return _PyLong_FromTime_t(sec); -} + const char *code; + int allow_fork = -1; + int allow_exec = -1; + int allow_threads = -1; + int allow_daemon_threads = -1; + int r; + PyThreadState *substate, *mainstate; + /* only initialise 'cflags.cf_flags' to test backwards compatibility */ + PyCompilerFlags cflags = {0}; -static PyObject * -test_pytime_object_to_timeval(PyObject *self, PyObject *args) -{ - PyObject *obj; - time_t sec; - long usec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timeval", &obj, &round)) + static char *kwlist[] = {"code", + "allow_fork", + "allow_exec", + "allow_threads", + "allow_daemon_threads", + NULL}; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, + "s$pppp:run_in_subinterp_with_config", kwlist, + &code, &allow_fork, &allow_exec, + &allow_threads, &allow_daemon_threads)) { return NULL; - if (check_time_rounding(round) < 0) + } + if (allow_fork < 0) { + PyErr_SetString(PyExc_ValueError, "missing allow_fork"); return NULL; - if (_PyTime_ObjectToTimeval(obj, &sec, &usec, round) == -1) + } + if (allow_exec < 0) { + PyErr_SetString(PyExc_ValueError, "missing allow_exec"); return NULL; - return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), usec); -} - -static PyObject * -test_pytime_object_to_timespec(PyObject *self, PyObject *args) -{ - PyObject *obj; - time_t sec; - long nsec; - int round; - if (!PyArg_ParseTuple(args, "Oi:pytime_object_to_timespec", &obj, &round)) + } + if (allow_threads < 0) { + PyErr_SetString(PyExc_ValueError, "missing allow_threads"); return NULL; - if (check_time_rounding(round) < 0) + } + if (allow_daemon_threads < 0) { + PyErr_SetString(PyExc_ValueError, "missing allow_daemon_threads"); return NULL; - if (_PyTime_ObjectToTimespec(obj, &sec, &nsec, round) == -1) + } + + mainstate = PyThreadState_Get(); + + PyThreadState_Swap(NULL); + + const _PyInterpreterConfig config = { + .allow_fork = allow_fork, + .allow_exec = allow_exec, + .allow_threads = allow_threads, + .allow_daemon_threads = allow_daemon_threads, + }; + substate = _Py_NewInterpreterFromConfig(&config); + if (substate == NULL) { + /* Since no new thread state was created, there is no exception to + propagate; raise a fresh one after swapping in the old thread + state. 
*/ + PyThreadState_Swap(mainstate); + PyErr_SetString(PyExc_RuntimeError, "sub-interpreter creation failed"); return NULL; - return Py_BuildValue("Nl", _PyLong_FromTime_t(sec), nsec); + } + r = PyRun_SimpleStringFlags(code, &cflags); + Py_EndInterpreter(substate); + + PyThreadState_Swap(mainstate); + + return PyLong_FromLong(r); } static void slot_tp_del(PyObject *self) { - _Py_IDENTIFIER(__tp_del__); PyObject *del, *res; PyObject *error_type, *error_value, *error_traceback; @@ -3299,15 +1608,21 @@ slot_tp_del(PyObject *self) /* Save the current exception, if any. */ PyErr_Fetch(&error_type, &error_value, &error_traceback); + PyObject *tp_del = PyUnicode_InternFromString("__tp_del__"); + if (tp_del == NULL) { + PyErr_WriteUnraisable(NULL); + PyErr_Restore(error_type, error_value, error_traceback); + return; + } /* Execute __del__ method, if any. */ - del = _PyObject_LookupSpecialId(self, &PyId___tp_del__); + del = _PyType_Lookup(Py_TYPE(self), tp_del); + Py_DECREF(tp_del); if (del != NULL) { - res = PyObject_CallNoArgs(del); + res = PyObject_CallOneArg(del, self); if (res == NULL) PyErr_WriteUnraisable(del); else Py_DECREF(res); - Py_DECREF(del); } /* Restore the saved exception. */ @@ -3354,8 +1669,7 @@ with_tp_del(PyObject *self, PyObject *args) return NULL; } tp->tp_del = slot_tp_del; - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyObject * @@ -3373,8 +1687,7 @@ without_gc(PyObject *Py_UNUSED(self), PyObject *obj) tp->tp_clear = NULL; } assert(!PyType_IS_GC(tp)); - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyMethodDef ml; @@ -3395,8 +1708,7 @@ static PyMethodDef ml = { static PyObject * _test_incref(PyObject *ob) { - Py_INCREF(ob); - return ob; + return Py_NewRef(ob); } static PyObject * @@ -3439,522 +1751,56 @@ static PyObject * test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(args)) { - PyStructSequence_Desc descr; - PyStructSequence_Field descr_fields[3]; - - descr_fields[0] = (PyStructSequence_Field){"foo", "foo value"}; - descr_fields[1] = (PyStructSequence_Field){NULL, "some hidden value"}; - descr_fields[2] = (PyStructSequence_Field){0, NULL}; - - descr.name = "_testcapi.test_descr"; - descr.doc = "This is used to test for memory leaks in NewType"; - descr.fields = descr_fields; - descr.n_in_sequence = 1; - - PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); - assert(structseq_type != NULL); - assert(PyType_Check(structseq_type)); - assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); - Py_DECREF(structseq_type); - - Py_RETURN_NONE; -} - -static PyObject * -test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), - PyObject *Py_UNUSED(args)) -{ - PyStructSequence_Field descr_fields[1] = { - (PyStructSequence_Field){NULL, NULL} - }; - // Test specifically for NULL .doc field. 
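The slot_tp_del() rewrite above swaps the removed _PyObject_LookupSpecialId() for _PyType_Lookup(), which returns a borrowed reference to the unbound attribute found on the type; the instance therefore has to be passed explicitly with PyObject_CallOneArg(). A hedged sketch of that lookup-and-call shape (function name invented for illustration):

/* Illustrative sketch: call a dunder looked up on the type (borrowed,
   unbound), passing the instance explicitly.
   Returns 1 if called, 0 if the attribute is absent, -1 on error. */
static int
call_type_dunder(PyObject *self, PyObject *name)
{
    PyObject *func = _PyType_Lookup(Py_TYPE(self), name);  /* borrowed */
    if (func == NULL) {
        return 0;
    }
    PyObject *res = PyObject_CallOneArg(func, self);
    if (res == NULL) {
        return -1;
    }
    Py_DECREF(res);
    return 1;
}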
- PyStructSequence_Desc descr = {"_testcapi.test_descr", NULL, &descr_fields[0], 0}; - - PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); - assert(structseq_type != NULL); - assert(PyType_Check(structseq_type)); - assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); - Py_DECREF(structseq_type); - - Py_RETURN_NONE; -} - -static PyObject * -test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_IncRef(obj); - Py_DecRef(obj); - Py_DecRef(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_pymem_alloc0(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - void *ptr; - - ptr = PyMem_RawMalloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_RawMalloc(0) returns NULL"); - return NULL; - } - PyMem_RawFree(ptr); - - ptr = PyMem_RawCalloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_RawCalloc(0, 0) returns NULL"); - return NULL; - } - PyMem_RawFree(ptr); - - ptr = PyMem_Malloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_Malloc(0) returns NULL"); - return NULL; - } - PyMem_Free(ptr); - - ptr = PyMem_Calloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyMem_Calloc(0, 0) returns NULL"); - return NULL; - } - PyMem_Free(ptr); - - ptr = PyObject_Malloc(0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyObject_Malloc(0) returns NULL"); - return NULL; - } - PyObject_Free(ptr); - - ptr = PyObject_Calloc(0, 0); - if (ptr == NULL) { - PyErr_SetString(PyExc_RuntimeError, "PyObject_Calloc(0, 0) returns NULL"); - return NULL; - } - PyObject_Free(ptr); - - Py_RETURN_NONE; -} - -static PyObject * -test_pyobject_new(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj; - PyTypeObject *type = &PyBaseObject_Type; - PyTypeObject *var_type = &PyLong_Type; - - // PyObject_New() - obj = PyObject_New(PyObject, type); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NEW() - obj = PyObject_NEW(PyObject, type); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NewVar() - obj = PyObject_NewVar(PyObject, var_type, 3); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - // PyObject_NEW_VAR() - obj = PyObject_NEW_VAR(PyObject, var_type, 3); - if (obj == NULL) { - goto alloc_failed; - } - Py_DECREF(obj); - - Py_RETURN_NONE; - -alloc_failed: - PyErr_NoMemory(); - return NULL; -} - -typedef struct { - PyMemAllocatorEx alloc; - - size_t malloc_size; - size_t calloc_nelem; - size_t calloc_elsize; - void *realloc_ptr; - size_t realloc_new_size; - void *free_ptr; - void *ctx; -} alloc_hook_t; - -static void* hook_malloc(void* ctx, size_t size) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->malloc_size = size; - return hook->alloc.malloc(hook->alloc.ctx, size); -} - -static void* hook_calloc(void* ctx, size_t nelem, size_t elsize) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->calloc_nelem = nelem; - hook->calloc_elsize = elsize; - return hook->alloc.calloc(hook->alloc.ctx, nelem, elsize); -} - -static void* hook_realloc(void* ctx, void* ptr, size_t new_size) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->realloc_ptr = ptr; - hook->realloc_new_size = new_size; - return hook->alloc.realloc(hook->alloc.ctx, ptr, new_size); -} - -static void hook_free(void *ctx, void *ptr) -{ - alloc_hook_t *hook = (alloc_hook_t *)ctx; - hook->ctx = ctx; - hook->free_ptr = ptr; - 
hook->alloc.free(hook->alloc.ctx, ptr); -} - -static PyObject * -test_setallocators(PyMemAllocatorDomain domain) -{ - PyObject *res = NULL; - const char *error_msg; - alloc_hook_t hook; - PyMemAllocatorEx alloc; - size_t size, size2, nelem, elsize; - void *ptr, *ptr2; - - memset(&hook, 0, sizeof(hook)); - - alloc.ctx = &hook; - alloc.malloc = &hook_malloc; - alloc.calloc = &hook_calloc; - alloc.realloc = &hook_realloc; - alloc.free = &hook_free; - PyMem_GetAllocator(domain, &hook.alloc); - PyMem_SetAllocator(domain, &alloc); - - /* malloc, realloc, free */ - size = 42; - hook.ctx = NULL; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr = PyMem_RawMalloc(size); break; - case PYMEM_DOMAIN_MEM: ptr = PyMem_Malloc(size); break; - case PYMEM_DOMAIN_OBJ: ptr = PyObject_Malloc(size); break; - default: ptr = NULL; break; - } - -#define CHECK_CTX(FUNC) \ - if (hook.ctx != &hook) { \ - error_msg = FUNC " wrong context"; \ - goto fail; \ - } \ - hook.ctx = NULL; /* reset for next check */ - - if (ptr == NULL) { - error_msg = "malloc failed"; - goto fail; - } - CHECK_CTX("malloc"); - if (hook.malloc_size != size) { - error_msg = "malloc invalid size"; - goto fail; - } - - size2 = 200; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr2 = PyMem_RawRealloc(ptr, size2); break; - case PYMEM_DOMAIN_MEM: ptr2 = PyMem_Realloc(ptr, size2); break; - case PYMEM_DOMAIN_OBJ: ptr2 = PyObject_Realloc(ptr, size2); break; - default: ptr2 = NULL; break; - } - - if (ptr2 == NULL) { - error_msg = "realloc failed"; - goto fail; - } - CHECK_CTX("realloc"); - if (hook.realloc_ptr != ptr - || hook.realloc_new_size != size2) { - error_msg = "realloc invalid parameters"; - goto fail; - } - - switch(domain) - { - case PYMEM_DOMAIN_RAW: PyMem_RawFree(ptr2); break; - case PYMEM_DOMAIN_MEM: PyMem_Free(ptr2); break; - case PYMEM_DOMAIN_OBJ: PyObject_Free(ptr2); break; - } - - CHECK_CTX("free"); - if (hook.free_ptr != ptr2) { - error_msg = "free invalid pointer"; - goto fail; - } - - /* calloc, free */ - nelem = 2; - elsize = 5; - switch(domain) - { - case PYMEM_DOMAIN_RAW: ptr = PyMem_RawCalloc(nelem, elsize); break; - case PYMEM_DOMAIN_MEM: ptr = PyMem_Calloc(nelem, elsize); break; - case PYMEM_DOMAIN_OBJ: ptr = PyObject_Calloc(nelem, elsize); break; - default: ptr = NULL; break; - } - - if (ptr == NULL) { - error_msg = "calloc failed"; - goto fail; - } - CHECK_CTX("calloc"); - if (hook.calloc_nelem != nelem || hook.calloc_elsize != elsize) { - error_msg = "calloc invalid nelem or elsize"; - goto fail; - } - - hook.free_ptr = NULL; - switch(domain) - { - case PYMEM_DOMAIN_RAW: PyMem_RawFree(ptr); break; - case PYMEM_DOMAIN_MEM: PyMem_Free(ptr); break; - case PYMEM_DOMAIN_OBJ: PyObject_Free(ptr); break; - } - - CHECK_CTX("calloc free"); - if (hook.free_ptr != ptr) { - error_msg = "calloc free invalid pointer"; - goto fail; - } - - Py_INCREF(Py_None); - res = Py_None; - goto finally; - -fail: - PyErr_SetString(PyExc_RuntimeError, error_msg); - -finally: - PyMem_SetAllocator(domain, &hook.alloc); - return res; - -#undef CHECK_CTX -} - -static PyObject * -test_pymem_setrawallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_RAW); -} - -static PyObject * -test_pymem_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_MEM); -} - -static PyObject * -test_pyobject_setallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return test_setallocators(PYMEM_DOMAIN_OBJ); -} - -/* Most part of the following code is inherited from the 
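The allocator tests above follow the documented pattern for hooking a memory domain: save the current PyMemAllocatorEx with PyMem_GetAllocator(), install a wrapper with PyMem_SetAllocator() that forwards to the saved allocator, and restore the original when done. A trimmed-down illustrative version that only counts raw mallocs (names invented, not part of the patch):

/* Illustrative sketch: count PyMem_RawMalloc() calls by chaining to the
   previously installed allocator. */
static PyMemAllocatorEx previous_raw;
static size_t raw_malloc_count;

static void *
counting_raw_malloc(void *ctx, size_t size)
{
    raw_malloc_count++;
    return previous_raw.malloc(previous_raw.ctx, size);
}

static void
install_counting_hook(void)
{
    PyMemAllocatorEx hook;

    PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &previous_raw);
    hook = previous_raw;             /* keep calloc/realloc/free unchanged */
    hook.malloc = counting_raw_malloc;
    PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &hook);
}

static void
remove_counting_hook(void)
{
    PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &previous_raw);
}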
pyfailmalloc project - * written by Victor Stinner. */ -static struct { - int installed; - PyMemAllocatorEx raw; - PyMemAllocatorEx mem; - PyMemAllocatorEx obj; -} FmHook; - -static struct { - int start; - int stop; - Py_ssize_t count; -} FmData; - -static int -fm_nomemory(void) -{ - FmData.count++; - if (FmData.count > FmData.start && - (FmData.stop <= 0 || FmData.count <= FmData.stop)) { - return 1; - } - return 0; -} - -static void * -hook_fmalloc(void *ctx, size_t size) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->malloc(alloc->ctx, size); -} - -static void * -hook_fcalloc(void *ctx, size_t nelem, size_t elsize) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->calloc(alloc->ctx, nelem, elsize); -} - -static void * -hook_frealloc(void *ctx, void *ptr, size_t new_size) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (fm_nomemory()) { - return NULL; - } - return alloc->realloc(alloc->ctx, ptr, new_size); -} - -static void -hook_ffree(void *ctx, void *ptr) -{ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - alloc->free(alloc->ctx, ptr); -} - -static void -fm_setup_hooks(void) -{ - PyMemAllocatorEx alloc; - - if (FmHook.installed) { - return; - } - FmHook.installed = 1; + PyStructSequence_Desc descr; + PyStructSequence_Field descr_fields[3]; - alloc.malloc = hook_fmalloc; - alloc.calloc = hook_fcalloc; - alloc.realloc = hook_frealloc; - alloc.free = hook_ffree; - PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); - PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); - PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); + descr_fields[0] = (PyStructSequence_Field){"foo", "foo value"}; + descr_fields[1] = (PyStructSequence_Field){NULL, "some hidden value"}; + descr_fields[2] = (PyStructSequence_Field){0, NULL}; - alloc.ctx = &FmHook.raw; - PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + descr.name = "_testcapi.test_descr"; + descr.doc = "This is used to test for memory leaks in NewType"; + descr.fields = descr_fields; + descr.n_in_sequence = 1; - alloc.ctx = &FmHook.mem; - PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); + assert(structseq_type != NULL); + assert(PyType_Check(structseq_type)); + assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); + Py_DECREF(structseq_type); - alloc.ctx = &FmHook.obj; - PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + Py_RETURN_NONE; } -static void -fm_remove_hooks(void) +static PyObject * +test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), + PyObject *Py_UNUSED(args)) { - if (FmHook.installed) { - FmHook.installed = 0; - PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &FmHook.raw); - PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &FmHook.mem); - PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &FmHook.obj); - } -} + PyStructSequence_Field descr_fields[1] = { + (PyStructSequence_Field){NULL, NULL} + }; + // Test specifically for NULL .doc field. 
+ PyStructSequence_Desc descr = {"_testcapi.test_descr", NULL, &descr_fields[0], 0}; + + PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); + assert(structseq_type != NULL); + assert(PyType_Check(structseq_type)); + assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); + Py_DECREF(structseq_type); -static PyObject* -set_nomemory(PyObject *self, PyObject *args) -{ - /* Memory allocation fails after 'start' allocation requests, and until - * 'stop' allocation requests except when 'stop' is negative or equal - * to 0 (default) in which case allocation failures never stop. */ - FmData.count = 0; - FmData.stop = 0; - if (!PyArg_ParseTuple(args, "i|i", &FmData.start, &FmData.stop)) { - return NULL; - } - fm_setup_hooks(); Py_RETURN_NONE; } -static PyObject* -remove_mem_hooks(PyObject *self, PyObject *Py_UNUSED(ignored)) +static PyObject * +test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) { - fm_remove_hooks(); + PyObject *obj = PyLong_FromLong(0); + Py_IncRef(obj); + Py_DecRef(obj); + Py_DecRef(obj); Py_RETURN_NONE; } -PyDoc_STRVAR(docstring_empty, -"" -); - -PyDoc_STRVAR(docstring_no_signature, -"This docstring has no signature." -); - -PyDoc_STRVAR(docstring_with_invalid_signature, -"docstring_with_invalid_signature($module, /, boo)\n" -"\n" -"This docstring has an invalid signature." -); - -PyDoc_STRVAR(docstring_with_invalid_signature2, -"docstring_with_invalid_signature2($module, /, boo)\n" -"\n" -"--\n" -"\n" -"This docstring also has an invalid signature." -); - -PyDoc_STRVAR(docstring_with_signature, -"docstring_with_signature($module, /, sig)\n" -"--\n" -"\n" -"This docstring has a valid signature." -); - -PyDoc_STRVAR(docstring_with_signature_but_no_doc, -"docstring_with_signature_but_no_doc($module, /, sig)\n" -"--\n" -"\n" -); - -PyDoc_STRVAR(docstring_with_signature_and_extra_newlines, -"docstring_with_signature_and_extra_newlines($module, /, parameter)\n" -"--\n" -"\n" -"\n" -"This docstring has a valid signature and some extra newlines." -); - -PyDoc_STRVAR(docstring_with_signature_with_defaults, -"docstring_with_signature_with_defaults(module, s='avocado',\n" -" b=b'bytes', d=3.14, i=35, n=None, t=True, f=False,\n" -" local=the_number_three, sys=sys.maxsize,\n" -" exp=sys.maxsize - 1)\n" -"--\n" -"\n" -"\n" -"\n" -"This docstring has a valid signature with parameters,\n" -"and the parameters take defaults of varying types." 
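The PyDoc_STRVAR blocks above exercise the text-signature convention: when a C function's docstring opens with "name(args)" followed by a line containing only "--", the runtime strips that header and exposes it as __text_signature__, so inspect.signature() works on the builtin; without the "--" line the whole string is treated as plain documentation, which is what the "invalid signature" variants check. A small illustrative example of a valid signature docstring (name and parameters invented):

/* Illustrative only: "$module" marks the module/self slot and "/" ends the
   positional-only parameters, matching the valid signatures tested above. */
PyDoc_STRVAR(example_doc,
"example($module, /, x, y=0)\n"
"--\n"
"\n"
"Add x and y and return the result.");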
-); - typedef struct { PyThread_type_lock start_event; PyThread_type_lock exit_event; @@ -4004,8 +1850,7 @@ call_in_temporary_c_thread(PyObject *self, PyObject *callback) goto exit; } - Py_INCREF(callback); - test_c_thread.callback = callback; + test_c_thread.callback = Py_NewRef(callback); PyThread_acquire_lock(test_c_thread.start_event, 1); PyThread_acquire_lock(test_c_thread.exit_event, 1); @@ -4026,8 +1871,7 @@ call_in_temporary_c_thread(PyObject *self, PyObject *callback) PyThread_release_lock(test_c_thread.exit_event); Py_END_ALLOW_THREADS - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); exit: Py_CLEAR(test_c_thread.callback); @@ -4221,322 +2065,6 @@ getitem_with_error(PyObject *self, PyObject *args) return PyObject_GetItem(map, key); } -static PyObject * -test_pytime_fromseconds(PyObject *self, PyObject *args) -{ - int seconds; - if (!PyArg_ParseTuple(args, "i", &seconds)) { - return NULL; - } - _PyTime_t ts = _PyTime_FromSeconds(seconds); - return _PyTime_AsNanosecondsObject(ts); -} - -static PyObject * -test_pytime_fromsecondsobject(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t ts; - if (_PyTime_FromSecondsObject(&ts, obj, round) == -1) { - return NULL; - } - return _PyTime_AsNanosecondsObject(ts); -} - -static PyObject * -test_pytime_assecondsdouble(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t ts; - if (_PyTime_FromNanosecondsObject(&ts, obj) < 0) { - return NULL; - } - double d = _PyTime_AsSecondsDouble(ts); - return PyFloat_FromDouble(d); -} - -static PyObject * -test_PyTime_AsTimeval(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timeval tv; - if (_PyTime_AsTimeval(t, &tv, round) < 0) { - return NULL; - } - - PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); - if (seconds == NULL) { - return NULL; - } - return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); -} - -static PyObject * -test_PyTime_AsTimeval_clamp(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timeval tv; - _PyTime_AsTimeval_clamp(t, &tv, round); - - PyObject *seconds = PyLong_FromLongLong(tv.tv_sec); - if (seconds == NULL) { - return NULL; - } - return Py_BuildValue("Nl", seconds, (long)tv.tv_usec); -} - -#ifdef HAVE_CLOCK_GETTIME -static PyObject * -test_PyTime_AsTimespec(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - struct timespec ts; - if (_PyTime_AsTimespec(t, &ts) == -1) { - return NULL; - } - return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); -} - -static PyObject * -test_PyTime_AsTimespec_clamp(PyObject *self, PyObject *args) -{ - PyObject *obj; - if (!PyArg_ParseTuple(args, "O", &obj)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - 
struct timespec ts; - _PyTime_AsTimespec_clamp(t, &ts); - return Py_BuildValue("Nl", _PyLong_FromTime_t(ts.tv_sec), ts.tv_nsec); -} -#endif - -static PyObject * -test_PyTime_AsMilliseconds(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t ms = _PyTime_AsMilliseconds(t, round); - _PyTime_t ns = _PyTime_FromNanoseconds(ms); - return _PyTime_AsNanosecondsObject(ns); -} - -static PyObject * -test_PyTime_AsMicroseconds(PyObject *self, PyObject *args) -{ - PyObject *obj; - int round; - if (!PyArg_ParseTuple(args, "Oi", &obj, &round)) { - return NULL; - } - _PyTime_t t; - if (_PyTime_FromNanosecondsObject(&t, obj) < 0) { - return NULL; - } - if (check_time_rounding(round) < 0) { - return NULL; - } - _PyTime_t us = _PyTime_AsMicroseconds(t, round); - _PyTime_t ns = _PyTime_FromNanoseconds(us); - return _PyTime_AsNanosecondsObject(ns); -} - -static PyObject* -pymem_buffer_overflow(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate buffer overflow to check that PyMem_Free() detects - the overflow when debug hooks are installed. */ - buffer = PyMem_Malloc(16); - if (buffer == NULL) { - PyErr_NoMemory(); - return NULL; - } - buffer[16] = 'x'; - PyMem_Free(buffer); - - Py_RETURN_NONE; -} - -static PyObject* -pymem_api_misuse(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate misusage of Python allocators: - allococate with PyMem but release with PyMem_Raw. */ - buffer = PyMem_Malloc(16); - PyMem_RawFree(buffer); - - Py_RETURN_NONE; -} - -static PyObject* -pymem_malloc_without_gil(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate bug to test debug hooks on Python memory allocators: - call PyMem_Malloc() without holding the GIL */ - Py_BEGIN_ALLOW_THREADS - buffer = PyMem_Malloc(10); - Py_END_ALLOW_THREADS - - PyMem_Free(buffer); - - Py_RETURN_NONE; -} - - -static PyObject* -test_pymem_getallocatorsname(PyObject *self, PyObject *args) -{ - const char *name = _PyMem_GetCurrentAllocatorName(); - if (name == NULL) { - PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name"); - return NULL; - } - return PyUnicode_FromString(name); -} - - -static PyObject* -test_pyobject_is_freed(const char *test_name, PyObject *op) -{ - if (!_PyObject_IsFreed(op)) { - return raiseTestError(test_name, "object is not seen as freed"); - } - Py_RETURN_NONE; -} - - -static PyObject* -check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *op = NULL; - return test_pyobject_is_freed("check_pyobject_null_is_freed", op); -} - - -static PyObject* -check_pyobject_uninitialized_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object fields like ob_type are uninitialized! 
*/ - return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); -} - - -static PyObject* -check_pyobject_forbidden_bytes_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ - PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* ob_type field is after the memory block: part of "forbidden bytes" - when using debug hooks on memory allocators! */ - return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); -} - - -static PyObject* -check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - /* This test would fail if run with the address sanitizer */ -#ifdef _Py_ADDRESS_SANITIZER - Py_RETURN_NONE; -#else - PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); - if (op == NULL) { - return NULL; - } - Py_TYPE(op)->tp_dealloc(op); - /* Reset reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object memory is freed! */ - return test_pyobject_is_freed("check_pyobject_freed_is_freed", op); -#endif -} - - -static PyObject* -pyobject_malloc_without_gil(PyObject *self, PyObject *args) -{ - char *buffer; - - /* Deliberate bug to test debug hooks on Python memory allocators: - call PyObject_Malloc() without holding the GIL */ - Py_BEGIN_ALLOW_THREADS - buffer = PyObject_Malloc(10); - Py_END_ALLOW_THREADS - - PyObject_Free(buffer); - - Py_RETURN_NONE; -} - static PyObject * tracemalloc_track(PyObject *self, PyObject *args) { @@ -4629,7 +2157,6 @@ dict_get_version(PyObject *self, PyObject *args) static PyObject * raise_SIGINT_then_send_None(PyObject *self, PyObject *args) { - _Py_IDENTIFIER(send); PyGenObject *gen; if (!PyArg_ParseTuple(args, "O!", &PyGen_Type, &gen)) @@ -4646,7 +2173,7 @@ raise_SIGINT_then_send_None(PyObject *self, PyObject *args) because we check for signals before every bytecode operation. 
*/ raise(SIGINT); - return _PyObject_CallMethodIdOneArg((PyObject *)gen, &PyId_send, Py_None); + return PyObject_CallMethod((PyObject *)gen, "send", "O", Py_None); } @@ -4662,34 +2189,99 @@ stack_pointer(PyObject *self, PyObject *args) static PyObject* py_w_stopcode(PyObject *self, PyObject *args) { - int sig, status; - if (!PyArg_ParseTuple(args, "i", &sig)) { + int sig, status; + if (!PyArg_ParseTuple(args, "i", &sig)) { + return NULL; + } + status = W_STOPCODE(sig); + return PyLong_FromLong(status); +} +#endif + + +static PyObject * +get_mapping_keys(PyObject* self, PyObject *obj) +{ + return PyMapping_Keys(obj); +} + +static PyObject * +get_mapping_values(PyObject* self, PyObject *obj) +{ + return PyMapping_Values(obj); +} + +static PyObject * +get_mapping_items(PyObject* self, PyObject *obj) +{ + return PyMapping_Items(obj); +} + +static PyObject * +test_mapping_has_key_string(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *context = PyDict_New(); + PyObject *val = PyLong_FromLong(1); + + // Since this uses `const char*` it is easier to test this in C: + PyDict_SetItemString(context, "a", val); + if (!PyMapping_HasKeyString(context, "a")) { + PyErr_SetString(PyExc_RuntimeError, + "Existing mapping key does not exist"); + return NULL; + } + if (PyMapping_HasKeyString(context, "b")) { + PyErr_SetString(PyExc_RuntimeError, + "Missing mapping key exists"); return NULL; } - status = W_STOPCODE(sig); - return PyLong_FromLong(status); -} -#endif + Py_DECREF(val); + Py_DECREF(context); + Py_RETURN_NONE; +} static PyObject * -get_mapping_keys(PyObject* self, PyObject *obj) +mapping_has_key(PyObject* self, PyObject *args) { - return PyMapping_Keys(obj); + PyObject *context, *key; + if (!PyArg_ParseTuple(args, "OO", &context, &key)) { + return NULL; + } + return PyLong_FromLong(PyMapping_HasKey(context, key)); } static PyObject * -get_mapping_values(PyObject* self, PyObject *obj) +sequence_set_slice(PyObject* self, PyObject *args) { - return PyMapping_Values(obj); + PyObject *sequence, *obj; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { + return NULL; + } + + int res = PySequence_SetSlice(sequence, i1, i2, obj); + if (res == -1) { + return NULL; + } + Py_RETURN_NONE; } static PyObject * -get_mapping_items(PyObject* self, PyObject *obj) +sequence_del_slice(PyObject* self, PyObject *args) { - return PyMapping_Items(obj); -} + PyObject *sequence; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { + return NULL; + } + int res = PySequence_DelSlice(sequence, i1, i2); + if (res == -1) { + return NULL; + } + Py_RETURN_NONE; +} static PyObject * test_pythread_tss_key_state(PyObject *self, PyObject *args) @@ -4846,6 +2438,20 @@ sequence_setitem(PyObject *self, PyObject *args) } +static PyObject * +sequence_delitem(PyObject *self, PyObject *args) +{ + Py_ssize_t i; + PyObject *seq; + if (!PyArg_ParseTuple(args, "On", &seq, &i)) { + return NULL; + } + if (PySequence_DelItem(seq, i)) { + return NULL; + } + Py_RETURN_NONE; +} + static PyObject * hasattr_string(PyObject *self, PyObject* args) { @@ -4885,8 +2491,7 @@ _null_to_none(PyObject* obj) if (obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } static PyObject* @@ -4950,7 +2555,6 @@ meth_fastcall_keywords(PyObject* self, PyObject* const* args, return Py_BuildValue("NNN", _null_to_none(self), pyargs, pykwargs); } - static PyObject* pynumber_tobase(PyObject *module, PyObject *args) { @@ -4963,7 +2567,6 @@ pynumber_tobase(PyObject 
*module, PyObject *args) return PyNumber_ToBase(obj, base); } - static PyObject* test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -5168,85 +2771,6 @@ test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } - -// Test PyFloat_Pack2(), PyFloat_Pack4() and PyFloat_Pack8() -static PyObject * -test_float_pack(PyObject *self, PyObject *args) -{ - int size; - double d; - int le; - if (!PyArg_ParseTuple(args, "idi", &size, &d, &le)) { - return NULL; - } - switch (size) - { - case 2: - { - char data[2]; - if (PyFloat_Pack2(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); - } - case 4: - { - char data[4]; - if (PyFloat_Pack4(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); - } - case 8: - { - char data[8]; - if (PyFloat_Pack8(d, data, le) < 0) { - return NULL; - } - return PyBytes_FromStringAndSize(data, Py_ARRAY_LENGTH(data)); - } - default: break; - } - - PyErr_SetString(PyExc_ValueError, "size must 2, 4 or 8"); - return NULL; -} - - -// Test PyFloat_Unpack2(), PyFloat_Unpack4() and PyFloat_Unpack8() -static PyObject * -test_float_unpack(PyObject *self, PyObject *args) -{ - assert(!PyErr_Occurred()); - const char *data; - Py_ssize_t size; - int le; - if (!PyArg_ParseTuple(args, "y#i", &data, &size, &le)) { - return NULL; - } - double d; - switch (size) - { - case 2: - d = PyFloat_Unpack2(data, le); - break; - case 4: - d = PyFloat_Unpack4(data, le); - break; - case 8: - d = PyFloat_Unpack8(data, le); - break; - default: - PyErr_SetString(PyExc_ValueError, "data length must 2, 4 or 8 bytes"); - return NULL; - } - - if (d == -1.0 && PyErr_Occurred()) { - return NULL; - } - return PyFloat_FromDouble(d); -} - static PyObject * frame_getlocals(PyObject *self, PyObject *frame) { @@ -5302,6 +2826,50 @@ frame_getlasti(PyObject *self, PyObject *frame) return PyLong_FromLong(lasti); } +static PyObject * +test_frame_getvar(PyObject *self, PyObject *args) +{ + PyObject *frame, *name; + if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVar((PyFrameObject *)frame, name); +} + +static PyObject * +test_frame_getvarstring(PyObject *self, PyObject *args) +{ + PyObject *frame; + const char *name; + if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVarString((PyFrameObject *)frame, name); +} + + +static PyObject * +eval_get_func_name(PyObject *self, PyObject *func) +{ + return PyUnicode_FromString(PyEval_GetFuncName(func)); +} + +static PyObject * +eval_get_func_desc(PyObject *self, PyObject *func) +{ + return PyUnicode_FromString(PyEval_GetFuncDesc(func)); +} + static PyObject * get_feature_macros(PyObject *self, PyObject *Py_UNUSED(args)) { @@ -5485,10 +3053,92 @@ test_macros(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } +static PyObject * +function_get_code(PyObject *self, PyObject *func) +{ + PyObject *code = PyFunction_GetCode(func); + if (code != NULL) { + return Py_NewRef(code); + } else { + return NULL; + } +} + +static PyObject * +function_get_globals(PyObject *self, PyObject *func) +{ + PyObject *globals = PyFunction_GetGlobals(func); + if (globals != NULL) { + return Py_NewRef(globals); + } else { + return NULL; + } +} + +static 
PyObject * +function_get_module(PyObject *self, PyObject *func) +{ + PyObject *module = PyFunction_GetModule(func); + if (module != NULL) { + return Py_NewRef(module); + } else { + return NULL; + } +} + +static PyObject * +function_get_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `defaults` are set to `None` + } +} + +static PyObject * +function_set_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} + +static PyObject * +function_get_kw_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetKwDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `kwdefaults` are set to `None` + } +} + +static PyObject * +function_set_kw_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetKwDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} static PyObject *test_buildvalue_issue38913(PyObject *, PyObject *); -static PyObject *getargs_s_hash_int(PyObject *, PyObject *, PyObject*); -static PyObject *getargs_s_hash_int2(PyObject *, PyObject *, PyObject*); static PyMethodDef TestMethods[] = { {"raise_exception", raise_exception, METH_VARARGS}, @@ -5496,33 +3146,11 @@ static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, {"test_sizeof_c_types", test_sizeof_c_types, METH_NOARGS}, - {"test_datetime_capi", test_datetime_capi, METH_NOARGS}, - {"datetime_check_date", datetime_check_date, METH_VARARGS}, - {"datetime_check_time", datetime_check_time, METH_VARARGS}, - {"datetime_check_datetime", datetime_check_datetime, METH_VARARGS}, - {"datetime_check_delta", datetime_check_delta, METH_VARARGS}, - {"datetime_check_tzinfo", datetime_check_tzinfo, METH_VARARGS}, - {"make_timezones_capi", make_timezones_capi, METH_NOARGS}, - {"get_timezones_offset_zero", get_timezones_offset_zero, METH_NOARGS}, - {"get_timezone_utc_capi", get_timezone_utc_capi, METH_VARARGS}, - {"get_date_fromdate", get_date_fromdate, METH_VARARGS}, - {"get_datetime_fromdateandtime", get_datetime_fromdateandtime, METH_VARARGS}, - {"get_datetime_fromdateandtimeandfold", get_datetime_fromdateandtimeandfold, METH_VARARGS}, - {"get_time_fromtime", get_time_fromtime, METH_VARARGS}, - {"get_time_fromtimeandfold", get_time_fromtimeandfold, METH_VARARGS}, - {"get_delta_fromdsu", get_delta_fromdsu, METH_VARARGS}, - {"get_date_fromtimestamp", get_date_fromtimestamp, METH_VARARGS}, - {"get_datetime_fromtimestamp", get_datetime_fromtimestamp, METH_VARARGS}, - {"PyDateTime_GET", test_PyDateTime_GET, METH_O}, - {"PyDateTime_DATE_GET", test_PyDateTime_DATE_GET, METH_O}, - {"PyDateTime_TIME_GET", test_PyDateTime_TIME_GET, METH_O}, - {"PyDateTime_DELTA_GET", test_PyDateTime_DELTA_GET, METH_O}, {"test_gc_control", test_gc_control, METH_NOARGS}, {"test_list_api", test_list_api, METH_NOARGS}, {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, 
{"dict_getitem_knownhash", dict_getitem_knownhash, METH_VARARGS}, {"test_lazy_hash_inheritance", test_lazy_hash_inheritance,METH_NOARGS}, - {"test_long_api", test_long_api, METH_NOARGS}, {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, @@ -5532,20 +3160,9 @@ static PyMethodDef TestMethods[] = { {"test_structseq_newtype_null_descr_doc", test_structseq_newtype_null_descr_doc, METH_NOARGS}, {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, - {"test_long_and_overflow", test_long_and_overflow, METH_NOARGS}, - {"test_long_as_double", test_long_as_double, METH_NOARGS}, - {"test_long_as_size_t", test_long_as_size_t, METH_NOARGS}, - {"test_long_as_unsigned_long_long_mask", - test_long_as_unsigned_long_long_mask, METH_NOARGS}, - {"test_long_numbits", test_long_numbits, METH_NOARGS}, - {"test_k_code", test_k_code, METH_NOARGS}, - {"test_empty_argparse", test_empty_argparse, METH_NOARGS}, - {"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS}, {"pyobject_repr_from_null", pyobject_repr_from_null, METH_NOARGS}, {"pyobject_str_from_null", pyobject_str_from_null, METH_NOARGS}, {"pyobject_bytes_from_null", pyobject_bytes_from_null, METH_NOARGS}, - {"test_with_docstring", test_with_docstring, METH_NOARGS, - PyDoc_STR("This is a pretty normal docstring.")}, {"test_string_to_double", test_string_to_double, METH_NOARGS}, {"test_capsule", (PyCFunction)test_capsule, METH_NOARGS}, {"test_from_contiguous", (PyCFunction)test_from_contiguous, METH_NOARGS}, @@ -5556,66 +3173,9 @@ static PyMethodDef TestMethods[] = { {"PyBuffer_SizeFromFormat", test_PyBuffer_SizeFromFormat, METH_VARARGS}, {"test_buildvalue_N", test_buildvalue_N, METH_NOARGS}, {"test_buildvalue_issue38913", test_buildvalue_issue38913, METH_NOARGS}, - {"get_args", get_args, METH_VARARGS}, {"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS}, {"test_get_type_name", test_get_type_name, METH_NOARGS}, {"test_get_type_qualname", test_get_type_qualname, METH_NOARGS}, - {"get_kwargs", _PyCFunction_CAST(get_kwargs), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_tuple", getargs_tuple, METH_VARARGS}, - {"getargs_keywords", _PyCFunction_CAST(getargs_keywords), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_keyword_only", _PyCFunction_CAST(getargs_keyword_only), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_positional_only_and_keywords", - _PyCFunction_CAST(getargs_positional_only_and_keywords), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_b", getargs_b, METH_VARARGS}, - {"getargs_B", getargs_B, METH_VARARGS}, - {"getargs_h", getargs_h, METH_VARARGS}, - {"getargs_H", getargs_H, METH_VARARGS}, - {"getargs_I", getargs_I, METH_VARARGS}, - {"getargs_k", getargs_k, METH_VARARGS}, - {"getargs_i", getargs_i, METH_VARARGS}, - {"getargs_l", getargs_l, METH_VARARGS}, - {"getargs_n", getargs_n, METH_VARARGS}, - {"getargs_p", getargs_p, METH_VARARGS}, - {"getargs_L", getargs_L, METH_VARARGS}, - {"getargs_K", getargs_K, METH_VARARGS}, - {"test_longlong_api", test_longlong_api, METH_NOARGS}, - {"test_long_long_and_overflow",test_long_long_and_overflow, METH_NOARGS}, - {"test_L_code", test_L_code, METH_NOARGS}, - {"getargs_f", getargs_f, METH_VARARGS}, - {"getargs_d", getargs_d, METH_VARARGS}, - {"getargs_D", getargs_D, METH_VARARGS}, - {"getargs_S", getargs_S, METH_VARARGS}, - {"getargs_Y", getargs_Y, METH_VARARGS}, - {"getargs_U", getargs_U, METH_VARARGS}, - {"getargs_c", getargs_c, 
METH_VARARGS}, - {"getargs_C", getargs_C, METH_VARARGS}, - {"getargs_s", getargs_s, METH_VARARGS}, - {"getargs_s_star", getargs_s_star, METH_VARARGS}, - {"getargs_s_hash", getargs_s_hash, METH_VARARGS}, - {"getargs_s_hash_int", _PyCFunction_CAST(getargs_s_hash_int), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_s_hash_int2", _PyCFunction_CAST(getargs_s_hash_int2), - METH_VARARGS|METH_KEYWORDS}, - {"getargs_z", getargs_z, METH_VARARGS}, - {"getargs_z_star", getargs_z_star, METH_VARARGS}, - {"getargs_z_hash", getargs_z_hash, METH_VARARGS}, - {"getargs_y", getargs_y, METH_VARARGS}, - {"getargs_y_star", getargs_y_star, METH_VARARGS}, - {"getargs_y_hash", getargs_y_hash, METH_VARARGS}, - {"getargs_u", getargs_u, METH_VARARGS}, - {"getargs_u_hash", getargs_u_hash, METH_VARARGS}, - {"getargs_Z", getargs_Z, METH_VARARGS}, - {"getargs_Z_hash", getargs_Z_hash, METH_VARARGS}, - {"getargs_w_star", getargs_w_star, METH_VARARGS}, - {"getargs_es", getargs_es, METH_VARARGS}, - {"getargs_et", getargs_et, METH_VARARGS}, - {"getargs_es_hash", getargs_es_hash, METH_VARARGS}, - {"getargs_et_hash", getargs_et_hash, METH_VARARGS}, - {"test_s_code", test_s_code, METH_NOARGS}, {"_test_thread_state", test_thread_state, METH_VARARGS}, {"_pending_threadfunc", pending_threadfunc, METH_VARARGS}, #ifdef HAVE_GETTIMEOFDAY @@ -5633,46 +3193,11 @@ static PyMethodDef TestMethods[] = { METH_NOARGS}, {"crash_no_current_thread", crash_no_current_thread, METH_NOARGS}, {"run_in_subinterp", run_in_subinterp, METH_VARARGS}, - {"pytime_object_to_time_t", test_pytime_object_to_time_t, METH_VARARGS}, - {"pytime_object_to_timeval", test_pytime_object_to_timeval, METH_VARARGS}, - {"pytime_object_to_timespec", test_pytime_object_to_timespec, METH_VARARGS}, + {"run_in_subinterp_with_config", + _PyCFunction_CAST(run_in_subinterp_with_config), + METH_VARARGS | METH_KEYWORDS}, {"with_tp_del", with_tp_del, METH_VARARGS}, {"create_cfunction", create_cfunction, METH_NOARGS}, - {"test_pymem_alloc0", test_pymem_alloc0, METH_NOARGS}, - {"test_pyobject_new", test_pyobject_new, METH_NOARGS}, - {"test_pymem_setrawallocators",test_pymem_setrawallocators, METH_NOARGS}, - {"test_pymem_setallocators",test_pymem_setallocators, METH_NOARGS}, - {"test_pyobject_setallocators",test_pyobject_setallocators, METH_NOARGS}, - {"set_nomemory", (PyCFunction)set_nomemory, METH_VARARGS, - PyDoc_STR("set_nomemory(start:int, stop:int = 0)")}, - {"remove_mem_hooks", remove_mem_hooks, METH_NOARGS, - PyDoc_STR("Remove memory hooks.")}, - {"no_docstring", - (PyCFunction)test_with_docstring, METH_NOARGS}, - {"docstring_empty", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_empty}, - {"docstring_no_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_no_signature}, - {"docstring_with_invalid_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_invalid_signature}, - {"docstring_with_invalid_signature2", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_invalid_signature2}, - {"docstring_with_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature}, - {"docstring_with_signature_but_no_doc", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_but_no_doc}, - {"docstring_with_signature_and_extra_newlines", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_and_extra_newlines}, - {"docstring_with_signature_with_defaults", - (PyCFunction)test_with_docstring, METH_NOARGS, - docstring_with_signature_with_defaults}, 
{"call_in_temporary_c_thread", call_in_temporary_c_thread, METH_O, PyDoc_STR("set_error_class(error_class) -> None")}, {"pymarshal_write_long_to_file", @@ -5691,26 +3216,6 @@ static PyMethodDef TestMethods[] = { {"return_result_with_error", return_result_with_error, METH_NOARGS}, {"getitem_with_error", getitem_with_error, METH_VARARGS}, {"Py_CompileString", pycompilestring, METH_O}, - {"PyTime_FromSeconds", test_pytime_fromseconds, METH_VARARGS}, - {"PyTime_FromSecondsObject", test_pytime_fromsecondsobject, METH_VARARGS}, - {"PyTime_AsSecondsDouble", test_pytime_assecondsdouble, METH_VARARGS}, - {"PyTime_AsTimeval", test_PyTime_AsTimeval, METH_VARARGS}, - {"PyTime_AsTimeval_clamp", test_PyTime_AsTimeval_clamp, METH_VARARGS}, -#ifdef HAVE_CLOCK_GETTIME - {"PyTime_AsTimespec", test_PyTime_AsTimespec, METH_VARARGS}, - {"PyTime_AsTimespec_clamp", test_PyTime_AsTimespec_clamp, METH_VARARGS}, -#endif - {"PyTime_AsMilliseconds", test_PyTime_AsMilliseconds, METH_VARARGS}, - {"PyTime_AsMicroseconds", test_PyTime_AsMicroseconds, METH_VARARGS}, - {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS}, - {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, - {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, - {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, - {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS}, - {"check_pyobject_uninitialized_is_freed", check_pyobject_uninitialized_is_freed, METH_NOARGS}, - {"check_pyobject_forbidden_bytes_is_freed", check_pyobject_forbidden_bytes_is_freed, METH_NOARGS}, - {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS}, - {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, {"tracemalloc_get_traceback", tracemalloc_get_traceback, METH_VARARGS}, @@ -5723,6 +3228,10 @@ static PyMethodDef TestMethods[] = { {"get_mapping_keys", get_mapping_keys, METH_O}, {"get_mapping_values", get_mapping_values, METH_O}, {"get_mapping_items", get_mapping_items, METH_O}, + {"test_mapping_has_key_string", test_mapping_has_key_string, METH_NOARGS}, + {"mapping_has_key", mapping_has_key, METH_VARARGS}, + {"sequence_set_slice", sequence_set_slice, METH_VARARGS}, + {"sequence_del_slice", sequence_del_slice, METH_VARARGS}, {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS}, {"hamt", new_hamt, METH_NOARGS}, {"bad_get", _PyCFunction_CAST(bad_get), METH_FASTCALL}, @@ -5732,6 +3241,7 @@ static PyMethodDef TestMethods[] = { {"write_unraisable_exc", test_write_unraisable_exc, METH_VARARGS}, {"sequence_getitem", sequence_getitem, METH_VARARGS}, {"sequence_setitem", sequence_setitem, METH_VARARGS}, + {"sequence_delitem", sequence_delitem, METH_VARARGS}, {"hasattr_string", hasattr_string, METH_VARARGS}, {"meth_varargs", meth_varargs, METH_VARARGS}, {"meth_varargs_keywords", _PyCFunction_CAST(meth_varargs_keywords), METH_VARARGS|METH_KEYWORDS}, @@ -5750,162 +3260,30 @@ static PyMethodDef TestMethods[] = { PyDoc_STR("fatal_error(message, release_gil=False): call Py_FatalError(message)")}, {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, {"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, - {"float_pack", test_float_pack, METH_VARARGS, NULL}, - {"float_unpack", test_float_unpack, METH_VARARGS, NULL}, {"frame_getlocals", frame_getlocals, METH_O, NULL}, {"frame_getglobals", frame_getglobals, METH_O, NULL}, 
{"frame_getgenerator", frame_getgenerator, METH_O, NULL}, {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, {"frame_getlasti", frame_getlasti, METH_O, NULL}, + {"frame_getvar", test_frame_getvar, METH_VARARGS, NULL}, + {"frame_getvarstring", test_frame_getvarstring, METH_VARARGS, NULL}, + {"eval_get_func_name", eval_get_func_name, METH_O, NULL}, + {"eval_get_func_desc", eval_get_func_desc, METH_O, NULL}, {"get_feature_macros", get_feature_macros, METH_NOARGS, NULL}, {"test_code_api", test_code_api, METH_NOARGS, NULL}, {"settrace_to_record", settrace_to_record, METH_O, NULL}, {"test_macros", test_macros, METH_NOARGS, NULL}, {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, + {"function_get_code", function_get_code, METH_O, NULL}, + {"function_get_globals", function_get_globals, METH_O, NULL}, + {"function_get_module", function_get_module, METH_O, NULL}, + {"function_get_defaults", function_get_defaults, METH_O, NULL}, + {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, + {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, + {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ }; -typedef struct { - char bool_member; - char byte_member; - unsigned char ubyte_member; - short short_member; - unsigned short ushort_member; - int int_member; - unsigned int uint_member; - long long_member; - unsigned long ulong_member; - Py_ssize_t pyssizet_member; - float float_member; - double double_member; - char inplace_member[6]; - long long longlong_member; - unsigned long long ulonglong_member; -} all_structmembers; - -typedef struct { - PyObject_HEAD - all_structmembers structmembers; -} test_structmembers; - -static struct PyMemberDef test_members[] = { - {"T_BOOL", T_BOOL, offsetof(test_structmembers, structmembers.bool_member), 0, NULL}, - {"T_BYTE", T_BYTE, offsetof(test_structmembers, structmembers.byte_member), 0, NULL}, - {"T_UBYTE", T_UBYTE, offsetof(test_structmembers, structmembers.ubyte_member), 0, NULL}, - {"T_SHORT", T_SHORT, offsetof(test_structmembers, structmembers.short_member), 0, NULL}, - {"T_USHORT", T_USHORT, offsetof(test_structmembers, structmembers.ushort_member), 0, NULL}, - {"T_INT", T_INT, offsetof(test_structmembers, structmembers.int_member), 0, NULL}, - {"T_UINT", T_UINT, offsetof(test_structmembers, structmembers.uint_member), 0, NULL}, - {"T_LONG", T_LONG, offsetof(test_structmembers, structmembers.long_member), 0, NULL}, - {"T_ULONG", T_ULONG, offsetof(test_structmembers, structmembers.ulong_member), 0, NULL}, - {"T_PYSSIZET", T_PYSSIZET, offsetof(test_structmembers, structmembers.pyssizet_member), 0, NULL}, - {"T_FLOAT", T_FLOAT, offsetof(test_structmembers, structmembers.float_member), 0, NULL}, - {"T_DOUBLE", T_DOUBLE, offsetof(test_structmembers, structmembers.double_member), 0, NULL}, - {"T_STRING_INPLACE", T_STRING_INPLACE, offsetof(test_structmembers, structmembers.inplace_member), 0, NULL}, - {"T_LONGLONG", T_LONGLONG, offsetof(test_structmembers, structmembers.longlong_member), 0, NULL}, - {"T_ULONGLONG", T_ULONGLONG, offsetof(test_structmembers, structmembers.ulonglong_member), 0, NULL}, - {NULL} -}; - - -static PyObject * -test_structmembers_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = { - "T_BOOL", "T_BYTE", "T_UBYTE", "T_SHORT", "T_USHORT", - "T_INT", "T_UINT", "T_LONG", "T_ULONG", "T_PYSSIZET", - "T_FLOAT", "T_DOUBLE", "T_STRING_INPLACE", - "T_LONGLONG", "T_ULONGLONG", - NULL}; - static const char fmt[] = 
"|bbBhHiIlknfds#LK"; - test_structmembers *ob; - const char *s = NULL; - Py_ssize_t string_len = 0; - ob = PyObject_New(test_structmembers, type); - if (ob == NULL) - return NULL; - memset(&ob->structmembers, 0, sizeof(all_structmembers)); - if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, - &ob->structmembers.bool_member, - &ob->structmembers.byte_member, - &ob->structmembers.ubyte_member, - &ob->structmembers.short_member, - &ob->structmembers.ushort_member, - &ob->structmembers.int_member, - &ob->structmembers.uint_member, - &ob->structmembers.long_member, - &ob->structmembers.ulong_member, - &ob->structmembers.pyssizet_member, - &ob->structmembers.float_member, - &ob->structmembers.double_member, - &s, &string_len - , &ob->structmembers.longlong_member, - &ob->structmembers.ulonglong_member - )) { - Py_DECREF(ob); - return NULL; - } - if (s != NULL) { - if (string_len > 5) { - Py_DECREF(ob); - PyErr_SetString(PyExc_ValueError, "string too long"); - return NULL; - } - strcpy(ob->structmembers.inplace_member, s); - } - else { - strcpy(ob->structmembers.inplace_member, ""); - } - return (PyObject *)ob; -} - -static void -test_structmembers_free(PyObject *ob) -{ - PyObject_Free(ob); -} - -static PyTypeObject test_structmembersType = { - PyVarObject_HEAD_INIT(NULL, 0) - "test_structmembersType", - sizeof(test_structmembers), /* tp_basicsize */ - 0, /* tp_itemsize */ - test_structmembers_free, /* destructor tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ - 0, /* tp_flags */ - "Type containing all structmember types", - 0, /* traverseproc tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - test_members, /* tp_members */ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - test_structmembers_new, /* tp_new */ -}; - typedef struct { PyObject_HEAD @@ -6052,8 +3430,7 @@ awaitObject_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - Py_INCREF(v); - ao->ao_iterator = v; + ao->ao_iterator = Py_NewRef(v); return (PyObject *)ao; } @@ -6070,8 +3447,7 @@ awaitObject_dealloc(awaitObject *ao) static PyObject * awaitObject_await(awaitObject *ao) { - Py_INCREF(ao->ao_iterator); - return ao->ao_iterator; + return Py_NewRef(ao->ao_iterator); } static PyAsyncMethods awaitType_as_async = { @@ -6292,8 +3668,7 @@ generic_alias_new(PyObject *item) if (o == NULL) { return NULL; } - Py_INCREF(item); - o->item = item; + o->item = Py_NewRef(item); return (PyObject*) o; } @@ -6461,11 +3836,6 @@ PyInit__testcapi(void) Py_SET_TYPE(&_HashInheritanceTester_Type, &PyType_Type); - Py_SET_TYPE(&test_structmembersType, &PyType_Type); - Py_INCREF(&test_structmembersType); - /* don't use a name starting with "test", since we don't want - test_capi to automatically call this */ - PyModule_AddObject(m, "_test_structmembersType", (PyObject *)&test_structmembersType); if (PyType_Ready(&matmulType) < 0) return NULL; Py_INCREF(&matmulType); @@ -6547,14 +3917,6 @@ PyInit__testcapi(void) PyModule_AddObject(m, "instancemethod", (PyObject *)&PyInstanceMethod_Type); PyModule_AddIntConstant(m, "the_number_three", 3); - PyObject *v; -#ifdef WITH_PYMALLOC - v = Py_True; -#else - v = Py_False; 
-#endif - Py_INCREF(v); - PyModule_AddObject(m, "WITH_PYMALLOC", v); TestError = PyErr_NewException("_testcapi.error", NULL, NULL); Py_INCREF(TestError); @@ -6578,6 +3940,33 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Unicode(m) < 0) { return NULL; } + if (_PyTestCapi_Init_GetArgs(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_PyTime(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_DateTime(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Docstring(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Mem(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Watchers(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Long(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Float(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Structmember(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, "LIMITED_API_AVAILABLE", Py_False); @@ -6635,35 +4024,3 @@ test_buildvalue_issue38913(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } - -#undef PyArg_ParseTupleAndKeywords -PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, - const char *, char **, ...); - -static PyObject * -getargs_s_hash_int(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "x", NULL}; - Py_buffer buf = {NULL}; - const char *s; - int len; - int i = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|s#i", keywords, &buf, &s, &len, &i)) - return NULL; - PyBuffer_Release(&buf); - Py_RETURN_NONE; -} - -static PyObject * -getargs_s_hash_int2(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *keywords[] = {"", "", "x", NULL}; - Py_buffer buf = {NULL}; - const char *s; - int len; - int i = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|(s#)i", keywords, &buf, &s, &len, &i)) - return NULL; - PyBuffer_Release(&buf); - Py_RETURN_NONE; -} diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c new file mode 100644 index 00000000000000..91fdee24d328d9 --- /dev/null +++ b/Modules/_testclinic.c @@ -0,0 +1,1187 @@ +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 +#endif + +/* Always enable assertions */ +#undef NDEBUG + +#define PY_SSIZE_T_CLEAN + +#include "Python.h" + +#include "clinic/_testclinic.c.h" + + +/* Pack arguments to a tuple, implicitly increase all the arguments' refcount. + * NULL arguments will be replaced to Py_None. */ +static PyObject * +pack_arguments_newref(int argc, ...) +{ + assert(!PyErr_Occurred()); + PyObject *tuple = PyTuple_New(argc); + if (!tuple) { + return NULL; + } + + va_list vargs; + va_start(vargs, argc); + for (int i = 0; i < argc; i++) { + PyObject *arg = va_arg(vargs, PyObject *); + if (arg) { + if (_PyObject_IsFreed(arg)) { + PyErr_Format(PyExc_AssertionError, + "argument %d at %p is freed or corrupted!", + i, arg); + va_end(vargs); + Py_DECREF(tuple); + return NULL; + } + } + else { + arg = Py_None; + } + PyTuple_SET_ITEM(tuple, i, Py_NewRef(arg)); + } + va_end(vargs); + return tuple; +} + +/* Pack arguments to a tuple. + * `wrapper` is function which converts primitive type to PyObject. + * `arg_type` is type that arguments should be converted to before wrapped. */ +#define RETURN_PACKED_ARGS(argc, wrapper, arg_type, ...) 
do { \ + assert(!PyErr_Occurred()); \ + arg_type in[argc] = {__VA_ARGS__}; \ + PyObject *out[argc] = {NULL,}; \ + for (int _i = 0; _i < argc; _i++) { \ + out[_i] = wrapper(in[_i]); \ + assert(out[_i] || PyErr_Occurred()); \ + if (!out[_i]) { \ + for (int _j = 0; _j < _i; _j++) { \ + Py_DECREF(out[_j]); \ + } \ + return NULL; \ + } \ + } \ + PyObject *tuple = PyTuple_New(argc); \ + if (!tuple) { \ + for (int _i = 0; _i < argc; _i++) { \ + Py_DECREF(out[_i]); \ + } \ + return NULL; \ + } \ + for (int _i = 0; _i < argc; _i++) { \ + PyTuple_SET_ITEM(tuple, _i, out[_i]); \ + } \ + return tuple; \ + } while (0) + + +/*[clinic input] +module _testclinic +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=d4981b80d6efdb12]*/ + + +/*[clinic input] +test_empty_function + +[clinic start generated code]*/ + +static PyObject * +test_empty_function_impl(PyObject *module) +/*[clinic end generated code: output=0f8aeb3ddced55cb input=0dd7048651ad4ae4]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +objects_converter + + a: object + b: object = NULL + / + +[clinic start generated code]*/ + +static PyObject * +objects_converter_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=3f9c9415ec86c695 input=1533b1bd94187de4]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +bytes_object_converter + + a: PyBytesObject + / + +[clinic start generated code]*/ + +static PyObject * +bytes_object_converter_impl(PyObject *module, PyBytesObject *a) +/*[clinic end generated code: output=7732da869d74b784 input=94211751e7996236]*/ +{ + if (!PyBytes_Check(a)) { + PyErr_SetString(PyExc_AssertionError, + "argument a is not a PyBytesObject"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +byte_array_object_converter + + a: PyByteArrayObject + / + +[clinic start generated code]*/ + +static PyObject * +byte_array_object_converter_impl(PyObject *module, PyByteArrayObject *a) +/*[clinic end generated code: output=51f15c76f302b1f7 input=b04d253db51c6f56]*/ +{ + if (!PyByteArray_Check(a)) { + PyErr_SetString(PyExc_AssertionError, + "argument a is not a PyByteArrayObject"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +unicode_converter + + a: unicode + / + +[clinic start generated code]*/ + +static PyObject * +unicode_converter_impl(PyObject *module, PyObject *a) +/*[clinic end generated code: output=1b4a4adbb6ac6e34 input=de7b5adbf07435ba]*/ +{ + if (!PyUnicode_Check(a)) { + PyErr_SetString(PyExc_AssertionError, + "argument a is not a unicode object"); + return NULL; + } + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +bool_converter + + a: bool = True + b: bool(accept={object}) = True + c: bool(accept={int}) = True + / + +[clinic start generated code]*/ + +static PyObject * +bool_converter_impl(PyObject *module, int a, int b, int c) +/*[clinic end generated code: output=17005b0c29afd590 input=7f6537705b2f32f4]*/ +{ + PyObject *obj_a = a ? Py_True : Py_False; + PyObject *obj_b = b ? Py_True : Py_False; + PyObject *obj_c = c ? Py_True : Py_False; + return pack_arguments_newref(3, obj_a, obj_b, obj_c); +} + + +/*[clinic input] +char_converter + + a: char = b'A' + b: char = b'\a' + c: char = b'\b' + d: char = b'\t' + e: char = b'\n' + f: char = b'\v' + g: char = b'\f' + h: char = b'\r' + i: char = b'"' + j: char = b"'" + k: char = b'?' 
+ l: char = b'\\' + m: char = b'\000' + n: char = b'\377' + / + +[clinic start generated code]*/ + +static PyObject * +char_converter_impl(PyObject *module, char a, char b, char c, char d, char e, + char f, char g, char h, char i, char j, char k, char l, + char m, char n) +/*[clinic end generated code: output=f929dbd2e55a9871 input=b601bc5bc7fe85e3]*/ +{ + RETURN_PACKED_ARGS(14, PyLong_FromUnsignedLong, unsigned char, + a, b, c, d, e, f, g, h, i, j, k, l, m, n); +} + + +/*[clinic input] +unsigned_char_converter + + a: unsigned_char = 12 + b: unsigned_char(bitwise=False) = 34 + c: unsigned_char(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_char_converter_impl(PyObject *module, unsigned char a, + unsigned char b, unsigned char c) +/*[clinic end generated code: output=490af3b39ce0b199 input=e859502fbe0b3185]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned char, a, b, c); +} + + +/*[clinic input] +short_converter + + a: short = 12 + / + +[clinic start generated code]*/ + +static PyObject * +short_converter_impl(PyObject *module, short a) +/*[clinic end generated code: output=1ebb7ddb64248988 input=b4e2309a66f650ae]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLong, long, a); +} + + +/*[clinic input] +unsigned_short_converter + + a: unsigned_short = 12 + b: unsigned_short(bitwise=False) = 34 + c: unsigned_short(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_short_converter_impl(PyObject *module, unsigned short a, + unsigned short b, unsigned short c) +/*[clinic end generated code: output=5f92cc72fc8707a7 input=9d15cd11e741d0c6]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +int_converter + + a: int = 12 + b: int(accept={int}) = 34 + c: int(accept={str}) = 45 + / + +[clinic start generated code]*/ + +static PyObject * +int_converter_impl(PyObject *module, int a, int b, int c) +/*[clinic end generated code: output=8e56b59be7d0c306 input=a1dbc6344853db7a]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromLong, long, a, b, c); +} + + +/*[clinic input] +unsigned_int_converter + + a: unsigned_int = 12 + b: unsigned_int(bitwise=False) = 34 + c: unsigned_int(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_int_converter_impl(PyObject *module, unsigned int a, unsigned int b, + unsigned int c) +/*[clinic end generated code: output=399a57a05c494cc7 input=8427ed9a3f96272d]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +long_converter + + a: long = 12 + / + +[clinic start generated code]*/ + +static PyObject * +long_converter_impl(PyObject *module, long a) +/*[clinic end generated code: output=9663d936a652707a input=84ad0ef28f24bd85]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLong, long, a); +} + + +/*[clinic input] +unsigned_long_converter + + a: unsigned_long = 12 + b: unsigned_long(bitwise=False) = 34 + c: unsigned_long(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_long_converter_impl(PyObject *module, unsigned long a, + unsigned long b, unsigned long c) +/*[clinic end generated code: output=120b82ea9ebd93a8 input=440dd6f1817f5d91]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLong, unsigned long, a, b, c); +} + + +/*[clinic input] +long_long_converter + + a: long_long = 12 + / + +[clinic start generated code]*/ + +static PyObject * +long_long_converter_impl(PyObject *module, long long a) +/*[clinic end generated code: 
output=5fb5f2220770c3e1 input=730fcb3eecf4d993]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromLongLong, long long, a); +} + + +/*[clinic input] +unsigned_long_long_converter + + a: unsigned_long_long = 12 + b: unsigned_long_long(bitwise=False) = 34 + c: unsigned_long_long(bitwise=True) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +unsigned_long_long_converter_impl(PyObject *module, unsigned long long a, + unsigned long long b, unsigned long long c) +/*[clinic end generated code: output=65b7273e63501762 input=300737b0bdb230e9]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromUnsignedLongLong, unsigned long long, + a, b, c); +} + + +/*[clinic input] +py_ssize_t_converter + + a: Py_ssize_t = 12 + b: Py_ssize_t(accept={int}) = 34 + c: Py_ssize_t(accept={int, NoneType}) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c) +/*[clinic end generated code: output=ce252143e0ed0372 input=76d0f342e9317a1f]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromSsize_t, Py_ssize_t, a, b, c); +} + + +/*[clinic input] +slice_index_converter + + a: slice_index = 12 + b: slice_index(accept={int}) = 34 + c: slice_index(accept={int, NoneType}) = 56 + / + +[clinic start generated code]*/ + +static PyObject * +slice_index_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c) +/*[clinic end generated code: output=923c6cac77666a6b input=64f99f3f83265e47]*/ +{ + RETURN_PACKED_ARGS(3, PyLong_FromSsize_t, Py_ssize_t, a, b, c); +} + + +/*[clinic input] +size_t_converter + + a: size_t = 12 + / + +[clinic start generated code]*/ + +static PyObject * +size_t_converter_impl(PyObject *module, size_t a) +/*[clinic end generated code: output=412b5b7334ab444d input=83ae7d9171fbf208]*/ +{ + RETURN_PACKED_ARGS(1, PyLong_FromSize_t, size_t, a); +} + + +/*[clinic input] +float_converter + + a: float = 12.5 + / + +[clinic start generated code]*/ + +static PyObject * +float_converter_impl(PyObject *module, float a) +/*[clinic end generated code: output=1c98f64f2cf1d55c input=a625b59ad68047d8]*/ +{ + RETURN_PACKED_ARGS(1, PyFloat_FromDouble, double, a); +} + + +/*[clinic input] +double_converter + + a: double = 12.5 + / + +[clinic start generated code]*/ + +static PyObject * +double_converter_impl(PyObject *module, double a) +/*[clinic end generated code: output=a4e8532d284d035d input=098df188f24e7c62]*/ +{ + RETURN_PACKED_ARGS(1, PyFloat_FromDouble, double, a); +} + + +/*[clinic input] +py_complex_converter + + a: Py_complex + / + +[clinic start generated code]*/ + +static PyObject * +py_complex_converter_impl(PyObject *module, Py_complex a) +/*[clinic end generated code: output=9e6ca2eb53b14846 input=e9148a8ca1dbf195]*/ +{ + RETURN_PACKED_ARGS(1, PyComplex_FromCComplex, Py_complex, a); +} + + +/*[clinic input] +str_converter + + a: str = "a" + b: str(accept={robuffer}) = "b" + c: str(accept={robuffer, str}, zeroes=True) = "c" + / + +[clinic start generated code]*/ + +static PyObject * +str_converter_impl(PyObject *module, const char *a, const char *b, + const char *c, Py_ssize_t c_length) +/*[clinic end generated code: output=475bea40548c8cd6 input=bff2656c92ee25de]*/ +{ + assert(!PyErr_Occurred()); + PyObject *out[3] = {NULL,}; + int i = 0; + PyObject *arg; + + arg = PyUnicode_FromString(a); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromString(b); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = 
PyUnicode_FromStringAndSize(c, c_length); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + PyObject *tuple = PyTuple_New(3); + if (!tuple) { + goto error; + } + for (int j = 0; j < 3; j++) { + PyTuple_SET_ITEM(tuple, j, out[j]); + } + return tuple; + +error: + for (int j = 0; j < i; j++) { + Py_DECREF(out[j]); + } + return NULL; +} + + +/*[clinic input] +str_converter_encoding + + a: str(encoding="idna") + b: str(encoding="idna", accept={bytes, bytearray, str}) + c: str(encoding="idna", accept={bytes, bytearray, str}, zeroes=True) + / + +[clinic start generated code]*/ + +static PyObject * +str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, + Py_ssize_t c_length) +/*[clinic end generated code: output=af68766049248a1c input=0c5cf5159d0e870d]*/ +{ + assert(!PyErr_Occurred()); + PyObject *out[3] = {NULL,}; + int i = 0; + PyObject *arg; + + arg = PyUnicode_FromString(a); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromString(b); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + arg = PyUnicode_FromStringAndSize(c, c_length); + assert(arg || PyErr_Occurred()); + if (!arg) { + goto error; + } + out[i++] = arg; + + PyObject *tuple = PyTuple_New(3); + if (!tuple) { + goto error; + } + for (int j = 0; j < 3; j++) { + PyTuple_SET_ITEM(tuple, j, out[j]); + } + return tuple; + +error: + for (int j = 0; j < i; j++) { + Py_DECREF(out[j]); + } + return NULL; +} + + +static PyObject * +bytes_from_buffer(Py_buffer *buf) +{ + PyObject *bytes_obj = PyBytes_FromStringAndSize(NULL, buf->len); + if (!bytes_obj) { + return NULL; + } + void *bytes_obj_buf = ((PyBytesObject *)bytes_obj)->ob_sval; + if (PyBuffer_ToContiguous(bytes_obj_buf, buf, buf->len, 'C') < 0) { + Py_DECREF(bytes_obj); + return NULL; + } + return bytes_obj; +} + +/*[clinic input] +py_buffer_converter + + a: Py_buffer(accept={str, buffer, NoneType}) + b: Py_buffer(accept={rwbuffer}) + / + +[clinic start generated code]*/ + +static PyObject * +py_buffer_converter_impl(PyObject *module, Py_buffer *a, Py_buffer *b) +/*[clinic end generated code: output=52fb13311e3d6d03 input=775de727de5c7421]*/ +{ + RETURN_PACKED_ARGS(2, bytes_from_buffer, Py_buffer *, a, b); +} + + +/*[clinic input] +keywords + + a: object + b: object + +[clinic start generated code]*/ + +static PyObject * +keywords_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=850aaed53e26729e input=f44b89e718c1a93b]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +keywords_kwonly + + a: object + * + b: object + +[clinic start generated code]*/ + +static PyObject * +keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=a45c48241da584dc input=1f08e39c3312b015]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +keywords_opt + + a: object + b: object = None + c: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, PyObject *c) +/*[clinic end generated code: output=25e4b67d91c76a66 input=b0ba0e4f04904556]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +keywords_opt_kwonly + + a: object + b: object = None + * + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_opt_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) 
+/*[clinic end generated code: output=6aa5b655a6e9aeb0 input=f79da689d6c51076]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +keywords_kwonly_opt + + a: object + * + b: object = None + c: object = None + +[clinic start generated code]*/ + +static PyObject * +keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c) +/*[clinic end generated code: output=707f78eb0f55c2b1 input=e0fa1a0e46dca791]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +posonly_keywords + + a: object + / + b: object + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=6ac88f4a5f0bfc8d input=fde0a2f79fe82b06]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +posonly_kwonly + + a: object + / + * + b: object + +[clinic start generated code]*/ + +static PyObject * +posonly_kwonly_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=483e6790d3482185 input=78b3712768da9a19]*/ +{ + return pack_arguments_newref(2, a, b); +} + + +/*[clinic input] +posonly_keywords_kwonly + + a: object + / + b: object + * + c: object + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c) +/*[clinic end generated code: output=2fae573e8cc3fad8 input=a1ad5d2295eb803c]*/ +{ + return pack_arguments_newref(3, a, b, c); +} + + +/*[clinic input] +posonly_keywords_opt + + a: object + / + b: object + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=f5eb66241bcf68fb input=51c10de2a120e279]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_opt_keywords_opt + + a: object + b: object = None + / + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=d54a30e549296ffd input=f408a1de7dfaf31f]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_kwonly_opt + + a: object + / + * + b: object + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=a20503fe36b4fd62 input=3494253975272f52]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_opt_kwonly_opt + + a: object + b: object = None + / + * + c: object = None + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=64f3204a3a0413b6 input=d17516581e478412]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +posonly_keywords_kwonly_opt + + a: object + / + b: object + * + c: object + d: object = None + e: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e) +/*[clinic end generated code: output=dbd7e7ddd6257fa0 input=33529f29e97e5adb]*/ +{ + return 
pack_arguments_newref(5, a, b, c, d, e); +} + + +/*[clinic input] +posonly_keywords_opt_kwonly_opt + + a: object + / + b: object + c: object = None + * + d: object = None + e: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, PyObject *d, + PyObject *e) +/*[clinic end generated code: output=775d12ae44653045 input=4d4cc62f11441301]*/ +{ + return pack_arguments_newref(5, a, b, c, d, e); +} + + +/*[clinic input] +posonly_opt_keywords_opt_kwonly_opt + + a: object + b: object = None + / + c: object = None + * + d: object = None + +[clinic start generated code]*/ + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, + PyObject *d) +/*[clinic end generated code: output=40c6dc422591eade input=3964960a68622431]*/ +{ + return pack_arguments_newref(4, a, b, c, d); +} + + +/*[clinic input] +keyword_only_parameter + + * + a: object + +[clinic start generated code]*/ + +static PyObject * +keyword_only_parameter_impl(PyObject *module, PyObject *a) +/*[clinic end generated code: output=c454b6ce98232787 input=8d2868b8d0b27bdb]*/ +{ + return pack_arguments_newref(1, a); +} + + +/*[clinic input] +posonly_vararg + + a: object + / + b: object + *args: object + +[clinic start generated code]*/ + +static PyObject * +posonly_vararg_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *args) +/*[clinic end generated code: output=ee6713acda6b954e input=783427fe7ec2b67a]*/ +{ + return pack_arguments_newref(3, a, b, args); +} + + +/*[clinic input] +vararg_and_posonly + + a: object + *args: object + / + +[clinic start generated code]*/ + +static PyObject * +vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) +/*[clinic end generated code: output=42792f799465a14d input=defe017b19ba52e8]*/ +{ + return pack_arguments_newref(2, a, args); +} + + +/*[clinic input] +vararg + + a: object + *args: object + +[clinic start generated code]*/ + +static PyObject * +vararg_impl(PyObject *module, PyObject *a, PyObject *args) +/*[clinic end generated code: output=91ab7a0efc52dd5e input=02c0f772d05f591e]*/ +{ + return pack_arguments_newref(2, a, args); +} + + +/*[clinic input] +vararg_with_default + + a: object + *args: object + b: bool = False + +[clinic start generated code]*/ + +static PyObject * +vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, + int b) +/*[clinic end generated code: output=182c01035958ce92 input=68cafa6a79f89e36]*/ +{ + PyObject *obj_b = b ? Py_True : Py_False; + return pack_arguments_newref(3, a, args, obj_b); +} + + +/*[clinic input] +vararg_with_only_defaults + + *args: object + b: object = None + +[clinic start generated code]*/ + +static PyObject * +vararg_with_only_defaults_impl(PyObject *module, PyObject *args, PyObject *b) +/*[clinic end generated code: output=c06b1826d91f2f7b input=678c069bc67550e1]*/ +{ + return pack_arguments_newref(2, args, b); +} + + + +/*[clinic input] +gh_32092_oob + + pos1: object + pos2: object + *varargs: object + kw1: object = None + kw2: object = None + +Proof-of-concept of GH-32092 OOB bug. 
+ +[clinic start generated code]*/ + +static PyObject * +gh_32092_oob_impl(PyObject *module, PyObject *pos1, PyObject *pos2, + PyObject *varargs, PyObject *kw1, PyObject *kw2) +/*[clinic end generated code: output=ee259c130054653f input=46d15c881608f8ff]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_32092_kw_pass + + pos: object + *args: object + kw: object = None + +Proof-of-concept of GH-32092 keyword args passing bug. + +[clinic start generated code]*/ + +static PyObject * +gh_32092_kw_pass_impl(PyObject *module, PyObject *pos, PyObject *args, + PyObject *kw) +/*[clinic end generated code: output=4a2bbe4f7c8604e9 input=5c0bd5b9079a0cce]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_99233_refcount + + *args: object + / + +Proof-of-concept of GH-99233 refcount error bug. + +[clinic start generated code]*/ + +static PyObject * +gh_99233_refcount_impl(PyObject *module, PyObject *args) +/*[clinic end generated code: output=585855abfbca9a7f input=85f5fb47ac91a626]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +gh_99240_double_free + + a: str(encoding="idna") + b: str(encoding="idna") + / + +Proof-of-concept of GH-99240 double-free bug. + +[clinic start generated code]*/ + +static PyObject * +gh_99240_double_free_impl(PyObject *module, char *a, char *b) +/*[clinic end generated code: output=586dc714992fe2ed input=23db44aa91870fc7]*/ +{ + Py_RETURN_NONE; +} + + +static PyMethodDef tester_methods[] = { + TEST_EMPTY_FUNCTION_METHODDEF + OBJECTS_CONVERTER_METHODDEF + BYTES_OBJECT_CONVERTER_METHODDEF + BYTE_ARRAY_OBJECT_CONVERTER_METHODDEF + UNICODE_CONVERTER_METHODDEF + BOOL_CONVERTER_METHODDEF + CHAR_CONVERTER_METHODDEF + UNSIGNED_CHAR_CONVERTER_METHODDEF + SHORT_CONVERTER_METHODDEF + UNSIGNED_SHORT_CONVERTER_METHODDEF + INT_CONVERTER_METHODDEF + UNSIGNED_INT_CONVERTER_METHODDEF + LONG_CONVERTER_METHODDEF + UNSIGNED_LONG_CONVERTER_METHODDEF + LONG_LONG_CONVERTER_METHODDEF + UNSIGNED_LONG_LONG_CONVERTER_METHODDEF + PY_SSIZE_T_CONVERTER_METHODDEF + SLICE_INDEX_CONVERTER_METHODDEF + SIZE_T_CONVERTER_METHODDEF + FLOAT_CONVERTER_METHODDEF + DOUBLE_CONVERTER_METHODDEF + PY_COMPLEX_CONVERTER_METHODDEF + STR_CONVERTER_METHODDEF + STR_CONVERTER_ENCODING_METHODDEF + PY_BUFFER_CONVERTER_METHODDEF + KEYWORDS_METHODDEF + KEYWORDS_KWONLY_METHODDEF + KEYWORDS_OPT_METHODDEF + KEYWORDS_OPT_KWONLY_METHODDEF + KEYWORDS_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_METHODDEF + POSONLY_KWONLY_METHODDEF + POSONLY_KEYWORDS_KWONLY_METHODDEF + POSONLY_KEYWORDS_OPT_METHODDEF + POSONLY_OPT_KEYWORDS_OPT_METHODDEF + POSONLY_KWONLY_OPT_METHODDEF + POSONLY_OPT_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_KWONLY_OPT_METHODDEF + POSONLY_KEYWORDS_OPT_KWONLY_OPT_METHODDEF + POSONLY_OPT_KEYWORDS_OPT_KWONLY_OPT_METHODDEF + KEYWORD_ONLY_PARAMETER_METHODDEF + POSONLY_VARARG_METHODDEF + VARARG_AND_POSONLY_METHODDEF + VARARG_METHODDEF + VARARG_WITH_DEFAULT_METHODDEF + VARARG_WITH_ONLY_DEFAULTS_METHODDEF + GH_32092_OOB_METHODDEF + GH_32092_KW_PASS_METHODDEF + GH_99233_REFCOUNT_METHODDEF + GH_99240_DOUBLE_FREE_METHODDEF + {NULL, NULL} +}; + +static struct PyModuleDef _testclinic_module = { + PyModuleDef_HEAD_INIT, + .m_name = "_testclinic", + .m_size = 0, + .m_methods = tester_methods, +}; + +PyMODINIT_FUNC +PyInit__testclinic(void) +{ + return PyModule_Create(&_testclinic_module); +} + +#undef RETURN_PACKED_ARGS diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 5724bd5f200f4c..b14b8ac3c74054 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,7 +14,7 @@ #include "Python.h" 
#include "pycore_atomic_funcs.h" // _Py_atomic_int_get() #include "pycore_bitutils.h" // _Py_bswap32() -#include "pycore_compile.h" // _PyCompile_OptimizeCfg() +#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg #include "pycore_fileutils.h" // _Py_normpath #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_gc.h" // PyGC_Head @@ -523,13 +523,31 @@ set_eval_frame_record(PyObject *self, PyObject *list) PyErr_SetString(PyExc_TypeError, "argument must be a list"); return NULL; } - Py_CLEAR(record_list); - Py_INCREF(list); - record_list = list; + Py_XSETREF(record_list, Py_NewRef(list)); _PyInterpreterState_SetEvalFrameFunc(PyInterpreterState_Get(), record_eval); Py_RETURN_NONE; } +/*[clinic input] + +_testinternalcapi.compiler_codegen -> object + + ast: object + filename: object + optimize: int + +Apply compiler code generation to an AST. +[clinic start generated code]*/ + +static PyObject * +_testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, + PyObject *filename, int optimize) +/*[clinic end generated code: output=fbbbbfb34700c804 input=e9fbe6562f7f75e4]*/ +{ + PyCompilerFlags *flags = NULL; + return _PyCompile_CodeGen(ast, filename, flags, optimize); +} + /*[clinic input] @@ -550,6 +568,51 @@ _testinternalcapi_optimize_cfg_impl(PyObject *module, PyObject *instructions, } +static PyObject * +get_interp_settings(PyObject *self, PyObject *args) +{ + int interpid = -1; + if (!PyArg_ParseTuple(args, "|i:get_interp_settings", &interpid)) { + return NULL; + } + + PyInterpreterState *interp = NULL; + if (interpid < 0) { + PyThreadState *tstate = _PyThreadState_GET(); + interp = tstate ? tstate->interp : _PyInterpreterState_Main(); + } + else if (interpid == 0) { + interp = _PyInterpreterState_Main(); + } + else { + PyErr_Format(PyExc_NotImplementedError, + "%zd", interpid); + return NULL; + } + assert(interp != NULL); + + PyObject *settings = PyDict_New(); + if (settings == NULL) { + return NULL; + } + + /* Add the feature flags. 
*/ + PyObject *flags = PyLong_FromUnsignedLong(interp->feature_flags); + if (flags == NULL) { + Py_DECREF(settings); + return NULL; + } + int res = PyDict_SetItemString(settings, "feature_flags", flags); + Py_DECREF(flags); + if (res != 0) { + Py_DECREF(settings); + return NULL; + } + + return settings; +} + + static PyMethodDef TestMethods[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, @@ -568,7 +631,9 @@ static PyMethodDef TestMethods[] = { {"DecodeLocaleEx", decode_locale_ex, METH_VARARGS}, {"set_eval_frame_default", set_eval_frame_default, METH_NOARGS, NULL}, {"set_eval_frame_record", set_eval_frame_record, METH_O, NULL}, + _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF _TESTINTERNALCAPI_OPTIMIZE_CFG_METHODDEF + {"get_interp_settings", get_interp_settings, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index b8993a29ae95df..e34854f7025798 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -57,8 +57,7 @@ Example_demo(ExampleObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "|O:demo", &o)) return NULL; if (o != NULL && PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } Py_RETURN_NONE; } @@ -77,8 +76,7 @@ Example_getattro(ExampleObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -151,8 +149,7 @@ _testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject * return NULL; } assert(PyType_GetModuleByDef(Py_TYPE(self), &def_meth_state_access) == retval); - Py_INCREF(retval); - return retval; + return Py_NewRef(retval); } /*[clinic input] diff --git a/Modules/_testsinglephase.c b/Modules/_testsinglephase.c new file mode 100644 index 00000000000000..3bfe159e54fe49 --- /dev/null +++ b/Modules/_testsinglephase.c @@ -0,0 +1,78 @@ + +/* Testing module for single-phase initialization of extension modules + */ +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 +#endif + +#include "Python.h" +#include "pycore_namespace.h" // _PyNamespace_New() + + +/* Function of two integers returning integer */ + +PyDoc_STRVAR(testexport_foo_doc, +"foo(i,j)\n\ +\n\ +Return the sum of i and j."); + +static PyObject * +testexport_foo(PyObject *self, PyObject *args) +{ + long i, j; + long res; + if (!PyArg_ParseTuple(args, "ll:foo", &i, &j)) + return NULL; + res = i + j; + return PyLong_FromLong(res); +} + + +static PyMethodDef TestMethods[] = { + {"foo", testexport_foo, METH_VARARGS, + testexport_foo_doc}, + {NULL, NULL} /* sentinel */ +}; + + +static struct PyModuleDef _testsinglephase = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase", + .m_doc = PyDoc_STR("Test module _testsinglephase (main)"), + .m_size = -1, // no module state + .m_methods = TestMethods, +}; + + +PyMODINIT_FUNC +PyInit__testsinglephase(void) +{ + PyObject *module = PyModule_Create(&_testsinglephase); + if (module == NULL) { + return NULL; + } + + /* Add an exception type */ + PyObject *temp = PyErr_NewException("_testsinglephase.error", NULL, NULL); + if (temp == NULL) { + goto error; + } + if (PyModule_AddObject(module, "error", temp) != 0) { + Py_DECREF(temp); + goto error; + } + + if (PyModule_AddIntConstant(module, "int_const", 1969) != 0) { + goto error; + } + + if (PyModule_AddStringConstant(module, "str_const", "something different") != 0) 
{ + goto error; + } + + return module; + +error: + Py_DECREF(module); + return NULL; +} diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 4ac90dc8068934..ec8b6d881124da 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1102,6 +1102,24 @@ thread_run(void *boot_raw) // to open the libgcc_s.so library (ex: EMFILE error). } +static PyObject * +thread_daemon_threads_allowed(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyInterpreterState *interp = _PyInterpreterState_Get(); + if (interp->feature_flags & Py_RTFLAGS_DAEMON_THREADS) { + Py_RETURN_TRUE; + } + else { + Py_RETURN_FALSE; + } +} + +PyDoc_STRVAR(daemon_threads_allowed_doc, +"daemon_threads_allowed()\n\ +\n\ +Return True if daemon threads are allowed in the current interpreter,\n\ +and False otherwise.\n"); + static PyObject * thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) { @@ -1127,8 +1145,13 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) return NULL; } + if (PySys_Audit("_thread.start_new_thread", "OOO", + func, args, kwargs ? kwargs : Py_None) < 0) { + return NULL; + } + PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->config._isolated_interpreter) { + if (!_PyInterpreterState_HasFeature(interp, Py_RTFLAGS_THREADS)) { PyErr_SetString(PyExc_RuntimeError, "thread is not supported for isolated subinterpreters"); return NULL; @@ -1142,7 +1165,10 @@ thread_PyThread_start_new_thread(PyObject *self, PyObject *fargs) boot->tstate = _PyThreadState_Prealloc(boot->interp); if (boot->tstate == NULL) { PyMem_Free(boot); - return PyErr_NoMemory(); + if (!PyErr_Occurred()) { + return PyErr_NoMemory(); + } + return NULL; } boot->runtime = runtime; boot->func = Py_NewRef(func); @@ -1543,6 +1569,8 @@ static PyMethodDef thread_methods[] = { METH_VARARGS, start_new_doc}, {"start_new", (PyCFunction)thread_PyThread_start_new_thread, METH_VARARGS, start_new_doc}, + {"daemon_threads_allowed", (PyCFunction)thread_daemon_threads_allowed, + METH_NOARGS, daemon_threads_allowed_doc}, {"allocate_lock", thread_PyThread_allocate_lock, METH_NOARGS, allocate_doc}, {"allocate", thread_PyThread_allocate_lock, diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index 4807ad59f6da2a..93d4474f65d62c 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -784,16 +784,14 @@ PyTclObject_string(PyTclObject *self, void *ignored) if (!self->string) return NULL; } - Py_INCREF(self->string); - return self->string; + return Py_NewRef(self->string); } static PyObject * PyTclObject_str(PyTclObject *self) { if (self->string) { - Py_INCREF(self->string); - return self->string; + return Py_NewRef(self->string); } /* XXX Could cache result if it is non-ASCII. 
*/ return unicodeFromTclObj(self->value); @@ -1107,8 +1105,7 @@ fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) PyMem_Free(bytes); if (res != NULL && bigValue.sign == MP_NEG) { PyObject *res2 = PyNumber_Negative(res); - Py_DECREF(res); - res = res2; + Py_SETREF(res, res2); } mp_clear(&bigValue); return res; @@ -1736,8 +1733,7 @@ SetVar(TkappObject *self, PyObject *args, int flags) if (!ok) Tkinter_Error(self); else { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL break; @@ -1755,8 +1751,7 @@ SetVar(TkappObject *self, PyObject *args, int flags) if (!ok) Tkinter_Error(self); else { - res = Py_None; - Py_INCREF(res); + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL break; @@ -1842,8 +1837,7 @@ UnsetVar(TkappObject *self, PyObject *args, int flags) if (code == TCL_ERROR) res = Tkinter_Error(self); else { - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); } LEAVE_OVERLAP_TCL return res; @@ -1883,8 +1877,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg) PyObject *result; if (PyLong_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyTclObject_Check(arg)) { @@ -1928,8 +1921,7 @@ _tkinter_tkapp_getdouble(TkappObject *self, PyObject *arg) double v; if (PyFloat_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyNumber_Check(arg)) { @@ -2145,8 +2137,7 @@ _tkinter_tkapp_splitlist(TkappObject *self, PyObject *arg) return v; } if (PyTuple_Check(arg)) { - Py_INCREF(arg); - return arg; + return Py_NewRef(arg); } if (PyList_Check(arg)) { return PySequence_Tuple(arg); @@ -2172,8 +2163,7 @@ _tkinter_tkapp_splitlist(TkappObject *self, PyObject *arg) for (i = 0; i < argc; i++) { PyObject *s = unicodeFromTclString(argv[i]); if (!s) { - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); goto finally; } PyTuple_SET_ITEM(v, i, s); @@ -2322,10 +2312,8 @@ _tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, data = PyMem_NEW(PythonCmd_ClientData, 1); if (!data) return PyErr_NoMemory(); - Py_INCREF(self); - Py_INCREF(func); - data->self = (PyObject *) self; - data->func = func; + data->self = Py_NewRef(self); + data->func = Py_NewRef(func); if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { Tcl_Condition cond = NULL; CommandEvent *ev = (CommandEvent*)attemptckalloc(sizeof(CommandEvent)); @@ -2430,10 +2418,8 @@ NewFHCD(PyObject *func, PyObject *file, int id) FileHandler_ClientData *p; p = PyMem_NEW(FileHandler_ClientData, 1); if (p != NULL) { - Py_XINCREF(func); - Py_XINCREF(file); - p->func = func; - p->file = file; + p->func = Py_XNewRef(func); + p->file = Py_XNewRef(file); p->id = id; p->next = HeadFHCD; HeadFHCD = p; @@ -2591,13 +2577,11 @@ Tktt_New(PyObject *func) if (v == NULL) return NULL; - Py_INCREF(func); v->token = NULL; - v->func = func; + v->func = Py_NewRef(func); /* Extra reference, deleted when called or when handler is deleted */ - Py_INCREF(v); - return v; + return (TkttObject*)Py_NewRef(v); } static void @@ -2940,9 +2924,8 @@ _flatten1(FlattenContext* context, PyObject* item, int depth) if (context->size + 1 > context->maxsize && !_bump(context, 1)) return 0; - Py_INCREF(o); PyTuple_SET_ITEM(context->tuple, - context->size++, o); + context->size++, Py_NewRef(o)); } } } else { @@ -3266,8 +3249,7 @@ PyInit__tkinter(void) Py_DECREF(m); return NULL; } - Py_INCREF(o); - if (PyModule_AddObject(m, "TclError", o)) { + if (PyModule_AddObject(m, "TclError", Py_NewRef(o))) { Py_DECREF(o); Py_DECREF(m); return NULL; diff --git a/Modules/_tracemalloc.c 
b/Modules/_tracemalloc.c index 44a1f7b673c0eb..0d70f0cf34c8d6 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -16,6 +16,8 @@ module _tracemalloc [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=708a98302fc46e5f]*/ +#define tracemalloc_config _PyRuntime.tracemalloc.config + _Py_DECLARE_STR(anon_unknown, ""); /* Trace memory blocks allocated by PyMem_RawMalloc() */ @@ -347,8 +349,8 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) else { /* tracemalloc_filenames is responsible to keep a reference to the filename */ - Py_INCREF(filename); - if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) { + if (_Py_hashtable_set(tracemalloc_filenames, Py_NewRef(filename), + NULL) < 0) { Py_DECREF(filename); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the filename"); @@ -407,7 +409,7 @@ traceback_get_frames(traceback_t *traceback) if (pyframe == NULL) { break; } - if (traceback->nframe < _Py_tracemalloc_config.max_nframe) { + if (traceback->nframe < tracemalloc_config.max_nframe) { tracemalloc_get_frame(pyframe, &traceback->frames[traceback->nframe]); assert(traceback->frames[traceback->nframe].filename != NULL); traceback->nframe++; @@ -505,7 +507,7 @@ tracemalloc_get_traces_table(unsigned int domain) static void tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) { - assert(_Py_tracemalloc_config.tracing); + assert(tracemalloc_config.tracing); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); if (!traces) { @@ -529,7 +531,7 @@ static int tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, size_t size) { - assert(_Py_tracemalloc_config.tracing); + assert(tracemalloc_config.tracing); traceback_t *traceback = traceback_new(); if (traceback == NULL) { @@ -863,13 +865,13 @@ tracemalloc_clear_traces(void) static int tracemalloc_init(void) { - if (_Py_tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { + if (tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { PyErr_SetString(PyExc_RuntimeError, "the tracemalloc module has been unloaded"); return -1; } - if (_Py_tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) + if (tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) return 0; PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); @@ -919,7 +921,7 @@ tracemalloc_init(void) tracemalloc_empty_traceback.frames[0].lineno = 0; tracemalloc_empty_traceback.hash = traceback_hash(&tracemalloc_empty_traceback); - _Py_tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; + tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; return 0; } @@ -927,9 +929,9 @@ tracemalloc_init(void) static void tracemalloc_deinit(void) { - if (_Py_tracemalloc_config.initialized != TRACEMALLOC_INITIALIZED) + if (tracemalloc_config.initialized != TRACEMALLOC_INITIALIZED) return; - _Py_tracemalloc_config.initialized = TRACEMALLOC_FINALIZED; + tracemalloc_config.initialized = TRACEMALLOC_FINALIZED; tracemalloc_stop(); @@ -969,12 +971,12 @@ tracemalloc_start(int max_nframe) return -1; } - if (_Py_tracemalloc_config.tracing) { + if (tracemalloc_config.tracing) { /* hook already installed: do nothing */ return 0; } - _Py_tracemalloc_config.max_nframe = max_nframe; + tracemalloc_config.max_nframe = max_nframe; /* allocate a buffer to store a new traceback */ size = TRACEBACK_SIZE(max_nframe); @@ -1010,7 +1012,7 @@ tracemalloc_start(int max_nframe) PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); /* everything is ready: start tracing Python memory allocations */ - 
_Py_tracemalloc_config.tracing = 1; + tracemalloc_config.tracing = 1; return 0; } @@ -1019,11 +1021,11 @@ tracemalloc_start(int max_nframe) static void tracemalloc_stop(void) { - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return; /* stop tracing Python memory allocations */ - _Py_tracemalloc_config.tracing = 0; + tracemalloc_config.tracing = 0; /* unregister the hook on memory allocators */ #ifdef TRACE_RAW_MALLOC @@ -1051,7 +1053,7 @@ static PyObject * _tracemalloc_is_tracing_impl(PyObject *module) /*[clinic end generated code: output=2d763b42601cd3ef input=af104b0a00192f63]*/ { - return PyBool_FromLong(_Py_tracemalloc_config.tracing); + return PyBool_FromLong(tracemalloc_config.tracing); } @@ -1065,7 +1067,7 @@ static PyObject * _tracemalloc_clear_traces_impl(PyObject *module) /*[clinic end generated code: output=a86080ee41b84197 input=0dab5b6c785183a5]*/ { - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) Py_RETURN_NONE; set_reentrant(1); @@ -1085,8 +1087,7 @@ frame_to_pyobject(frame_t *frame) if (frame_obj == NULL) return NULL; - Py_INCREF(frame->filename); - PyTuple_SET_ITEM(frame_obj, 0, frame->filename); + PyTuple_SET_ITEM(frame_obj, 0, Py_NewRef(frame->filename)); lineno_obj = PyLong_FromUnsignedLong(frame->lineno); if (lineno_obj == NULL) { @@ -1107,8 +1108,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) if (intern_table != NULL) { frames = _Py_hashtable_get(intern_table, (const void *)traceback); if (frames) { - Py_INCREF(frames); - return frames; + return Py_NewRef(frames); } } @@ -1347,7 +1347,7 @@ _tracemalloc__get_traces_impl(PyObject *module) if (get_traces.list == NULL) goto error; - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return get_traces.list; /* the traceback hash table is used temporarily to intern traceback tuple @@ -1420,7 +1420,7 @@ static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return NULL; trace_t *trace; @@ -1500,7 +1500,7 @@ _PyMem_DumpTraceback(int fd, const void *ptr) traceback_t *traceback; int i; - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { PUTS(fd, "Enable tracemalloc to get the memory block " "allocation traceback\n\n"); return; @@ -1574,7 +1574,7 @@ static PyObject * _tracemalloc_get_traceback_limit_impl(PyObject *module) /*[clinic end generated code: output=d556d9306ba95567 input=da3cd977fc68ae3b]*/ { - return PyLong_FromLong(_Py_tracemalloc_config.max_nframe); + return PyLong_FromLong(tracemalloc_config.max_nframe); } @@ -1632,7 +1632,7 @@ _tracemalloc_get_traced_memory_impl(PyObject *module) { Py_ssize_t size, peak_size; - if (!_Py_tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) return Py_BuildValue("ii", 0, 0); TABLES_LOCK(); @@ -1656,7 +1656,7 @@ static PyObject * _tracemalloc_reset_peak_impl(PyObject *module) /*[clinic end generated code: output=140c2870f691dbb2 input=18afd0635066e9ce]*/ { - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { Py_RETURN_NONE; } @@ -1737,7 +1737,7 @@ PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, int res; PyGILState_STATE gil_state; - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -2; } @@ -1756,7 +1756,7 @@ PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr) { - if 
(!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -2; } @@ -1779,7 +1779,7 @@ _PyTraceMalloc_NewReference(PyObject *op) { assert(PyGILState_Check()); - if (!_Py_tracemalloc_config.tracing) { + if (!tracemalloc_config.tracing) { /* tracemalloc is not tracing: do nothing */ return -1; } diff --git a/Modules/_typingmodule.c b/Modules/_typingmodule.c index 8b6faa646d6187..262dddb63fd5fe 100644 --- a/Modules/_typingmodule.c +++ b/Modules/_typingmodule.c @@ -23,8 +23,7 @@ static PyObject * _typing__idfunc(PyObject *module, PyObject *x) /*[clinic end generated code: output=63c38be4a6ec5f2c input=49f17284b43de451]*/ { - Py_INCREF(x); - return x; + return Py_NewRef(x); } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 4845b4e6d4ad7c..bb4514c36bc7d0 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -291,8 +291,7 @@ _winapi_Overlapped_getbuffer_impl(OverlappedObject *self) return NULL; } res = self->read_buffer ? self->read_buffer : Py_None; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -1089,14 +1088,6 @@ _winapi_CreateProcess_impl(PyObject *module, return NULL; } - PyInterpreterState *interp = PyInterpreterState_Get(); - const PyConfig *config = _PyInterpreterState_GetConfig(interp); - if (config->_isolated_interpreter) { - PyErr_SetString(PyExc_RuntimeError, - "subprocess not supported for isolated subinterpreters"); - return NULL; - } - ZeroMemory(&si, sizeof(si)); si.StartupInfo.cb = sizeof(si); @@ -1402,6 +1393,30 @@ _winapi_MapViewOfFile_impl(PyObject *module, HANDLE file_map, return address; } +/*[clinic input] +_winapi.UnmapViewOfFile + + address: LPCVOID + / +[clinic start generated code]*/ + +static PyObject * +_winapi_UnmapViewOfFile_impl(PyObject *module, LPCVOID address) +/*[clinic end generated code: output=4f7e18ac75d19744 input=8c4b6119ad9288a3]*/ +{ + BOOL success; + + Py_BEGIN_ALLOW_THREADS + success = UnmapViewOfFile(address); + Py_END_ALLOW_THREADS + + if (!success) { + return PyErr_SetFromWindowsErr(0); + } + + Py_RETURN_NONE; +} + /*[clinic input] _winapi.OpenFileMapping -> HANDLE @@ -2071,6 +2086,7 @@ static PyMethodDef winapi_functions[] = { _WINAPI_READFILE_METHODDEF _WINAPI_SETNAMEDPIPEHANDLESTATE_METHODDEF _WINAPI_TERMINATEPROCESS_METHODDEF + _WINAPI_UNMAPVIEWOFFILE_METHODDEF _WINAPI_VIRTUALQUERYSIZE_METHODDEF _WINAPI_WAITNAMEDPIPE_METHODDEF _WINAPI_WAITFORMULTIPLEOBJECTS_METHODDEF diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index f40601ad3a1a72..2c9e0cda1ab048 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1722,8 +1722,7 @@ _channelid_shared(PyObject *obj, _PyCrossInterpreterData *data) xid->resolve = ((channelid *)obj)->resolve; data->data = xid; - Py_INCREF(obj); - data->obj = obj; + data->obj = Py_NewRef(obj); data->new_object = _channelid_from_xid; data->free = PyMem_Free; return 0; @@ -2003,8 +2002,11 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds) // Create and initialize the new interpreter. PyThreadState *save_tstate = _PyThreadState_GET(); + const _PyInterpreterConfig config = isolated + ? (_PyInterpreterConfig)_PyInterpreterConfig_INIT + : (_PyInterpreterConfig)_PyInterpreterConfig_LEGACY_INIT; // XXX Possible GILState issues? 
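Most of the mechanical churn in these modules is the same refactoring: the two-step Py_INCREF(obj); return obj; idiom becomes a single Py_NewRef() call. A minimal sketch of the equivalences being relied on (the function names here are illustrative only, not part of the patch):

    #include "Python.h"

    /* Py_NewRef(obj) increments obj's refcount and returns obj. */
    static PyObject *
    give_new_ref(PyObject *obj)
    {
        return Py_NewRef(obj);         /* same as: Py_INCREF(obj); return obj; */
    }

    /* Py_XNewRef() is the NULL-tolerant variant, used for fields that may be NULL. */
    static PyObject *
    give_new_ref_or_null(PyObject *maybe_obj)
    {
        return Py_XNewRef(maybe_obj);  /* same as: Py_XINCREF(maybe_obj); return maybe_obj; */
    }

Behaviour is unchanged; the one-liners just make it explicit at the point of use that a new strong reference is being handed out.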
- PyThreadState *tstate = _Py_NewInterpreter(isolated); + PyThreadState *tstate = _Py_NewInterpreterFromConfig(&config); PyThreadState_Swap(save_tstate); if (tstate == NULL) { /* Since no new thread state was created, there is no exception to @@ -2318,8 +2320,7 @@ channel_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) PyObject *id = (PyObject *)newchannelid(&ChannelIDtype, *cur, 0, &_globals.channels, 0, 0); if (id == NULL) { - Py_DECREF(ids); - ids = NULL; + Py_SETREF(ids, NULL); break; } PyList_SET_ITEM(ids, (Py_ssize_t)i, id); @@ -2381,8 +2382,7 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) goto finally; except: - Py_XDECREF(ids); - ids = NULL; + Py_CLEAR(ids); finally: return ids; @@ -2631,12 +2631,12 @@ PyInit__xxsubinterpreters(void) } /* Add other types */ - Py_INCREF(&ChannelIDtype); - if (PyDict_SetItemString(ns, "ChannelID", (PyObject *)&ChannelIDtype) != 0) { + if (PyDict_SetItemString(ns, "ChannelID", + Py_NewRef(&ChannelIDtype)) != 0) { return NULL; } - Py_INCREF(&_PyInterpreterID_Type); - if (PyDict_SetItemString(ns, "InterpreterID", (PyObject *)&_PyInterpreterID_Type) != 0) { + if (PyDict_SetItemString(ns, "InterpreterID", + Py_NewRef(&_PyInterpreterID_Type)) != 0) { return NULL; } diff --git a/Modules/_xxtestfuzz/fuzzer.c b/Modules/_xxtestfuzz/fuzzer.c index 366e81a54519a7..fb0c191d2c494d 100644 --- a/Modules/_xxtestfuzz/fuzzer.c +++ b/Modules/_xxtestfuzz/fuzzer.c @@ -142,7 +142,7 @@ static int fuzz_struct_unpack(const char* data, size_t size) { } -#define MAX_JSON_TEST_SIZE 0x10000 +#define MAX_JSON_TEST_SIZE 0x100000 PyObject* json_loads_method = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ @@ -335,7 +335,7 @@ static int fuzz_sre_match(const char* data, size_t size) { return 0; } -#define MAX_CSV_TEST_SIZE 0x10000 +#define MAX_CSV_TEST_SIZE 0x100000 PyObject* csv_module = NULL; PyObject* csv_error = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ @@ -393,7 +393,7 @@ static int fuzz_csv_reader(const char* data, size_t size) { return 0; } -#define MAX_AST_LITERAL_EVAL_TEST_SIZE 0x10000 +#define MAX_AST_LITERAL_EVAL_TEST_SIZE 0x100000 PyObject* ast_literal_eval_method = NULL; /* Called by LLVMFuzzerTestOneInput for initialization */ static int init_ast_literal_eval(void) { @@ -459,6 +459,9 @@ int LLVMFuzzerInitialize(int *argc, char ***argv) { PyConfig config; PyConfig_InitPythonConfig(&config); config.install_signal_handlers = 0; + /* Raise the limit above the default allows exercising larger things + * now that we fall back to the _pylong module for large values. 
*/ + config.int_max_str_digits = 8086; PyStatus status; status = PyConfig_SetBytesString(&config, &config.program_name, *argv[0]); if (PyStatus_Exception(status)) { diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index 207340adec152f..9d38589ea3d1b0 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -12,6 +12,13 @@ #include "datetime.h" +#include "clinic/_zoneinfo.c.h" +/*[clinic input] +module zoneinfo +class zoneinfo.ZoneInfo "PyObject *" "PyTypeObject *" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=d12c73c0eef36df8]*/ + // Imports static PyObject *io_open = NULL; static PyObject *_tzpath_find_tzfile = NULL; @@ -213,20 +220,17 @@ zoneinfo_new_instance(PyTypeObject *type, PyObject *key) } PyObject *rv = PyObject_CallMethod(file_obj, "close", NULL); - Py_DECREF(file_obj); - file_obj = NULL; + Py_SETREF(file_obj, NULL); if (rv == NULL) { goto error; } Py_DECREF(rv); - ((PyZoneInfo_ZoneInfo *)self)->key = key; - Py_INCREF(key); + ((PyZoneInfo_ZoneInfo *)self)->key = Py_NewRef(key); goto cleanup; error: - Py_XDECREF(self); - self = NULL; + Py_CLEAR(self); cleanup: if (file_obj != NULL) { PyObject *exc, *val, *tb; @@ -338,20 +342,25 @@ zoneinfo_dealloc(PyObject *obj_self) Py_TYPE(self)->tp_free((PyObject *)self); } +/*[clinic input] +@classmethod +zoneinfo.ZoneInfo.from_file + + file_obj: object + / + key: object = None + +Create a ZoneInfo file from a file object. +[clinic start generated code]*/ + static PyObject * -zoneinfo_from_file(PyTypeObject *type, PyObject *args, PyObject *kwargs) +zoneinfo_ZoneInfo_from_file_impl(PyTypeObject *type, PyObject *file_obj, + PyObject *key) +/*[clinic end generated code: output=68ed2022404ae5be input=ccfe73708133d2e4]*/ { - PyObject *file_obj = NULL; PyObject *file_repr = NULL; - PyObject *key = Py_None; PyZoneInfo_ZoneInfo *self = NULL; - static char *kwlist[] = {"", "key", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist, &file_obj, - &key)) { - return NULL; - } - PyObject *obj_self = (PyObject *)(type->tp_alloc(type, 0)); self = (PyZoneInfo_ZoneInfo *)obj_self; if (self == NULL) { @@ -369,26 +378,29 @@ zoneinfo_from_file(PyTypeObject *type, PyObject *args, PyObject *kwargs) self->source = SOURCE_FILE; self->file_repr = file_repr; - self->key = key; - Py_INCREF(key); - + self->key = Py_NewRef(key); return obj_self; + error: Py_XDECREF(file_repr); Py_XDECREF(self); return NULL; } +/*[clinic input] +@classmethod +zoneinfo.ZoneInfo.no_cache + + key: object + +Get a new instance of ZoneInfo, bypassing the cache. 
+[clinic start generated code]*/ + static PyObject * -zoneinfo_no_cache(PyTypeObject *cls, PyObject *args, PyObject *kwargs) +zoneinfo_ZoneInfo_no_cache_impl(PyTypeObject *type, PyObject *key) +/*[clinic end generated code: output=751c6894ad66f91b input=bb24afd84a80ba46]*/ { - static char *kwlist[] = {"key", NULL}; - PyObject *key = NULL; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, &key)) { - return NULL; - } - - PyObject *out = zoneinfo_new_instance(cls, key); + PyObject *out = zoneinfo_new_instance(type, key); if (out != NULL) { ((PyZoneInfo_ZoneInfo *)out)->source = SOURCE_NOCACHE; } @@ -396,18 +408,20 @@ zoneinfo_no_cache(PyTypeObject *cls, PyObject *args, PyObject *kwargs) return out; } -static PyObject * -zoneinfo_clear_cache(PyObject *cls, PyObject *args, PyObject *kwargs) -{ - PyObject *only_keys = NULL; - static char *kwlist[] = {"only_keys", NULL}; +/*[clinic input] +@classmethod +zoneinfo.ZoneInfo.clear_cache - if (!(PyArg_ParseTupleAndKeywords(args, kwargs, "|$O", kwlist, - &only_keys))) { - return NULL; - } + * + only_keys: object = None + +Clear the ZoneInfo cache. +[clinic start generated code]*/ - PyTypeObject *type = (PyTypeObject *)cls; +static PyObject * +zoneinfo_ZoneInfo_clear_cache_impl(PyTypeObject *type, PyObject *only_keys) +/*[clinic end generated code: output=eec0a3276f07bd90 input=8cff0182a95f295b]*/ +{ PyObject *weak_cache = get_weak_cache(type); if (only_keys == NULL || only_keys == Py_None) { @@ -466,8 +480,7 @@ zoneinfo_utcoffset(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->utcoff); - return tti->utcoff; + return Py_NewRef(tti->utcoff); } static PyObject * @@ -477,8 +490,7 @@ zoneinfo_dst(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->dstoff); - return tti->dstoff; + return Py_NewRef(tti->dstoff); } static PyObject * @@ -488,8 +500,7 @@ zoneinfo_tzname(PyObject *self, PyObject *dt) if (tti == NULL) { return NULL; } - Py_INCREF(tti->tzname); - return tti->tzname; + return Py_NewRef(tti->tzname); } #define GET_DT_TZINFO PyDateTime_DATE_GET_TZINFO @@ -633,8 +644,7 @@ static PyObject * zoneinfo_str(PyZoneInfo_ZoneInfo *self) { if (!(self->key == Py_None)) { - Py_INCREF(self->key); - return self->key; + return Py_NewRef(self->key); } else { return zoneinfo_repr(self); @@ -775,8 +785,7 @@ build_ttinfo(long utcoffset, long dstoffset, PyObject *tzname, _ttinfo *out) return -1; } - out->tzname = tzname; - Py_INCREF(tzname); + out->tzname = Py_NewRef(tzname); return 0; } @@ -1063,9 +1072,7 @@ load_data(PyZoneInfo_ZoneInfo *self, PyObject *file_obj) // that the dstoff is set correctly in that case. 
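The change just below in load_data() is representative of the Py_SETREF conversions elsewhere in this patch: Py_SETREF(dst, src) stores src into dst first and only then drops the old reference, so the slot never points at an already-freed object while the old value's destructor runs. A rough sketch of the macro semantics (the helper name is illustrative only):

    /* Py_SETREF(dst, src)  ~  PyObject *old = dst; dst = src; Py_DECREF(old);
     * Py_XSETREF() uses Py_XDECREF() for slots that may be NULL;
     * Py_CLEAR(dst) is equivalent to Py_XSETREF(dst, NULL). */
    static void
    replace_slot(PyObject **slot, PyObject *value)
    {
        Py_XSETREF(*slot, Py_NewRef(value));  /* take a new reference, release the old one last */
    }

That ordering is what makes the one-line Py_SETREF(tti_after->dstoff, Py_NewRef(tti->dstoff)) below a safe replacement for the old DECREF/assign/INCREF sequence.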
if (PyObject_IsTrue(tti->dstoff)) { _ttinfo *tti_after = &(self->tzrule_after.std); - Py_DECREF(tti_after->dstoff); - tti_after->dstoff = tti->dstoff; - Py_INCREF(tti_after->dstoff); + Py_SETREF(tti_after->dstoff, Py_NewRef(tti->dstoff)); } } @@ -2267,13 +2274,10 @@ strong_cache_node_new(PyObject *key, PyObject *zone) return NULL; } - Py_INCREF(key); - Py_INCREF(zone); - node->next = NULL; node->prev = NULL; - node->key = key; - node->zone = zone; + node->key = Py_NewRef(key); + node->zone = Py_NewRef(zone); return node; } @@ -2425,8 +2429,7 @@ zone_from_strong_cache(const PyTypeObject *const type, PyObject *const key) if (node != NULL) { move_strong_cache_node_to_front(&ZONEINFO_STRONG_CACHE, node); - Py_INCREF(node->zone); - return node->zone; + return Py_NewRef(node->zone); } return NULL; // Cache miss @@ -2545,15 +2548,9 @@ zoneinfo_init_subclass(PyTypeObject *cls, PyObject *args, PyObject **kwargs) ///// // Specify the ZoneInfo type static PyMethodDef zoneinfo_methods[] = { - {"clear_cache", (PyCFunction)(void (*)(void))zoneinfo_clear_cache, - METH_VARARGS | METH_KEYWORDS | METH_CLASS, - PyDoc_STR("Clear the ZoneInfo cache.")}, - {"no_cache", (PyCFunction)(void (*)(void))zoneinfo_no_cache, - METH_VARARGS | METH_KEYWORDS | METH_CLASS, - PyDoc_STR("Get a new instance of ZoneInfo, bypassing the cache.")}, - {"from_file", (PyCFunction)(void (*)(void))zoneinfo_from_file, - METH_VARARGS | METH_KEYWORDS | METH_CLASS, - PyDoc_STR("Create a ZoneInfo file from a file object.")}, + ZONEINFO_ZONEINFO_CLEAR_CACHE_METHODDEF + ZONEINFO_ZONEINFO_NO_CACHE_METHODDEF + ZONEINFO_ZONEINFO_FROM_FILE_METHODDEF {"utcoffset", (PyCFunction)zoneinfo_utcoffset, METH_O, PyDoc_STR("Retrieve a timedelta representing the UTC offset in a zone at " "the given datetime.")}, @@ -2607,14 +2604,9 @@ static PyMethodDef module_methods[] = {{NULL, NULL}}; static void module_free(void *m) { - Py_XDECREF(_tzpath_find_tzfile); - _tzpath_find_tzfile = NULL; - - Py_XDECREF(_common_mod); - _common_mod = NULL; - - Py_XDECREF(io_open); - io_open = NULL; + Py_CLEAR(_tzpath_find_tzfile); + Py_CLEAR(_common_mod); + Py_CLEAR(io_open); xdecref_ttinfo(&NO_TTINFO); @@ -2645,8 +2637,9 @@ zoneinfomodule_exec(PyObject *m) goto error; } - Py_INCREF(&PyZoneInfo_ZoneInfoType); - PyModule_AddObject(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType); + if (PyModule_AddObjectRef(m, "ZoneInfo", (PyObject *)&PyZoneInfo_ZoneInfoType) < 0) { + goto error; + } /* Populate imports */ _tzpath_find_tzfile = @@ -2666,13 +2659,9 @@ zoneinfomodule_exec(PyObject *m) } if (NO_TTINFO.utcoff == NULL) { - NO_TTINFO.utcoff = Py_None; - NO_TTINFO.dstoff = Py_None; - NO_TTINFO.tzname = Py_None; - - for (size_t i = 0; i < 3; ++i) { - Py_INCREF(Py_None); - } + NO_TTINFO.utcoff = Py_NewRef(Py_None); + NO_TTINFO.dstoff = Py_NewRef(Py_None); + NO_TTINFO.tzname = Py_NewRef(Py_None); } if (initialize_caches()) { diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 924fbf29bfb889..dcf510e9788ee5 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -58,6 +58,8 @@ typedef struct { PyTypeObject *ArrayType; PyTypeObject *ArrayIterType; + PyObject *array_reconstructor; + PyObject *str_read; PyObject *str_write; PyObject *str___dict__; @@ -707,8 +709,7 @@ array_richcompare(PyObject *v, PyObject *w, int op) res = Py_False; else res = Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } if (va->ob_descr == wa->ob_descr && va->ob_descr->compareitems != NULL) { @@ -731,8 +732,7 @@ array_richcompare(PyObject *v, PyObject *w, int op) default: 
return NULL; /* cannot happen */ } PyObject *res = cmp ? Py_True : Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } @@ -776,18 +776,15 @@ array_richcompare(PyObject *v, PyObject *w, int op) res = Py_True; else res = Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /* We have an item that differs. First, shortcuts for EQ/NE */ if (op == Py_EQ) { - Py_INCREF(Py_False); - res = Py_False; + res = Py_NewRef(Py_False); } else if (op == Py_NE) { - Py_INCREF(Py_True); - res = Py_True; + res = Py_NewRef(Py_True); } else { /* Compare the final item again using the proper operator */ @@ -1058,8 +1055,7 @@ array_inplace_concat(arrayobject *self, PyObject *bb) } if (array_do_extend(state, self, bb) == -1) return NULL; - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -1083,8 +1079,7 @@ array_inplace_repeat(arrayobject *self, Py_ssize_t n) _PyBytes_Repeat(self->ob_item, n*size, self->ob_item, size); } - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } @@ -1945,9 +1940,8 @@ make_array(PyTypeObject *arraytype, char typecode, PyObject *items) Py_DECREF(typecode_obj); return NULL; } - Py_INCREF(items); PyTuple_SET_ITEM(new_args, 0, typecode_obj); - PyTuple_SET_ITEM(new_args, 1, items); + PyTuple_SET_ITEM(new_args, 1, Py_NewRef(items)); array_obj = array_new(arraytype, new_args, NULL); Py_DECREF(new_args); @@ -2191,17 +2185,17 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, PyObject *array_str; int typecode = self->ob_descr->typecode; int mformat_code; - static PyObject *array_reconstructor = NULL; long protocol; array_state *state = get_array_state_by_class(cls); assert(state != NULL); - if (array_reconstructor == NULL) { - array_reconstructor = _PyImport_GetModuleAttrString( + if (state->array_reconstructor == NULL) { + state->array_reconstructor = _PyImport_GetModuleAttrString( "array", "_array_reconstructor"); - if (array_reconstructor == NULL) + if (state->array_reconstructor == NULL) { return NULL; + } } if (!PyLong_Check(value)) { @@ -2217,8 +2211,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, return NULL; } if (dict == NULL) { - dict = Py_None; - Py_INCREF(dict); + dict = Py_NewRef(Py_None); } mformat_code = typecode_to_mformat_code(typecode); @@ -2252,8 +2245,10 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, Py_DECREF(dict); return NULL; } + + assert(state->array_reconstructor != NULL); result = Py_BuildValue( - "O(OCiN)O", array_reconstructor, Py_TYPE(self), typecode, + "O(OCiN)O", state->array_reconstructor, Py_TYPE(self), typecode, mformat_code, array_str, dict); Py_DECREF(dict); return result; @@ -2303,6 +2298,7 @@ static PyMethodDef array_methods[] = { ARRAY_ARRAY_TOBYTES_METHODDEF ARRAY_ARRAY_TOUNICODE_METHODDEF ARRAY_ARRAY___SIZEOF___METHODDEF + {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, {NULL, NULL} /* sentinel */ }; @@ -2567,8 +2563,7 @@ array_buffer_getbuf(arrayobject *self, Py_buffer *view, int flags) } view->buf = (void *)self->ob_item; - view->obj = (PyObject*)self; - Py_INCREF(self); + view->obj = Py_NewRef(self); if (view->buf == NULL) view->buf = (void *)emptybuf; view->len = Py_SIZE(self) * self->ob_descr->itemsize; @@ -2880,8 +2875,7 @@ array_iter(arrayobject *ao) if (it == NULL) return NULL; - Py_INCREF(ao); - it->ao = ao; + it->ao = (arrayobject*)Py_NewRef(ao); it->index = 0; it->getitem = ao->ob_descr->getitem; PyObject_GC_Track(it); @@ -3012,6 +3006,7 @@ 
array_traverse(PyObject *module, visitproc visit, void *arg) array_state *state = get_array_state(module); Py_VISIT(state->ArrayType); Py_VISIT(state->ArrayIterType); + Py_VISIT(state->array_reconstructor); return 0; } @@ -3021,6 +3016,7 @@ array_clear(PyObject *module) array_state *state = get_array_state(module); Py_CLEAR(state->ArrayType); Py_CLEAR(state->ArrayIterType); + Py_CLEAR(state->array_reconstructor); Py_CLEAR(state->str_read); Py_CLEAR(state->str_write); Py_CLEAR(state->str___dict__); @@ -3065,6 +3061,7 @@ array_modexec(PyObject *m) PyObject *typecodes; const struct arraydescr *descr; + state->array_reconstructor = NULL; /* Add interned strings */ ADD_INTERNED(state, read); ADD_INTERNED(state, write); @@ -3075,8 +3072,8 @@ array_modexec(PyObject *m) CREATE_TYPE(m, state->ArrayIterType, &arrayiter_spec); Py_SET_TYPE(state->ArrayIterType, &PyType_Type); - Py_INCREF((PyObject *)state->ArrayType); - if (PyModule_AddObject(m, "ArrayType", (PyObject *)state->ArrayType) < 0) { + if (PyModule_AddObject(m, "ArrayType", + Py_NewRef((PyObject *)state->ArrayType)) < 0) { Py_DECREF((PyObject *)state->ArrayType); return -1; } diff --git a/Modules/audioop.c b/Modules/audioop.c index d74e634ac44889..9325f82f9a17e0 100644 --- a/Modules/audioop.c +++ b/Modules/audioop.c @@ -1,3 +1,33 @@ +/* The audioop module uses the code base in g777.c file of the Sox project. + * Source: https://web.archive.org/web/19970716121258/http://www.spies.com/Sox/Archive/soxgamma.tar.gz + * Programming the AdLib/Sound Blaster + * FM Music Chips + * Version 2.0 (24 Feb 1992) + * + * Copyright (c) 1991, 1992 by Jeffrey S. Lee + * + * jlee@smylex.uucp + * + * + * + * Warranty and Copyright Policy + * + * This document is provided on an "as-is" basis, and its author makes + * no warranty or representation, express or implied, with respect to + * its quality performance or fitness for a particular purpose. In no + * event will the author of this document be liable for direct, indirect, + * special, incidental, or consequential damages arising out of the use + * or inability to use the information contained within. Use of this + * document is at your own risk. + * + * This file may be used and copied freely so long as the applicable + * copyright notices are retained, and no modifications are made to the + * text of the document. No money shall be charged for its distribution + * beyond reasonable shipping, handling and duplication costs, nor shall + * proprietary changes be made to this document so that it cannot be + * distributed freely. This document may not be included in published + * material or commercial packages without the written consent of its + * author. */ /* audioopmodule - Module to detect peak values in arrays */ @@ -28,20 +58,6 @@ fbound(double val, double minval, double maxval) } -/* Code shamelessly stolen from sox, 12.17.7, g711.c -** (c) Craig Reese, Joe Campbell and Jeff Poskanzer 1989 */ - -/* From g711.c: - * - * December 30, 1994: - * Functions linear2alaw, linear2ulaw have been updated to correctly - * convert unquantized 16 bit values. - * Tables for direct u- to A-law and A- to u-law conversions have been - * corrected. - * Borge Lindberg, Center for PersonKommunikation, Aalborg University. - * bli@cpk.auc.dk - * - */ #define BIAS 0x84 /* define the add-in bias for 16 bit samples */ #define CLIP 32635 #define SIGN_BIT (0x80) /* Sign bit for an A-law byte. 
*/ @@ -59,6 +75,8 @@ static const int16_t seg_uend[8] = { static int16_t search(int16_t val, const int16_t *table, int size) { + assert(0 <= size); + assert(size < INT16_MAX); int i; for (i = 0; i < size; i++) { @@ -170,6 +188,7 @@ st_14linear2ulaw(int16_t pcm_val) /* 2's complement (14-bit range) */ if (seg >= 8) /* out of range, return maximum value. */ return (unsigned char) (0x7F ^ mask); else { + assert(seg >= 0); uval = (unsigned char) (seg << 4) | ((pcm_val >> (seg + 1)) & 0xF); return (uval ^ mask); } @@ -300,13 +319,13 @@ static const int stepsizeTable[89] = { #ifdef WORDS_BIGENDIAN #define GETINT24(cp, i) ( \ ((unsigned char *)(cp) + (i))[2] + \ - (((unsigned char *)(cp) + (i))[1] << 8) + \ - (((signed char *)(cp) + (i))[0] << 16) ) + (((unsigned char *)(cp) + (i))[1] * (1 << 8)) + \ + (((signed char *)(cp) + (i))[0] * (1 << 16)) ) #else #define GETINT24(cp, i) ( \ ((unsigned char *)(cp) + (i))[0] + \ - (((unsigned char *)(cp) + (i))[1] << 8) + \ - (((signed char *)(cp) + (i))[2] << 16) ) + (((unsigned char *)(cp) + (i))[1] * (1 << 8)) + \ + (((signed char *)(cp) + (i))[2] * (1 << 16)) ) #endif @@ -347,10 +366,10 @@ static const int stepsizeTable[89] = { } while(0) -#define GETSAMPLE32(size, cp, i) ( \ - (size == 1) ? (int)GETINT8((cp), (i)) << 24 : \ - (size == 2) ? (int)GETINT16((cp), (i)) << 16 : \ - (size == 3) ? (int)GETINT24((cp), (i)) << 8 : \ +#define GETSAMPLE32(size, cp, i) ( \ + (size == 1) ? (int)GETINT8((cp), (i)) * (1 << 24) : \ + (size == 2) ? (int)GETINT16((cp), (i)) * (1 << 16) : \ + (size == 3) ? (int)GETINT24((cp), (i)) * (1 << 8) : \ (int)GETINT32((cp), (i))) #define SETSAMPLE32(size, cp, i, val) do { \ @@ -1452,8 +1471,7 @@ audioop_ratecv_impl(PyObject *module, Py_buffer *fragment, int width, len = (Py_ssize_t)(ncp - PyBytes_AsString(str)); rv = PyBytes_FromStringAndSize (PyBytes_AsString(str), len); - Py_DECREF(str); - str = rv; + Py_SETREF(str, rv); if (str == NULL) goto exit; rv = Py_BuildValue("(O(iO))", str, d, samps); @@ -1558,7 +1576,7 @@ audioop_ulaw2lin_impl(PyObject *module, Py_buffer *fragment, int width) cp = fragment->buf; for (i = 0; i < fragment->len*width; i += width) { - int val = st_ulaw2linear16(*cp++) << 16; + int val = st_ulaw2linear16(*cp++) * (1 << 16); SETSAMPLE32(width, ncp, i, val); } return rv; @@ -1632,7 +1650,7 @@ audioop_alaw2lin_impl(PyObject *module, Py_buffer *fragment, int width) cp = fragment->buf; for (i = 0; i < fragment->len*width; i += width) { - val = st_alaw2linear16(*cp++) << 16; + val = st_alaw2linear16(*cp++) * (1 << 16); SETSAMPLE32(width, ncp, i, val); } return rv; @@ -1757,7 +1775,7 @@ audioop_lin2adpcm_impl(PyObject *module, Py_buffer *fragment, int width, /* Step 6 - Output value */ if ( bufferstep ) { - outputbuffer = (delta << 4) & 0xf0; + outputbuffer = (delta * (1 << 4)) & 0xf0; } else { *ncp++ = (delta & 0x0f) | outputbuffer; } @@ -1875,7 +1893,7 @@ audioop_adpcm2lin_impl(PyObject *module, Py_buffer *fragment, int width, step = stepsizeTable[index]; /* Step 6 - Output value */ - SETSAMPLE32(width, ncp, i, valpred << 16); + SETSAMPLE32(width, ncp, i, valpred * (1 << 16)); } rv = Py_BuildValue("(O(ii))", str, valpred, index); diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 4769ab26b1b9e0..1d77fd33ac3b87 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -141,8 +141,7 @@ codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored else if (self->errors == ERROR_REPLACE) errors = "replace"; else { - 
Py_INCREF(self->errors); - return self->errors; + return Py_NewRef(self->errors); } return PyUnicode_FromString(errors); @@ -341,8 +340,7 @@ multibytecodec_encerror(MultibyteCodec *codec, goto errorexit; } else { - Py_INCREF(tobj); - retstr = tobj; + retstr = Py_NewRef(tobj); } assert(PyBytes_Check(retstr)); @@ -786,11 +784,9 @@ encoder_encode_stateful(MultibyteStatefulEncoderContext *ctx, if (ctx->pending) { PyObject *inbuf_tmp; - Py_INCREF(ctx->pending); - origpending = ctx->pending; + origpending = Py_NewRef(ctx->pending); - Py_INCREF(ctx->pending); - inbuf_tmp = ctx->pending; + inbuf_tmp = Py_NewRef(ctx->pending); PyUnicode_Append(&inbuf_tmp, unistr); if (inbuf_tmp == NULL) goto errorexit; @@ -800,8 +796,7 @@ encoder_encode_stateful(MultibyteStatefulEncoderContext *ctx, else { origpending = NULL; - Py_INCREF(unistr); - inbuf = unistr; + inbuf = Py_NewRef(unistr); } if (PyUnicode_READY(inbuf) < 0) goto errorexit; @@ -985,8 +980,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(MultibyteIncrementalEn goto errorexit; } - Py_CLEAR(self->pending); - self->pending = pending; + Py_XSETREF(self->pending, pending); memcpy(self->state.c, statebytes+1+statebytes[0], sizeof(self->state.c)); @@ -1443,8 +1437,7 @@ mbstreamreader_iread(MultibyteStreamReaderObject *self, memcpy(ctrdata + self->pendingsize, PyBytes_AS_STRING(cres), PyBytes_GET_SIZE(cres)); - Py_DECREF(cres); - cres = ctr; + Py_SETREF(cres, ctr); self->pendingsize = 0; } @@ -1470,8 +1463,7 @@ mbstreamreader_iread(MultibyteStreamReaderObject *self, goto errorexit; } - Py_DECREF(cres); - cres = NULL; + Py_SETREF(cres, NULL); if (sizehint < 0 || buf.writer.pos != 0 || rsize == 0) break; @@ -1645,8 +1637,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } self->codec = ((MultibyteCodecObject *)codec)->codec; - self->stream = stream; - Py_INCREF(stream); + self->stream = Py_NewRef(stream); self->pendingsize = 0; self->errors = internal_error_callback(errors); if (self->errors == NULL) @@ -1869,8 +1860,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } self->codec = ((MultibyteCodecObject *)codec)->codec; - self->stream = stream; - Py_INCREF(stream); + self->stream = Py_NewRef(stream); self->pending = NULL; self->errors = internal_error_callback(errors); if (self->errors == NULL) diff --git a/Modules/clinic/_functoolsmodule.c.h b/Modules/clinic/_functoolsmodule.c.h new file mode 100644 index 00000000000000..9c79e6430413bf --- /dev/null +++ b/Modules/clinic/_functoolsmodule.c.h @@ -0,0 +1,104 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(_functools_cmp_to_key__doc__, +"cmp_to_key($module, /, mycmp)\n" +"--\n" +"\n" +"Convert a cmp= function into a key= function.\n" +"\n" +" mycmp\n" +" Function that compares two objects."); + +#define _FUNCTOOLS_CMP_TO_KEY_METHODDEF \ + {"cmp_to_key", _PyCFunction_CAST(_functools_cmp_to_key), METH_FASTCALL|METH_KEYWORDS, _functools_cmp_to_key__doc__}, + +static PyObject * +_functools_cmp_to_key_impl(PyObject *module, PyObject *mycmp); + +static PyObject * +_functools_cmp_to_key(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject 
*ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(mycmp), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"mycmp", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "cmp_to_key", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *mycmp; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + mycmp = args[0]; + return_value = _functools_cmp_to_key_impl(module, mycmp); + +exit: + return return_value; +} + +PyDoc_STRVAR(_functools__lru_cache_wrapper_cache_info__doc__, +"cache_info($self, /)\n" +"--\n" +"\n" +"Report cache statistics"); + +#define _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_INFO_METHODDEF \ + {"cache_info", (PyCFunction)_functools__lru_cache_wrapper_cache_info, METH_NOARGS, _functools__lru_cache_wrapper_cache_info__doc__}, + +static PyObject * +_functools__lru_cache_wrapper_cache_info_impl(PyObject *self); + +static PyObject * +_functools__lru_cache_wrapper_cache_info(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _functools__lru_cache_wrapper_cache_info_impl(self); +} + +PyDoc_STRVAR(_functools__lru_cache_wrapper_cache_clear__doc__, +"cache_clear($self, /)\n" +"--\n" +"\n" +"Clear the cache and cache statistics"); + +#define _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_CLEAR_METHODDEF \ + {"cache_clear", (PyCFunction)_functools__lru_cache_wrapper_cache_clear, METH_NOARGS, _functools__lru_cache_wrapper_cache_clear__doc__}, + +static PyObject * +_functools__lru_cache_wrapper_cache_clear_impl(PyObject *self); + +static PyObject * +_functools__lru_cache_wrapper_cache_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _functools__lru_cache_wrapper_cache_clear_impl(self); +} +/*[clinic end generated code: output=7e7f3bcf9ed61f23 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h new file mode 100644 index 00000000000000..21bde529470294 --- /dev/null +++ b/Modules/clinic/_testclinic.c.h @@ -0,0 +1,2814 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(test_empty_function__doc__, +"test_empty_function($module, /)\n" +"--\n" +"\n"); + +#define TEST_EMPTY_FUNCTION_METHODDEF \ + {"test_empty_function", (PyCFunction)test_empty_function, METH_NOARGS, test_empty_function__doc__}, + +static PyObject * +test_empty_function_impl(PyObject *module); + +static PyObject * +test_empty_function(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return test_empty_function_impl(module); +} + +PyDoc_STRVAR(objects_converter__doc__, +"objects_converter($module, a, b=, /)\n" +"--\n" +"\n"); + +#define OBJECTS_CONVERTER_METHODDEF \ + {"objects_converter", _PyCFunction_CAST(objects_converter), METH_FASTCALL, objects_converter__doc__}, + +static PyObject * +objects_converter_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +objects_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *a; + PyObject *b = NULL; + + if (!_PyArg_CheckPositional("objects_converter", nargs, 1, 2)) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto 
skip_optional; + } + b = args[1]; +skip_optional: + return_value = objects_converter_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(bytes_object_converter__doc__, +"bytes_object_converter($module, a, /)\n" +"--\n" +"\n"); + +#define BYTES_OBJECT_CONVERTER_METHODDEF \ + {"bytes_object_converter", (PyCFunction)bytes_object_converter, METH_O, bytes_object_converter__doc__}, + +static PyObject * +bytes_object_converter_impl(PyObject *module, PyBytesObject *a); + +static PyObject * +bytes_object_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyBytesObject *a; + + if (!PyBytes_Check(arg)) { + _PyArg_BadArgument("bytes_object_converter", "argument", "bytes", arg); + goto exit; + } + a = (PyBytesObject *)arg; + return_value = bytes_object_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(byte_array_object_converter__doc__, +"byte_array_object_converter($module, a, /)\n" +"--\n" +"\n"); + +#define BYTE_ARRAY_OBJECT_CONVERTER_METHODDEF \ + {"byte_array_object_converter", (PyCFunction)byte_array_object_converter, METH_O, byte_array_object_converter__doc__}, + +static PyObject * +byte_array_object_converter_impl(PyObject *module, PyByteArrayObject *a); + +static PyObject * +byte_array_object_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyByteArrayObject *a; + + if (!PyByteArray_Check(arg)) { + _PyArg_BadArgument("byte_array_object_converter", "argument", "bytearray", arg); + goto exit; + } + a = (PyByteArrayObject *)arg; + return_value = byte_array_object_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unicode_converter__doc__, +"unicode_converter($module, a, /)\n" +"--\n" +"\n"); + +#define UNICODE_CONVERTER_METHODDEF \ + {"unicode_converter", (PyCFunction)unicode_converter, METH_O, unicode_converter__doc__}, + +static PyObject * +unicode_converter_impl(PyObject *module, PyObject *a); + +static PyObject * +unicode_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *a; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("unicode_converter", "argument", "str", arg); + goto exit; + } + if (PyUnicode_READY(arg) == -1) { + goto exit; + } + a = arg; + return_value = unicode_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(bool_converter__doc__, +"bool_converter($module, a=True, b=True, c=True, /)\n" +"--\n" +"\n"); + +#define BOOL_CONVERTER_METHODDEF \ + {"bool_converter", _PyCFunction_CAST(bool_converter), METH_FASTCALL, bool_converter__doc__}, + +static PyObject * +bool_converter_impl(PyObject *module, int a, int b, int c); + +static PyObject * +bool_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int a = 1; + int b = 1; + int c = 1; + + if (!_PyArg_CheckPositional("bool_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyObject_IsTrue(args[0]); + if (a < 0) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + b = PyObject_IsTrue(args[1]); + if (b < 0) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + c = _PyLong_AsInt(args[2]); + if (c == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = bool_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(char_converter__doc__, +"char_converter($module, a=b\'A\', b=b\'\\x07\', c=b\'\\x08\', d=b\'\\t\', e=b\'\\n\',\n" +" f=b\'\\x0b\', g=b\'\\x0c\', h=b\'\\r\', 
i=b\'\"\', j=b\"\'\", k=b\'?\',\n" +" l=b\'\\\\\', m=b\'\\x00\', n=b\'\\xff\', /)\n" +"--\n" +"\n"); + +#define CHAR_CONVERTER_METHODDEF \ + {"char_converter", _PyCFunction_CAST(char_converter), METH_FASTCALL, char_converter__doc__}, + +static PyObject * +char_converter_impl(PyObject *module, char a, char b, char c, char d, char e, + char f, char g, char h, char i, char j, char k, char l, + char m, char n); + +static PyObject * +char_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char a = 'A'; + char b = '\x07'; + char c = '\x08'; + char d = '\t'; + char e = '\n'; + char f = '\x0b'; + char g = '\x0c'; + char h = '\r'; + char i = '"'; + char j = '\''; + char k = '?'; + char l = '\\'; + char m = '\x00'; + char n = '\xff'; + + if (!_PyArg_CheckPositional("char_converter", nargs, 0, 14)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyBytes_Check(args[0]) && PyBytes_GET_SIZE(args[0]) == 1) { + a = PyBytes_AS_STRING(args[0])[0]; + } + else if (PyByteArray_Check(args[0]) && PyByteArray_GET_SIZE(args[0]) == 1) { + a = PyByteArray_AS_STRING(args[0])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 1", "a byte string of length 1", args[0]); + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (PyBytes_Check(args[1]) && PyBytes_GET_SIZE(args[1]) == 1) { + b = PyBytes_AS_STRING(args[1])[0]; + } + else if (PyByteArray_Check(args[1]) && PyByteArray_GET_SIZE(args[1]) == 1) { + b = PyByteArray_AS_STRING(args[1])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 2", "a byte string of length 1", args[1]); + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (PyBytes_Check(args[2]) && PyBytes_GET_SIZE(args[2]) == 1) { + c = PyBytes_AS_STRING(args[2])[0]; + } + else if (PyByteArray_Check(args[2]) && PyByteArray_GET_SIZE(args[2]) == 1) { + c = PyByteArray_AS_STRING(args[2])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 3", "a byte string of length 1", args[2]); + goto exit; + } + if (nargs < 4) { + goto skip_optional; + } + if (PyBytes_Check(args[3]) && PyBytes_GET_SIZE(args[3]) == 1) { + d = PyBytes_AS_STRING(args[3])[0]; + } + else if (PyByteArray_Check(args[3]) && PyByteArray_GET_SIZE(args[3]) == 1) { + d = PyByteArray_AS_STRING(args[3])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 4", "a byte string of length 1", args[3]); + goto exit; + } + if (nargs < 5) { + goto skip_optional; + } + if (PyBytes_Check(args[4]) && PyBytes_GET_SIZE(args[4]) == 1) { + e = PyBytes_AS_STRING(args[4])[0]; + } + else if (PyByteArray_Check(args[4]) && PyByteArray_GET_SIZE(args[4]) == 1) { + e = PyByteArray_AS_STRING(args[4])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 5", "a byte string of length 1", args[4]); + goto exit; + } + if (nargs < 6) { + goto skip_optional; + } + if (PyBytes_Check(args[5]) && PyBytes_GET_SIZE(args[5]) == 1) { + f = PyBytes_AS_STRING(args[5])[0]; + } + else if (PyByteArray_Check(args[5]) && PyByteArray_GET_SIZE(args[5]) == 1) { + f = PyByteArray_AS_STRING(args[5])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 6", "a byte string of length 1", args[5]); + goto exit; + } + if (nargs < 7) { + goto skip_optional; + } + if (PyBytes_Check(args[6]) && PyBytes_GET_SIZE(args[6]) == 1) { + g = PyBytes_AS_STRING(args[6])[0]; + } + else if (PyByteArray_Check(args[6]) && PyByteArray_GET_SIZE(args[6]) == 1) { + g = PyByteArray_AS_STRING(args[6])[0]; + } + else { + 
_PyArg_BadArgument("char_converter", "argument 7", "a byte string of length 1", args[6]); + goto exit; + } + if (nargs < 8) { + goto skip_optional; + } + if (PyBytes_Check(args[7]) && PyBytes_GET_SIZE(args[7]) == 1) { + h = PyBytes_AS_STRING(args[7])[0]; + } + else if (PyByteArray_Check(args[7]) && PyByteArray_GET_SIZE(args[7]) == 1) { + h = PyByteArray_AS_STRING(args[7])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 8", "a byte string of length 1", args[7]); + goto exit; + } + if (nargs < 9) { + goto skip_optional; + } + if (PyBytes_Check(args[8]) && PyBytes_GET_SIZE(args[8]) == 1) { + i = PyBytes_AS_STRING(args[8])[0]; + } + else if (PyByteArray_Check(args[8]) && PyByteArray_GET_SIZE(args[8]) == 1) { + i = PyByteArray_AS_STRING(args[8])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 9", "a byte string of length 1", args[8]); + goto exit; + } + if (nargs < 10) { + goto skip_optional; + } + if (PyBytes_Check(args[9]) && PyBytes_GET_SIZE(args[9]) == 1) { + j = PyBytes_AS_STRING(args[9])[0]; + } + else if (PyByteArray_Check(args[9]) && PyByteArray_GET_SIZE(args[9]) == 1) { + j = PyByteArray_AS_STRING(args[9])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 10", "a byte string of length 1", args[9]); + goto exit; + } + if (nargs < 11) { + goto skip_optional; + } + if (PyBytes_Check(args[10]) && PyBytes_GET_SIZE(args[10]) == 1) { + k = PyBytes_AS_STRING(args[10])[0]; + } + else if (PyByteArray_Check(args[10]) && PyByteArray_GET_SIZE(args[10]) == 1) { + k = PyByteArray_AS_STRING(args[10])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 11", "a byte string of length 1", args[10]); + goto exit; + } + if (nargs < 12) { + goto skip_optional; + } + if (PyBytes_Check(args[11]) && PyBytes_GET_SIZE(args[11]) == 1) { + l = PyBytes_AS_STRING(args[11])[0]; + } + else if (PyByteArray_Check(args[11]) && PyByteArray_GET_SIZE(args[11]) == 1) { + l = PyByteArray_AS_STRING(args[11])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 12", "a byte string of length 1", args[11]); + goto exit; + } + if (nargs < 13) { + goto skip_optional; + } + if (PyBytes_Check(args[12]) && PyBytes_GET_SIZE(args[12]) == 1) { + m = PyBytes_AS_STRING(args[12])[0]; + } + else if (PyByteArray_Check(args[12]) && PyByteArray_GET_SIZE(args[12]) == 1) { + m = PyByteArray_AS_STRING(args[12])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 13", "a byte string of length 1", args[12]); + goto exit; + } + if (nargs < 14) { + goto skip_optional; + } + if (PyBytes_Check(args[13]) && PyBytes_GET_SIZE(args[13]) == 1) { + n = PyBytes_AS_STRING(args[13])[0]; + } + else if (PyByteArray_Check(args[13]) && PyByteArray_GET_SIZE(args[13]) == 1) { + n = PyByteArray_AS_STRING(args[13])[0]; + } + else { + _PyArg_BadArgument("char_converter", "argument 14", "a byte string of length 1", args[13]); + goto exit; + } +skip_optional: + return_value = char_converter_impl(module, a, b, c, d, e, f, g, h, i, j, k, l, m, n); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_char_converter__doc__, +"unsigned_char_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_CHAR_CONVERTER_METHODDEF \ + {"unsigned_char_converter", _PyCFunction_CAST(unsigned_char_converter), METH_FASTCALL, unsigned_char_converter__doc__}, + +static PyObject * +unsigned_char_converter_impl(PyObject *module, unsigned char a, + unsigned char b, unsigned char c); + +static PyObject * +unsigned_char_converter(PyObject *module, PyObject *const *args, Py_ssize_t 
nargs) +{ + PyObject *return_value = NULL; + unsigned char a = 12; + unsigned char b = 34; + unsigned char c = 56; + + if (!_PyArg_CheckPositional("unsigned_char_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[0]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < 0) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is less than minimum"); + goto exit; + } + else if (ival > UCHAR_MAX) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is greater than maximum"); + goto exit; + } + else { + a = (unsigned char) ival; + } + } + if (nargs < 2) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[1]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < 0) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is less than minimum"); + goto exit; + } + else if (ival > UCHAR_MAX) { + PyErr_SetString(PyExc_OverflowError, + "unsigned byte integer is greater than maximum"); + goto exit; + } + else { + b = (unsigned char) ival; + } + } + if (nargs < 3) { + goto skip_optional; + } + { + unsigned long ival = PyLong_AsUnsignedLongMask(args[2]); + if (ival == (unsigned long)-1 && PyErr_Occurred()) { + goto exit; + } + else { + c = (unsigned char) ival; + } + } +skip_optional: + return_value = unsigned_char_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(short_converter__doc__, +"short_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define SHORT_CONVERTER_METHODDEF \ + {"short_converter", _PyCFunction_CAST(short_converter), METH_FASTCALL, short_converter__doc__}, + +static PyObject * +short_converter_impl(PyObject *module, short a); + +static PyObject * +short_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + short a = 12; + + if (!_PyArg_CheckPositional("short_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + long ival = PyLong_AsLong(args[0]); + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + else if (ival < SHRT_MIN) { + PyErr_SetString(PyExc_OverflowError, + "signed short integer is less than minimum"); + goto exit; + } + else if (ival > SHRT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "signed short integer is greater than maximum"); + goto exit; + } + else { + a = (short) ival; + } + } +skip_optional: + return_value = short_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_short_converter__doc__, +"unsigned_short_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_SHORT_CONVERTER_METHODDEF \ + {"unsigned_short_converter", _PyCFunction_CAST(unsigned_short_converter), METH_FASTCALL, unsigned_short_converter__doc__}, + +static PyObject * +unsigned_short_converter_impl(PyObject *module, unsigned short a, + unsigned short b, unsigned short c); + +static PyObject * +unsigned_short_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned short a = 12; + unsigned short b = 34; + unsigned short c = 56; + + if (!_PyArg_CheckPositional("unsigned_short_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedShort_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedShort_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto 
skip_optional; + } + c = (unsigned short)PyLong_AsUnsignedLongMask(args[2]); + if (c == (unsigned short)-1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = unsigned_short_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(int_converter__doc__, +"int_converter($module, a=12, b=34, c=45, /)\n" +"--\n" +"\n"); + +#define INT_CONVERTER_METHODDEF \ + {"int_converter", _PyCFunction_CAST(int_converter), METH_FASTCALL, int_converter__doc__}, + +static PyObject * +int_converter_impl(PyObject *module, int a, int b, int c); + +static PyObject * +int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int a = 12; + int b = 34; + int c = 45; + + if (!_PyArg_CheckPositional("int_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = _PyLong_AsInt(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + b = _PyLong_AsInt(args[1]); + if (b == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyUnicode_Check(args[2])) { + _PyArg_BadArgument("int_converter", "argument 3", "a unicode character", args[2]); + goto exit; + } + if (PyUnicode_READY(args[2])) { + goto exit; + } + if (PyUnicode_GET_LENGTH(args[2]) != 1) { + _PyArg_BadArgument("int_converter", "argument 3", "a unicode character", args[2]); + goto exit; + } + c = PyUnicode_READ_CHAR(args[2], 0); +skip_optional: + return_value = int_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_int_converter__doc__, +"unsigned_int_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_INT_CONVERTER_METHODDEF \ + {"unsigned_int_converter", _PyCFunction_CAST(unsigned_int_converter), METH_FASTCALL, unsigned_int_converter__doc__}, + +static PyObject * +unsigned_int_converter_impl(PyObject *module, unsigned int a, unsigned int b, + unsigned int c); + +static PyObject * +unsigned_int_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned int a = 12; + unsigned int b = 34; + unsigned int c = 56; + + if (!_PyArg_CheckPositional("unsigned_int_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedInt_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedInt_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + c = (unsigned int)PyLong_AsUnsignedLongMask(args[2]); + if (c == (unsigned int)-1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = unsigned_int_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(long_converter__doc__, +"long_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define LONG_CONVERTER_METHODDEF \ + {"long_converter", _PyCFunction_CAST(long_converter), METH_FASTCALL, long_converter__doc__}, + +static PyObject * +long_converter_impl(PyObject *module, long a); + +static PyObject * +long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + long a = 12; + + if (!_PyArg_CheckPositional("long_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyLong_AsLong(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = long_converter_impl(module, a); + 
+exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_long_converter__doc__, +"unsigned_long_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_LONG_CONVERTER_METHODDEF \ + {"unsigned_long_converter", _PyCFunction_CAST(unsigned_long_converter), METH_FASTCALL, unsigned_long_converter__doc__}, + +static PyObject * +unsigned_long_converter_impl(PyObject *module, unsigned long a, + unsigned long b, unsigned long c); + +static PyObject * +unsigned_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned long a = 12; + unsigned long b = 34; + unsigned long c = 56; + + if (!_PyArg_CheckPositional("unsigned_long_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedLong_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedLong_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyLong_Check(args[2])) { + _PyArg_BadArgument("unsigned_long_converter", "argument 3", "int", args[2]); + goto exit; + } + c = PyLong_AsUnsignedLongMask(args[2]); +skip_optional: + return_value = unsigned_long_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(long_long_converter__doc__, +"long_long_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define LONG_LONG_CONVERTER_METHODDEF \ + {"long_long_converter", _PyCFunction_CAST(long_long_converter), METH_FASTCALL, long_long_converter__doc__}, + +static PyObject * +long_long_converter_impl(PyObject *module, long long a); + +static PyObject * +long_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + long long a = 12; + + if (!_PyArg_CheckPositional("long_long_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + a = PyLong_AsLongLong(args[0]); + if (a == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = long_long_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(unsigned_long_long_converter__doc__, +"unsigned_long_long_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define UNSIGNED_LONG_LONG_CONVERTER_METHODDEF \ + {"unsigned_long_long_converter", _PyCFunction_CAST(unsigned_long_long_converter), METH_FASTCALL, unsigned_long_long_converter__doc__}, + +static PyObject * +unsigned_long_long_converter_impl(PyObject *module, unsigned long long a, + unsigned long long b, unsigned long long c); + +static PyObject * +unsigned_long_long_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + unsigned long long a = 12; + unsigned long long b = 34; + unsigned long long c = 56; + + if (!_PyArg_CheckPositional("unsigned_long_long_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_UnsignedLongLong_Converter(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyLong_UnsignedLongLong_Converter(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyLong_Check(args[2])) { + _PyArg_BadArgument("unsigned_long_long_converter", "argument 3", "int", args[2]); + goto exit; + } + c = PyLong_AsUnsignedLongLongMask(args[2]); +skip_optional: + return_value = unsigned_long_long_converter_impl(module, a, b, c); + +exit: + return return_value; +} + 
+PyDoc_STRVAR(py_ssize_t_converter__doc__, +"py_ssize_t_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define PY_SSIZE_T_CONVERTER_METHODDEF \ + {"py_ssize_t_converter", _PyCFunction_CAST(py_ssize_t_converter), METH_FASTCALL, py_ssize_t_converter__doc__}, + +static PyObject * +py_ssize_t_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c); + +static PyObject * +py_ssize_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_ssize_t a = 12; + Py_ssize_t b = 34; + Py_ssize_t c = 56; + + if (!_PyArg_CheckPositional("py_ssize_t_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(args[0]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + a = ival; + } + if (nargs < 2) { + goto skip_optional; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(args[1]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + b = ival; + } + if (nargs < 3) { + goto skip_optional; + } + if (!_Py_convert_optional_to_ssize_t(args[2], &c)) { + goto exit; + } +skip_optional: + return_value = py_ssize_t_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(slice_index_converter__doc__, +"slice_index_converter($module, a=12, b=34, c=56, /)\n" +"--\n" +"\n"); + +#define SLICE_INDEX_CONVERTER_METHODDEF \ + {"slice_index_converter", _PyCFunction_CAST(slice_index_converter), METH_FASTCALL, slice_index_converter__doc__}, + +static PyObject * +slice_index_converter_impl(PyObject *module, Py_ssize_t a, Py_ssize_t b, + Py_ssize_t c); + +static PyObject * +slice_index_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_ssize_t a = 12; + Py_ssize_t b = 34; + Py_ssize_t c = 56; + + if (!_PyArg_CheckPositional("slice_index_converter", nargs, 0, 3)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyEval_SliceIndex(args[0], &a)) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!_PyEval_SliceIndexNotNone(args[1], &b)) { + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!_PyEval_SliceIndex(args[2], &c)) { + goto exit; + } +skip_optional: + return_value = slice_index_converter_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(size_t_converter__doc__, +"size_t_converter($module, a=12, /)\n" +"--\n" +"\n"); + +#define SIZE_T_CONVERTER_METHODDEF \ + {"size_t_converter", _PyCFunction_CAST(size_t_converter), METH_FASTCALL, size_t_converter__doc__}, + +static PyObject * +size_t_converter_impl(PyObject *module, size_t a); + +static PyObject * +size_t_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + size_t a = 12; + + if (!_PyArg_CheckPositional("size_t_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (!_PyLong_Size_t_Converter(args[0], &a)) { + goto exit; + } +skip_optional: + return_value = size_t_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(float_converter__doc__, +"float_converter($module, a=12.5, /)\n" +"--\n" +"\n"); + +#define FLOAT_CONVERTER_METHODDEF \ + {"float_converter", _PyCFunction_CAST(float_converter), METH_FASTCALL, float_converter__doc__}, + 
+static PyObject * +float_converter_impl(PyObject *module, float a); + +static PyObject * +float_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + float a = 12.5; + + if (!_PyArg_CheckPositional("float_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyFloat_CheckExact(args[0])) { + a = (float) (PyFloat_AS_DOUBLE(args[0])); + } + else + { + a = (float) PyFloat_AsDouble(args[0]); + if (a == -1.0 && PyErr_Occurred()) { + goto exit; + } + } +skip_optional: + return_value = float_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(double_converter__doc__, +"double_converter($module, a=12.5, /)\n" +"--\n" +"\n"); + +#define DOUBLE_CONVERTER_METHODDEF \ + {"double_converter", _PyCFunction_CAST(double_converter), METH_FASTCALL, double_converter__doc__}, + +static PyObject * +double_converter_impl(PyObject *module, double a); + +static PyObject * +double_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + double a = 12.5; + + if (!_PyArg_CheckPositional("double_converter", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (PyFloat_CheckExact(args[0])) { + a = PyFloat_AS_DOUBLE(args[0]); + } + else + { + a = PyFloat_AsDouble(args[0]); + if (a == -1.0 && PyErr_Occurred()) { + goto exit; + } + } +skip_optional: + return_value = double_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(py_complex_converter__doc__, +"py_complex_converter($module, a, /)\n" +"--\n" +"\n"); + +#define PY_COMPLEX_CONVERTER_METHODDEF \ + {"py_complex_converter", (PyCFunction)py_complex_converter, METH_O, py_complex_converter__doc__}, + +static PyObject * +py_complex_converter_impl(PyObject *module, Py_complex a); + +static PyObject * +py_complex_converter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + Py_complex a; + + a = PyComplex_AsCComplex(arg); + if (PyErr_Occurred()) { + goto exit; + } + return_value = py_complex_converter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(str_converter__doc__, +"str_converter($module, a=\'a\', b=\'b\', c=\'c\', /)\n" +"--\n" +"\n"); + +#define STR_CONVERTER_METHODDEF \ + {"str_converter", _PyCFunction_CAST(str_converter), METH_FASTCALL, str_converter__doc__}, + +static PyObject * +str_converter_impl(PyObject *module, const char *a, const char *b, + const char *c, Py_ssize_t c_length); + +static PyObject * +str_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + const char *a = "a"; + const char *b = "b"; + const char *c = "c"; + Py_ssize_t c_length; + + if (!_PyArg_ParseStack(args, nargs, "|sys#:str_converter", + &a, &b, &c, &c_length)) { + goto exit; + } + return_value = str_converter_impl(module, a, b, c, c_length); + +exit: + return return_value; +} + +PyDoc_STRVAR(str_converter_encoding__doc__, +"str_converter_encoding($module, a, b, c, /)\n" +"--\n" +"\n"); + +#define STR_CONVERTER_ENCODING_METHODDEF \ + {"str_converter_encoding", _PyCFunction_CAST(str_converter_encoding), METH_FASTCALL, str_converter_encoding__doc__}, + +static PyObject * +str_converter_encoding_impl(PyObject *module, char *a, char *b, char *c, + Py_ssize_t c_length); + +static PyObject * +str_converter_encoding(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char *a = NULL; + char *b = NULL; + char *c = NULL; + Py_ssize_t 
c_length; + + if (!_PyArg_ParseStack(args, nargs, "esetet#:str_converter_encoding", + "idna", &a, "idna", &b, "idna", &c, &c_length)) { + goto exit; + } + return_value = str_converter_encoding_impl(module, a, b, c, c_length); + /* Post parse cleanup for a */ + PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + /* Post parse cleanup for c */ + PyMem_FREE(c); + +exit: + return return_value; +} + +PyDoc_STRVAR(py_buffer_converter__doc__, +"py_buffer_converter($module, a, b, /)\n" +"--\n" +"\n"); + +#define PY_BUFFER_CONVERTER_METHODDEF \ + {"py_buffer_converter", _PyCFunction_CAST(py_buffer_converter), METH_FASTCALL, py_buffer_converter__doc__}, + +static PyObject * +py_buffer_converter_impl(PyObject *module, Py_buffer *a, Py_buffer *b); + +static PyObject * +py_buffer_converter(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + Py_buffer a = {NULL, NULL}; + Py_buffer b = {NULL, NULL}; + + if (!_PyArg_ParseStack(args, nargs, "z*w*:py_buffer_converter", + &a, &b)) { + goto exit; + } + return_value = py_buffer_converter_impl(module, &a, &b); + +exit: + /* Cleanup for a */ + if (a.obj) { + PyBuffer_Release(&a); + } + /* Cleanup for b */ + if (b.obj) { + PyBuffer_Release(&b); + } + + return return_value; +} + +PyDoc_STRVAR(keywords__doc__, +"keywords($module, /, a, b)\n" +"--\n" +"\n"); + +#define KEYWORDS_METHODDEF \ + {"keywords", _PyCFunction_CAST(keywords), METH_FASTCALL|METH_KEYWORDS, keywords__doc__}, + +static PyObject * +keywords_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +keywords(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = keywords_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_kwonly__doc__, +"keywords_kwonly($module, /, a, *, b)\n" +"--\n" +"\n"); + +#define KEYWORDS_KWONLY_METHODDEF \ + {"keywords_kwonly", _PyCFunction_CAST(keywords_kwonly), METH_FASTCALL|METH_KEYWORDS, keywords_kwonly__doc__}, + +static PyObject * +keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +keywords_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // 
!Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = keywords_kwonly_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_opt__doc__, +"keywords_opt($module, /, a, b=None, c=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_OPT_METHODDEF \ + {"keywords_opt", _PyCFunction_CAST(keywords_opt), METH_FASTCALL|METH_KEYWORDS, keywords_opt__doc__}, + +static PyObject * +keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, PyObject *c); + +static PyObject * +keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + c = args[2]; +skip_optional_pos: + return_value = keywords_opt_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_opt_kwonly__doc__, +"keywords_opt_kwonly($module, /, a, b=None, *, c=None, d=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_OPT_KWONLY_METHODDEF \ + {"keywords_opt_kwonly", _PyCFunction_CAST(keywords_opt_kwonly), METH_FASTCALL|METH_KEYWORDS, keywords_opt_kwonly__doc__}, + +static PyObject * +keywords_opt_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +keywords_opt_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_opt_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = keywords_opt_kwonly_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(keywords_kwonly_opt__doc__, +"keywords_kwonly_opt($module, /, a, *, b=None, c=None)\n" +"--\n" +"\n"); + +#define KEYWORDS_KWONLY_OPT_METHODDEF \ + {"keywords_kwonly_opt", _PyCFunction_CAST(keywords_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, keywords_kwonly_opt__doc__}, + +static PyObject * +keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c); + +static PyObject * +keywords_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keywords_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[1]) { + b = args[1]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + c = args[2]; +skip_optional_kwonly: + return_value = keywords_kwonly_opt_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords__doc__, +"posonly_keywords($module, a, /, b)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_METHODDEF \ + {"posonly_keywords", _PyCFunction_CAST(posonly_keywords), METH_FASTCALL|METH_KEYWORDS, posonly_keywords__doc__}, + +static PyObject * +posonly_keywords_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +posonly_keywords(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = posonly_keywords_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_kwonly__doc__, +"posonly_kwonly($module, a, /, *, b)\n" +"--\n" +"\n"); + +#define POSONLY_KWONLY_METHODDEF \ + {"posonly_kwonly", _PyCFunction_CAST(posonly_kwonly), METH_FASTCALL|METH_KEYWORDS, posonly_kwonly__doc__}, + +static PyObject * +posonly_kwonly_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +posonly_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = posonly_kwonly_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_kwonly__doc__, +"posonly_keywords_kwonly($module, a, /, b, *, c)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_KWONLY_METHODDEF \ + 
{"posonly_keywords_kwonly", _PyCFunction_CAST(posonly_keywords_kwonly), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_kwonly__doc__}, + +static PyObject * +posonly_keywords_kwonly_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c); + +static PyObject * +posonly_keywords_kwonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_kwonly", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *a; + PyObject *b; + PyObject *c; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + return_value = posonly_keywords_kwonly_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_opt__doc__, +"posonly_keywords_opt($module, a, /, b, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_OPT_METHODDEF \ + {"posonly_keywords_opt", _PyCFunction_CAST(posonly_keywords_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_opt__doc__}, + +static PyObject * +posonly_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + d = args[3]; +skip_optional_pos: + return_value = posonly_keywords_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_keywords_opt__doc__, +"posonly_opt_keywords_opt($module, a, b=None, /, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KEYWORDS_OPT_METHODDEF \ + {"posonly_opt_keywords_opt", _PyCFunction_CAST(posonly_opt_keywords_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_keywords_opt__doc__}, + +static PyObject * +posonly_opt_keywords_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_opt_keywords_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_keywords_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + d = args[3]; +skip_optional_pos: + return_value = posonly_opt_keywords_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_kwonly_opt__doc__, +"posonly_kwonly_opt($module, a, /, *, b, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_KWONLY_OPT_METHODDEF \ + {"posonly_kwonly_opt", _PyCFunction_CAST(posonly_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_kwonly_opt__doc__}, + +static PyObject * +posonly_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_kwonly_opt__doc__, +"posonly_opt_kwonly_opt($module, a, b=None, /, *, c=None, d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_opt_kwonly_opt", _PyCFunction_CAST(posonly_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_opt_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +posonly_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_opt_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_kwonly_opt__doc__, +"posonly_keywords_kwonly_opt($module, a, /, b, *, c, d=None, e=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_KWONLY_OPT_METHODDEF \ + {"posonly_keywords_kwonly_opt", _PyCFunction_CAST(posonly_keywords_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_kwonly_opt__doc__}, + +static PyObject * +posonly_keywords_kwonly_opt_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e); + +static PyObject * +posonly_keywords_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), &_Py_ID(e), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", "e", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 3; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d = Py_None; + PyObject *e = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + d = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + e = args[4]; +skip_optional_kwonly: + return_value = posonly_keywords_kwonly_opt_impl(module, a, b, c, d, e); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_keywords_opt_kwonly_opt__doc__, +"posonly_keywords_opt_kwonly_opt($module, a, /, b, c=None, *, d=None,\n" +" e=None)\n" +"--\n" +"\n"); + +#define POSONLY_KEYWORDS_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_keywords_opt_kwonly_opt", _PyCFunction_CAST(posonly_keywords_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_keywords_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, PyObject *d, + PyObject *e); + +static PyObject * +posonly_keywords_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), &_Py_ID(e), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", "c", "d", "e", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_keywords_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *a; + PyObject *b; + PyObject *c = Py_None; + PyObject *d = Py_None; + PyObject *e = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + d = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + e = args[4]; +skip_optional_kwonly: + return_value = posonly_keywords_opt_kwonly_opt_impl(module, a, b, c, d, e); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_opt_keywords_opt_kwonly_opt__doc__, +"posonly_opt_keywords_opt_kwonly_opt($module, a, b=None, /, c=None, *,\n" +" d=None)\n" +"--\n" +"\n"); + +#define POSONLY_OPT_KEYWORDS_OPT_KWONLY_OPT_METHODDEF \ + {"posonly_opt_keywords_opt_kwonly_opt", _PyCFunction_CAST(posonly_opt_keywords_opt_kwonly_opt), METH_FASTCALL|METH_KEYWORDS, posonly_opt_keywords_opt_kwonly_opt__doc__}, + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt_impl(PyObject *module, PyObject *a, + PyObject *b, PyObject *c, + PyObject *d); + +static PyObject * +posonly_opt_keywords_opt_kwonly_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_opt_keywords_opt_kwonly_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + PyObject *d = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (nargs < 2) { + goto skip_optional_posonly; + } + noptargs--; + b = args[1]; +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_pos; + } + if (args[2]) { + c = args[2]; + if (!--noptargs) { + goto skip_optional_pos; + } + } +skip_optional_pos: + if (!noptargs) { + goto skip_optional_kwonly; + } + d = args[3]; +skip_optional_kwonly: + return_value = posonly_opt_keywords_opt_kwonly_opt_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(keyword_only_parameter__doc__, +"keyword_only_parameter($module, /, *, a)\n" +"--\n" +"\n"); + +#define KEYWORD_ONLY_PARAMETER_METHODDEF \ + {"keyword_only_parameter", _PyCFunction_CAST(keyword_only_parameter), METH_FASTCALL|METH_KEYWORDS, keyword_only_parameter__doc__}, + +static PyObject * +keyword_only_parameter_impl(PyObject *module, PyObject *a); + +static PyObject * +keyword_only_parameter(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "keyword_only_parameter", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *a; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + return_value = keyword_only_parameter_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(posonly_vararg__doc__, +"posonly_vararg($module, a, /, b, *args)\n" +"--\n" +"\n"); + +#define POSONLY_VARARG_METHODDEF \ + {"posonly_vararg", _PyCFunction_CAST(posonly_vararg), METH_FASTCALL|METH_KEYWORDS, posonly_vararg__doc__}, + +static PyObject * +posonly_vararg_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *args); + +static PyObject * +posonly_vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "posonly_vararg", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *a; + PyObject *b; + PyObject *__clinic_args = NULL; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, 2, argsbuf); + if (!args) { + goto exit; 
+ } + a = args[0]; + b = args[1]; + __clinic_args = args[2]; + return_value = posonly_vararg_impl(module, a, b, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_and_posonly__doc__, +"vararg_and_posonly($module, a, /, *args)\n" +"--\n" +"\n"); + +#define VARARG_AND_POSONLY_METHODDEF \ + {"vararg_and_posonly", _PyCFunction_CAST(vararg_and_posonly), METH_FASTCALL, vararg_and_posonly__doc__}, + +static PyObject * +vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args); + +static PyObject * +vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *a; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("vararg_and_posonly", nargs, 1, PY_SSIZE_T_MAX)) { + goto exit; + } + a = args[0]; + __clinic_args = PyTuple_New(nargs - 1); + for (Py_ssize_t i = 0; i < nargs - 1; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[1 + i])); + } + return_value = vararg_and_posonly_impl(module, a, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg__doc__, +"vararg($module, /, a, *args)\n" +"--\n" +"\n"); + +#define VARARG_METHODDEF \ + {"vararg", _PyCFunction_CAST(vararg), METH_FASTCALL|METH_KEYWORDS, vararg__doc__}, + +static PyObject * +vararg_impl(PyObject *module, PyObject *a, PyObject *args); + +static PyObject * +vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *__clinic_args = NULL; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + __clinic_args = args[1]; + return_value = vararg_impl(module, a, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_with_default__doc__, +"vararg_with_default($module, /, a, *args, b=False)\n" +"--\n" +"\n"); + +#define VARARG_WITH_DEFAULT_METHODDEF \ + {"vararg_with_default", _PyCFunction_CAST(vararg_with_default), METH_FASTCALL|METH_KEYWORDS, vararg_with_default__doc__}, + +static PyObject * +vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, + int b); + +static PyObject * +vararg_with_default(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // 
!Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_with_default", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *__clinic_args = NULL; + int b = 0; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + __clinic_args = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + b = PyObject_IsTrue(args[2]); + if (b < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = vararg_with_default_impl(module, a, __clinic_args, b); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(vararg_with_only_defaults__doc__, +"vararg_with_only_defaults($module, /, *args, b=None)\n" +"--\n" +"\n"); + +#define VARARG_WITH_ONLY_DEFAULTS_METHODDEF \ + {"vararg_with_only_defaults", _PyCFunction_CAST(vararg_with_only_defaults), METH_FASTCALL|METH_KEYWORDS, vararg_with_only_defaults__doc__}, + +static PyObject * +vararg_with_only_defaults_impl(PyObject *module, PyObject *args, PyObject *b); + +static PyObject * +vararg_with_only_defaults(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_with_only_defaults", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = 0 + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *__clinic_args = NULL; + PyObject *b = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, 0, argsbuf); + if (!args) { + goto exit; + } + __clinic_args = args[0]; + if (!noptargs) { + goto skip_optional_kwonly; + } + b = args[1]; +skip_optional_kwonly: + return_value = vararg_with_only_defaults_impl(module, __clinic_args, b); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_32092_oob__doc__, +"gh_32092_oob($module, /, pos1, pos2, *varargs, kw1=None, kw2=None)\n" +"--\n" +"\n" +"Proof-of-concept of GH-32092 OOB bug."); + +#define GH_32092_OOB_METHODDEF \ + {"gh_32092_oob", _PyCFunction_CAST(gh_32092_oob), METH_FASTCALL|METH_KEYWORDS, gh_32092_oob__doc__}, + +static PyObject * +gh_32092_oob_impl(PyObject *module, PyObject *pos1, PyObject *pos2, + PyObject *varargs, PyObject *kw1, PyObject *kw2); + +static PyObject * +gh_32092_oob(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(pos1), &_Py_ID(pos2), &_Py_ID(kw1), &_Py_ID(kw2), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"pos1", "pos2", "kw1", "kw2", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "gh_32092_oob", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + Py_ssize_t noptargs = Py_MIN(nargs, 2) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; + PyObject *pos1; + PyObject *pos2; + PyObject *varargs = NULL; + PyObject *kw1 = Py_None; + PyObject *kw2 = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, 2, argsbuf); + if (!args) { + goto exit; + } + pos1 = args[0]; + pos2 = args[1]; + varargs = args[2]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[3]) { + kw1 = args[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + kw2 = args[4]; +skip_optional_kwonly: + return_value = gh_32092_oob_impl(module, pos1, pos2, varargs, kw1, kw2); + +exit: + Py_XDECREF(varargs); + return return_value; +} + +PyDoc_STRVAR(gh_32092_kw_pass__doc__, +"gh_32092_kw_pass($module, /, pos, *args, kw=None)\n" +"--\n" +"\n" +"Proof-of-concept of GH-32092 keyword args passing bug."); + +#define GH_32092_KW_PASS_METHODDEF \ + {"gh_32092_kw_pass", _PyCFunction_CAST(gh_32092_kw_pass), METH_FASTCALL|METH_KEYWORDS, gh_32092_kw_pass__doc__}, + +static PyObject * +gh_32092_kw_pass_impl(PyObject *module, PyObject *pos, PyObject *args, + PyObject *kw); + +static PyObject * +gh_32092_kw_pass(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(pos), &_Py_ID(kw), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"pos", "kw", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "gh_32092_kw_pass", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *pos; + PyObject *__clinic_args = NULL; + PyObject *kw = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + pos = args[0]; + __clinic_args = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + kw = args[2]; +skip_optional_kwonly: + return_value = gh_32092_kw_pass_impl(module, pos, __clinic_args, kw); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_99233_refcount__doc__, +"gh_99233_refcount($module, /, *args)\n" +"--\n" +"\n" +"Proof-of-concept of GH-99233 refcount error bug."); + +#define GH_99233_REFCOUNT_METHODDEF \ + {"gh_99233_refcount", _PyCFunction_CAST(gh_99233_refcount), METH_FASTCALL, gh_99233_refcount__doc__}, + +static PyObject * +gh_99233_refcount_impl(PyObject *module, PyObject *args); + +static PyObject * +gh_99233_refcount(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("gh_99233_refcount", nargs, 0, PY_SSIZE_T_MAX)) { + goto exit; + } + __clinic_args = PyTuple_New(nargs - 0); + for (Py_ssize_t i = 0; i < nargs - 0; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[0 + i])); + } + return_value = gh_99233_refcount_impl(module, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gh_99240_double_free__doc__, +"gh_99240_double_free($module, a, b, /)\n" +"--\n" +"\n" +"Proof-of-concept of GH-99240 double-free bug."); + +#define GH_99240_DOUBLE_FREE_METHODDEF \ + {"gh_99240_double_free", _PyCFunction_CAST(gh_99240_double_free), METH_FASTCALL, gh_99240_double_free__doc__}, + +static PyObject * +gh_99240_double_free_impl(PyObject *module, char *a, char *b); + +static PyObject * +gh_99240_double_free(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + char *a = NULL; + char *b = NULL; + + if (!_PyArg_ParseStack(args, nargs, "eses:gh_99240_double_free", + "idna", &a, "idna", &b)) { + goto exit; + } + return_value = gh_99240_double_free_impl(module, a, b); + /* Post parse cleanup for a */ + PyMem_FREE(a); + /* Post parse cleanup for b */ + PyMem_FREE(b); + +exit: + return return_value; +} +/*[clinic end generated code: output=9a5ca5909c087102 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testinternalcapi.c.h b/Modules/clinic/_testinternalcapi.c.h index 8113fff37997af..e8d5681b194916 100644 --- a/Modules/clinic/_testinternalcapi.c.h +++ b/Modules/clinic/_testinternalcapi.c.h @@ -8,6 +8,69 @@ preserve #endif +PyDoc_STRVAR(_testinternalcapi_compiler_codegen__doc__, +"compiler_codegen($module, /, ast, filename, optimize)\n" +"--\n" +"\n" +"Apply compiler code generation to an AST."); + +#define _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF \ + {"compiler_codegen", _PyCFunction_CAST(_testinternalcapi_compiler_codegen), METH_FASTCALL|METH_KEYWORDS, _testinternalcapi_compiler_codegen__doc__}, + +static PyObject * +_testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, + PyObject *filename, int optimize); + +static PyObject * +_testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple 
= { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(ast), &_Py_ID(filename), &_Py_ID(optimize), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"ast", "filename", "optimize", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "compiler_codegen", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *ast; + PyObject *filename; + int optimize; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + ast = args[0]; + filename = args[1]; + optimize = _PyLong_AsInt(args[2]); + if (optimize == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testinternalcapi_compiler_codegen_impl(module, ast, filename, optimize); + +exit: + return return_value; +} + PyDoc_STRVAR(_testinternalcapi_optimize_cfg__doc__, "optimize_cfg($module, /, instructions, consts)\n" "--\n" @@ -65,4 +128,4 @@ _testinternalcapi_optimize_cfg(PyObject *module, PyObject *const *args, Py_ssize exit: return return_value; } -/*[clinic end generated code: output=3b1fd713290f68a9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=efe95836482fd542 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index cc1a5881e0bfd6..13bf8b482cd69e 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -742,6 +742,32 @@ _winapi_MapViewOfFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } +PyDoc_STRVAR(_winapi_UnmapViewOfFile__doc__, +"UnmapViewOfFile($module, address, /)\n" +"--\n" +"\n"); + +#define _WINAPI_UNMAPVIEWOFFILE_METHODDEF \ + {"UnmapViewOfFile", (PyCFunction)_winapi_UnmapViewOfFile, METH_O, _winapi_UnmapViewOfFile__doc__}, + +static PyObject * +_winapi_UnmapViewOfFile_impl(PyObject *module, LPCVOID address); + +static PyObject * +_winapi_UnmapViewOfFile(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + LPCVOID address; + + if (!PyArg_Parse(arg, "" F_POINTER ":UnmapViewOfFile", &address)) { + goto exit; + } + return_value = _winapi_UnmapViewOfFile_impl(module, address); + +exit: + return return_value; +} + PyDoc_STRVAR(_winapi_OpenFileMapping__doc__, "OpenFileMapping($module, desired_access, inherit_handle, name, /)\n" "--\n" @@ -1345,4 +1371,4 @@ _winapi__mimetypes_read_windows_registry(PyObject *module, PyObject *const *args exit: return return_value; } -/*[clinic end generated code: output=83c4a3f0e70e7775 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=23ea9e176d86e026 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_zoneinfo.c.h b/Modules/clinic/_zoneinfo.c.h new file mode 100644 index 00000000000000..78fcbfa9411bb8 --- /dev/null +++ b/Modules/clinic/_zoneinfo.c.h @@ -0,0 +1,188 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(zoneinfo_ZoneInfo_from_file__doc__, +"from_file($type, file_obj, /, key=None)\n" +"--\n" +"\n" +"Create a ZoneInfo file from a file object."); + +#define ZONEINFO_ZONEINFO_FROM_FILE_METHODDEF \ + {"from_file", _PyCFunction_CAST(zoneinfo_ZoneInfo_from_file), METH_FASTCALL|METH_KEYWORDS|METH_CLASS, zoneinfo_ZoneInfo_from_file__doc__}, + +static PyObject * 
+zoneinfo_ZoneInfo_from_file_impl(PyTypeObject *type, PyObject *file_obj, + PyObject *key); + +static PyObject * +zoneinfo_ZoneInfo_from_file(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(key), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "key", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "from_file", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *file_obj; + PyObject *key = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + file_obj = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + key = args[1]; +skip_optional_pos: + return_value = zoneinfo_ZoneInfo_from_file_impl(type, file_obj, key); + +exit: + return return_value; +} + +PyDoc_STRVAR(zoneinfo_ZoneInfo_no_cache__doc__, +"no_cache($type, /, key)\n" +"--\n" +"\n" +"Get a new instance of ZoneInfo, bypassing the cache."); + +#define ZONEINFO_ZONEINFO_NO_CACHE_METHODDEF \ + {"no_cache", _PyCFunction_CAST(zoneinfo_ZoneInfo_no_cache), METH_FASTCALL|METH_KEYWORDS|METH_CLASS, zoneinfo_ZoneInfo_no_cache__doc__}, + +static PyObject * +zoneinfo_ZoneInfo_no_cache_impl(PyTypeObject *type, PyObject *key); + +static PyObject * +zoneinfo_ZoneInfo_no_cache(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(key), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"key", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "no_cache", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *key; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + key = args[0]; + return_value = zoneinfo_ZoneInfo_no_cache_impl(type, key); + +exit: + return return_value; +} + +PyDoc_STRVAR(zoneinfo_ZoneInfo_clear_cache__doc__, +"clear_cache($type, /, *, only_keys=None)\n" +"--\n" +"\n" +"Clear the ZoneInfo cache."); + +#define ZONEINFO_ZONEINFO_CLEAR_CACHE_METHODDEF \ + {"clear_cache", _PyCFunction_CAST(zoneinfo_ZoneInfo_clear_cache), METH_FASTCALL|METH_KEYWORDS|METH_CLASS, zoneinfo_ZoneInfo_clear_cache__doc__}, + +static PyObject * +zoneinfo_ZoneInfo_clear_cache_impl(PyTypeObject *type, PyObject *only_keys); + +static PyObject * +zoneinfo_ZoneInfo_clear_cache(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) 
&& !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(only_keys), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"only_keys", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "clear_cache", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *only_keys = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_kwonly; + } + only_keys = args[0]; +skip_optional_kwonly: + return_value = zoneinfo_ZoneInfo_clear_cache_impl(type, only_keys); + +exit: + return return_value; +} +/*[clinic end generated code: output=d2da73ef66146b83 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h index 8806606d85befe..17f9ebb249390f 100644 --- a/Modules/clinic/itertoolsmodule.c.h +++ b/Modules/clinic/itertoolsmodule.c.h @@ -8,6 +8,85 @@ preserve #endif +PyDoc_STRVAR(batched_new__doc__, +"batched(iterable, n)\n" +"--\n" +"\n" +"Batch data into lists of length n. The last batch may be shorter than n.\n" +"\n" +"Loops over the input iterable and accumulates data into lists\n" +"up to size n. The input is consumed lazily, just enough to\n" +"fill a list. The result is yielded as soon as a batch is full\n" +"or when the input iterable is exhausted.\n" +"\n" +" >>> for batch in batched(\'ABCDEFG\', 3):\n" +" ... 
print(batch)\n" +" ...\n" +" [\'A\', \'B\', \'C\']\n" +" [\'D\', \'E\', \'F\']\n" +" [\'G\']"); + +static PyObject * +batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n); + +static PyObject * +batched_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(iterable), &_Py_ID(n), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"iterable", "n", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "batched", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject * const *fastargs; + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + PyObject *iterable; + Py_ssize_t n; + + fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 2, 2, 0, argsbuf); + if (!fastargs) { + goto exit; + } + iterable = fastargs[0]; + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(fastargs[1]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + n = ival; + } + return_value = batched_new_impl(type, iterable, n); + +exit: + return return_value; +} + PyDoc_STRVAR(pairwise_new__doc__, "pairwise(iterable, /)\n" "--\n" @@ -834,4 +913,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=b1056d63f68a9059 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=efea8cd1e647bd17 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/overlapped.c.h b/Modules/clinic/overlapped.c.h index e8c2fe5653d10d..9d9f2cbf6afdc2 100644 --- a/Modules/clinic/overlapped.c.h +++ b/Modules/clinic/overlapped.c.h @@ -1093,6 +1093,10 @@ _overlapped_WSAConnect(PyObject *module, PyObject *const *args, Py_ssize_t nargs if (!ConnectSocket && PyErr_Occurred()) { goto exit; } + if (!PyTuple_Check(args[1])) { + _PyArg_BadArgument("WSAConnect", "argument 2", "tuple", args[1]); + goto exit; + } AddressObj = args[1]; return_value = _overlapped_WSAConnect_impl(module, ConnectSocket, AddressObj); @@ -1140,6 +1144,10 @@ _overlapped_Overlapped_WSASendTo(OverlappedObject *self, PyObject *const *args, if (!_PyLong_UnsignedLong_Converter(args[2], &flags)) { goto exit; } + if (!PyTuple_Check(args[3])) { + _PyArg_BadArgument("WSASendTo", "argument 4", "tuple", args[3]); + goto exit; + } AddressObj = args[3]; return_value = _overlapped_Overlapped_WSASendTo_impl(self, handle, &bufobj, flags, AddressObj); @@ -1254,4 +1262,4 @@ _overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const * return return_value; } -/*[clinic end generated code: output=e0f866222bd5873b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b2e89694b8de3d00 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 7fac07804686b4..f9f6ca372ec6c7 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -1849,7 +1849,8 @@ PyDoc_STRVAR(os_mkdir__doc__, "dir_fd may not be implemented on your platform.\n" " If it is unavailable, 
using it will raise a NotImplementedError.\n" "\n" -"The mode argument is ignored on Windows."); +"The mode argument is ignored on Windows. Where it is used, the current umask\n" +"value is first masked out."); #define OS_MKDIR_METHODDEF \ {"mkdir", _PyCFunction_CAST(os_mkdir), METH_FASTCALL|METH_KEYWORDS, os_mkdir__doc__}, @@ -5210,6 +5211,147 @@ os_pidfd_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec #endif /* (defined(__linux__) && defined(__NR_pidfd_open)) */ +#if defined(HAVE_SETNS) + +PyDoc_STRVAR(os_setns__doc__, +"setns($module, /, fd, nstype=0)\n" +"--\n" +"\n" +"Move the calling thread into different namespaces.\n" +"\n" +" fd\n" +" A file descriptor to a namespace.\n" +" nstype\n" +" Type of namespace."); + +#define OS_SETNS_METHODDEF \ + {"setns", _PyCFunction_CAST(os_setns), METH_FASTCALL|METH_KEYWORDS, os_setns__doc__}, + +static PyObject * +os_setns_impl(PyObject *module, int fd, int nstype); + +static PyObject * +os_setns(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(fd), &_Py_ID(nstype), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"fd", "nstype", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "setns", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + int fd; + int nstype = 0; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + if (!_PyLong_FileDescriptor_Converter(args[0], &fd)) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + nstype = _PyLong_AsInt(args[1]); + if (nstype == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional_pos: + return_value = os_setns_impl(module, fd, nstype); + +exit: + return return_value; +} + +#endif /* defined(HAVE_SETNS) */ + +#if defined(HAVE_UNSHARE) + +PyDoc_STRVAR(os_unshare__doc__, +"unshare($module, /, flags)\n" +"--\n" +"\n" +"Disassociate parts of a process (or thread) execution context.\n" +"\n" +" flags\n" +" Namespaces to be unshared."); + +#define OS_UNSHARE_METHODDEF \ + {"unshare", _PyCFunction_CAST(os_unshare), METH_FASTCALL|METH_KEYWORDS, os_unshare__doc__}, + +static PyObject * +os_unshare_impl(PyObject *module, int flags); + +static PyObject * +os_unshare(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(flags), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"flags", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "unshare", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + int flags; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + flags = _PyLong_AsInt(args[0]); + if (flags == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = os_unshare_impl(module, flags); + +exit: + return return_value; +} + +#endif /* defined(HAVE_UNSHARE) */ + #if (defined(HAVE_READLINK) || defined(MS_WINDOWS)) PyDoc_STRVAR(os_readlink__doc__, @@ -10127,6 +10269,38 @@ os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *c return return_value; } +PyDoc_STRVAR(os_DirEntry_is_junction__doc__, +"is_junction($self, /)\n" +"--\n" +"\n" +"Return True if the entry is a junction; cached per entry."); + +#define OS_DIRENTRY_IS_JUNCTION_METHODDEF \ + {"is_junction", _PyCFunction_CAST(os_DirEntry_is_junction), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_junction__doc__}, + +static int +os_DirEntry_is_junction_impl(DirEntry *self, PyTypeObject *defining_class); + +static PyObject * +os_DirEntry_is_junction(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + int _return_value; + + if (nargs) { + PyErr_SetString(PyExc_TypeError, "is_junction() takes no arguments"); + goto exit; + } + _return_value = os_DirEntry_is_junction_impl(self, defining_class); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(os_DirEntry_stat__doc__, "stat($self, /, *, follow_symlinks=True)\n" "--\n" @@ -11084,6 +11258,14 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #define OS_PIDFD_OPEN_METHODDEF 
#endif /* !defined(OS_PIDFD_OPEN_METHODDEF) */ +#ifndef OS_SETNS_METHODDEF + #define OS_SETNS_METHODDEF +#endif /* !defined(OS_SETNS_METHODDEF) */ + +#ifndef OS_UNSHARE_METHODDEF + #define OS_UNSHARE_METHODDEF +#endif /* !defined(OS_UNSHARE_METHODDEF) */ + #ifndef OS_READLINK_METHODDEF #define OS_READLINK_METHODDEF #endif /* !defined(OS_READLINK_METHODDEF) */ @@ -11367,4 +11549,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=dd43d388b442c96d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4192d8e09e216300 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h index f0276c63df18f2..3b3c6ba150a1ad 100644 --- a/Modules/clinic/signalmodule.c.h +++ b/Modules/clinic/signalmodule.c.h @@ -211,8 +211,9 @@ PyDoc_STRVAR(signal_strsignal__doc__, "\n" "Return the system description of the given signal.\n" "\n" -"The return values can be such as \"Interrupt\", \"Segmentation fault\", etc.\n" -"Returns None if the signal is not recognized."); +"Returns the description of signal *signalnum*, such as \"Interrupt\"\n" +"for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no\n" +"description. Raises :exc:`ValueError` if *signalnum* is invalid."); #define SIGNAL_STRSIGNAL_METHODDEF \ {"strsignal", (PyCFunction)signal_strsignal, METH_O, signal_strsignal__doc__}, @@ -704,4 +705,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ -/*[clinic end generated code: output=f2a3321b32b0637c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2b54dc607f6e3146 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/syslogmodule.c.h b/Modules/clinic/syslogmodule.c.h new file mode 100644 index 00000000000000..0ce66ad4e1a490 --- /dev/null +++ b/Modules/clinic/syslogmodule.c.h @@ -0,0 +1,257 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(syslog_openlog__doc__, +"openlog($module, /, ident=, logoption=0,\n" +" facility=LOG_USER)\n" +"--\n" +"\n" +"Set logging options of subsequent syslog() calls."); + +#define SYSLOG_OPENLOG_METHODDEF \ + {"openlog", _PyCFunction_CAST(syslog_openlog), METH_FASTCALL|METH_KEYWORDS, syslog_openlog__doc__}, + +static PyObject * +syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt, + long facility); + +static PyObject * +syslog_openlog(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(ident), &_Py_ID(logoption), &_Py_ID(facility), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"ident", "logoption", "facility", NULL}; 
+ static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "openlog", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *ident = NULL; + long logopt = 0; + long facility = LOG_USER; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 3, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (args[0]) { + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("openlog", "argument 'ident'", "str", args[0]); + goto exit; + } + if (PyUnicode_READY(args[0]) == -1) { + goto exit; + } + ident = args[0]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + if (args[1]) { + logopt = PyLong_AsLong(args[1]); + if (logopt == -1 && PyErr_Occurred()) { + goto exit; + } + if (!--noptargs) { + goto skip_optional_pos; + } + } + facility = PyLong_AsLong(args[2]); + if (facility == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional_pos: + return_value = syslog_openlog_impl(module, ident, logopt, facility); + +exit: + return return_value; +} + +PyDoc_STRVAR(syslog_syslog__doc__, +"syslog([priority=LOG_INFO,] message)\n" +"Send the string message to the system logger."); + +#define SYSLOG_SYSLOG_METHODDEF \ + {"syslog", (PyCFunction)syslog_syslog, METH_VARARGS, syslog_syslog__doc__}, + +static PyObject * +syslog_syslog_impl(PyObject *module, int group_left_1, int priority, + const char *message); + +static PyObject * +syslog_syslog(PyObject *module, PyObject *args) +{ + PyObject *return_value = NULL; + int group_left_1 = 0; + int priority = LOG_INFO; + const char *message; + + switch (PyTuple_GET_SIZE(args)) { + case 1: + if (!PyArg_ParseTuple(args, "s:syslog", &message)) { + goto exit; + } + break; + case 2: + if (!PyArg_ParseTuple(args, "is:syslog", &priority, &message)) { + goto exit; + } + group_left_1 = 1; + break; + default: + PyErr_SetString(PyExc_TypeError, "syslog.syslog requires 1 to 2 arguments"); + goto exit; + } + return_value = syslog_syslog_impl(module, group_left_1, priority, message); + +exit: + return return_value; +} + +PyDoc_STRVAR(syslog_closelog__doc__, +"closelog($module, /)\n" +"--\n" +"\n" +"Reset the syslog module values and call the system library closelog()."); + +#define SYSLOG_CLOSELOG_METHODDEF \ + {"closelog", (PyCFunction)syslog_closelog, METH_NOARGS, syslog_closelog__doc__}, + +static PyObject * +syslog_closelog_impl(PyObject *module); + +static PyObject * +syslog_closelog(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return syslog_closelog_impl(module); +} + +PyDoc_STRVAR(syslog_setlogmask__doc__, +"setlogmask($module, maskpri, /)\n" +"--\n" +"\n" +"Set the priority mask to maskpri and return the previous mask value."); + +#define SYSLOG_SETLOGMASK_METHODDEF \ + {"setlogmask", (PyCFunction)syslog_setlogmask, METH_O, syslog_setlogmask__doc__}, + +static long +syslog_setlogmask_impl(PyObject *module, long maskpri); + +static PyObject * +syslog_setlogmask(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + long maskpri; + long _return_value; + + maskpri = PyLong_AsLong(arg); + if (maskpri == -1 && PyErr_Occurred()) { + goto exit; + } + _return_value = syslog_setlogmask_impl(module, maskpri); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong(_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(syslog_LOG_MASK__doc__, +"LOG_MASK($module, pri, /)\n" +"--\n" +"\n" +"Calculates the mask for the 
individual priority pri."); + +#define SYSLOG_LOG_MASK_METHODDEF \ + {"LOG_MASK", (PyCFunction)syslog_LOG_MASK, METH_O, syslog_LOG_MASK__doc__}, + +static long +syslog_LOG_MASK_impl(PyObject *module, long pri); + +static PyObject * +syslog_LOG_MASK(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + long pri; + long _return_value; + + pri = PyLong_AsLong(arg); + if (pri == -1 && PyErr_Occurred()) { + goto exit; + } + _return_value = syslog_LOG_MASK_impl(module, pri); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong(_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(syslog_LOG_UPTO__doc__, +"LOG_UPTO($module, pri, /)\n" +"--\n" +"\n" +"Calculates the mask for all priorities up to and including pri."); + +#define SYSLOG_LOG_UPTO_METHODDEF \ + {"LOG_UPTO", (PyCFunction)syslog_LOG_UPTO, METH_O, syslog_LOG_UPTO__doc__}, + +static long +syslog_LOG_UPTO_impl(PyObject *module, long pri); + +static PyObject * +syslog_LOG_UPTO(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + long pri; + long _return_value; + + pri = PyLong_AsLong(arg); + if (pri == -1 && PyErr_Occurred()) { + goto exit; + } + _return_value = syslog_LOG_UPTO_impl(module, pri); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong(_return_value); + +exit: + return return_value; +} +/*[clinic end generated code: output=3b1bdb16565b8fda input=a9049054013a1b77]*/ diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h index a04b954a57f353..65412b2435ade1 100644 --- a/Modules/clinic/zlibmodule.c.h +++ b/Modules/clinic/zlibmodule.c.h @@ -897,6 +897,104 @@ zlib_Decompress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args return return_value; } +PyDoc_STRVAR(zlib_ZlibDecompressor_decompress__doc__, +"decompress($self, /, data, max_length=-1)\n" +"--\n" +"\n" +"Decompress *data*, returning uncompressed data as bytes.\n" +"\n" +"If *max_length* is nonnegative, returns at most *max_length* bytes of\n" +"decompressed data. If this limit is reached and further output can be\n" +"produced, *self.needs_input* will be set to ``False``. In this case, the next\n" +"call to *decompress()* may provide *data* as b\'\' to obtain more of the output.\n" +"\n" +"If all of the input data was decompressed and returned (either because this\n" +"was less than *max_length* bytes, or because *max_length* was negative),\n" +"*self.needs_input* will be set to True.\n" +"\n" +"Attempting to decompress data after the end of stream is reached raises an\n" +"EOFError. 
Any data found after the end of the stream is ignored and saved in\n" +"the unused_data attribute."); + +#define ZLIB_ZLIBDECOMPRESSOR_DECOMPRESS_METHODDEF \ + {"decompress", _PyCFunction_CAST(zlib_ZlibDecompressor_decompress), METH_FASTCALL|METH_KEYWORDS, zlib_ZlibDecompressor_decompress__doc__}, + +static PyObject * +zlib_ZlibDecompressor_decompress_impl(ZlibDecompressor *self, + Py_buffer *data, Py_ssize_t max_length); + +static PyObject * +zlib_ZlibDecompressor_decompress(ZlibDecompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(data), &_Py_ID(max_length), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"data", "max_length", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "decompress", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + Py_buffer data = {NULL, NULL}; + Py_ssize_t max_length = -1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + if (PyObject_GetBuffer(args[0], &data, PyBUF_SIMPLE) != 0) { + goto exit; + } + if (!PyBuffer_IsContiguous(&data, 'C')) { + _PyArg_BadArgument("decompress", "argument 'data'", "contiguous buffer", args[0]); + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(args[1]); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival == -1 && PyErr_Occurred()) { + goto exit; + } + max_length = ival; + } +skip_optional_pos: + return_value = zlib_ZlibDecompressor_decompress_impl(self, &data, max_length); + +exit: + /* Cleanup for data */ + if (data.obj) { + PyBuffer_Release(&data); + } + + return return_value; +} + PyDoc_STRVAR(zlib_adler32__doc__, "adler32($module, data, value=1, /)\n" "--\n" @@ -1031,4 +1129,4 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #ifndef ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */ -/*[clinic end generated code: output=9e5f9911d0c273e1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=57ff7b511ab23132 input=a9049054013a1b77]*/ diff --git a/Modules/expat/expat.h b/Modules/expat/expat.h index 2b47ce2a8d3a6a..1c83563cbf68e7 100644 --- a/Modules/expat/expat.h +++ b/Modules/expat/expat.h @@ -1054,8 +1054,8 @@ XML_SetBillionLaughsAttackProtectionActivationThreshold( See http://semver.org. 
*/ #define XML_MAJOR_VERSION 2 -#define XML_MINOR_VERSION 4 -#define XML_MICRO_VERSION 9 +#define XML_MINOR_VERSION 5 +#define XML_MICRO_VERSION 0 #ifdef __cplusplus } diff --git a/Modules/expat/xmlparse.c b/Modules/expat/xmlparse.c index c0bece51d7009e..b6c2eca97567ba 100644 --- a/Modules/expat/xmlparse.c +++ b/Modules/expat/xmlparse.c @@ -1,4 +1,4 @@ -/* 90815a2b2c80c03b2b889fe1d427bb2b9e3282aa065e42784e001db4f23de324 (2.4.9+) +/* 5ab094ffadd6edfc94c3eee53af44a86951f9f1f0933ada3114bbce2bfb02c99 (2.5.0+) __ __ _ ___\ \/ /_ __ __ _| |_ / _ \\ /| '_ \ / _` | __| @@ -35,6 +35,7 @@ Copyright (c) 2021 Dong-hee Na Copyright (c) 2022 Samanta Navarro Copyright (c) 2022 Jeffrey Walton + Copyright (c) 2022 Jann Horn Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -1068,6 +1069,14 @@ parserCreate(const XML_Char *encodingName, parserInit(parser, encodingName); if (encodingName && ! parser->m_protocolEncodingName) { + if (dtd) { + // We need to stop the upcoming call to XML_ParserFree from happily + // destroying parser->m_dtd because the DTD is shared with the parent + // parser and the only guard that keeps XML_ParserFree from destroying + // parser->m_dtd is parser->m_isParamEntity but it will be set to + // XML_TRUE only later in XML_ExternalEntityParserCreate (or not at all). + parser->m_dtd = NULL; + } XML_ParserFree(parser); return NULL; } @@ -3011,9 +3020,6 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc, int len; const char *rawName; TAG *tag = parser->m_tagStack; - parser->m_tagStack = tag->parent; - tag->parent = parser->m_freeTagList; - parser->m_freeTagList = tag; rawName = s + enc->minBytesPerChar * 2; len = XmlNameLength(enc, rawName); if (len != tag->rawNameLength @@ -3021,6 +3027,9 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc, *eventPP = rawName; return XML_ERROR_TAG_MISMATCH; } + parser->m_tagStack = tag->parent; + tag->parent = parser->m_freeTagList; + parser->m_freeTagList = tag; --parser->m_tagLevel; if (parser->m_endElementHandler) { const XML_Char *localPart; @@ -4975,10 +4984,10 @@ doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end, parser->m_handlerArg, parser->m_declElementType->name, parser->m_declAttributeId->name, parser->m_declAttributeType, 0, role == XML_ROLE_REQUIRED_ATTRIBUTE_VALUE); - poolClear(&parser->m_tempPool); handleDefault = XML_FALSE; } } + poolClear(&parser->m_tempPool); break; case XML_ROLE_DEFAULT_ATTRIBUTE_VALUE: case XML_ROLE_FIXED_ATTRIBUTE_VALUE: @@ -5386,7 +5395,7 @@ doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end, * * If 'standalone' is false, the DTD must have no * parameter entities or we wouldn't have passed the outer - * 'if' statement. That measn the only entity in the hash + * 'if' statement. 
That means the only entity in the hash * table is the external subset name "#" which cannot be * given as a parameter entity name in XML syntax, so the * lookup must have returned NULL and we don't even reach @@ -5798,19 +5807,27 @@ internalEntityProcessor(XML_Parser parser, const char *s, const char *end, if (result != XML_ERROR_NONE) return result; - else if (textEnd != next - && parser->m_parsingStatus.parsing == XML_SUSPENDED) { + + if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) { entity->processed = (int)(next - (const char *)entity->textPtr); return result; - } else { + } + #ifdef XML_DTD - entityTrackingOnClose(parser, entity, __LINE__); + entityTrackingOnClose(parser, entity, __LINE__); #endif - entity->open = XML_FALSE; - parser->m_openInternalEntities = openEntity->next; - /* put openEntity back in list of free instances */ - openEntity->next = parser->m_freeInternalEntities; - parser->m_freeInternalEntities = openEntity; + entity->open = XML_FALSE; + parser->m_openInternalEntities = openEntity->next; + /* put openEntity back in list of free instances */ + openEntity->next = parser->m_freeInternalEntities; + parser->m_freeInternalEntities = openEntity; + + // If there are more open entities we want to stop right here and have the + // upcoming call to XML_ResumeParser continue with entity content, or it would + // be ignored altogether. + if (parser->m_openInternalEntities != NULL + && parser->m_parsingStatus.parsing == XML_SUSPENDED) { + return XML_ERROR_NONE; } #ifdef XML_DTD diff --git a/Modules/expat/xmltok_impl.h b/Modules/expat/xmltok_impl.h index c518aada013df2..3469c4ae138c95 100644 --- a/Modules/expat/xmltok_impl.h +++ b/Modules/expat/xmltok_impl.h @@ -45,7 +45,7 @@ enum { BT_LF, /* line feed = "\n" */ BT_GT, /* greater than = ">" */ BT_QUOT, /* quotation character = "\"" */ - BT_APOS, /* aposthrophe = "'" */ + BT_APOS, /* apostrophe = "'" */ BT_EQUALS, /* equal sign = "=" */ BT_QUEST, /* question mark = "?" */ BT_EXCL, /* exclamation mark = "!" 
*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index 2f6a2198ab3e32..fa4c2d0cccd1b6 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -333,14 +333,17 @@ faulthandler_fatal_error(int signum) size_t i; fault_handler_t *handler = NULL; int save_errno = errno; + int found = 0; if (!fatal_error.enabled) return; for (i=0; i < faulthandler_nsignals; i++) { handler = &faulthandler_handlers[i]; - if (handler->signum == signum) + if (handler->signum == signum) { + found = 1; break; + } } if (handler == NULL) { /* faulthandler_nsignals == 0 (unlikely) */ @@ -350,9 +353,18 @@ faulthandler_fatal_error(int signum) /* restore the previous handler */ faulthandler_disable_fatal_handler(handler); - PUTS(fd, "Fatal Python error: "); - PUTS(fd, handler->name); - PUTS(fd, "\n\n"); + if (found) { + PUTS(fd, "Fatal Python error: "); + PUTS(fd, handler->name); + PUTS(fd, "\n\n"); + } + else { + char unknown_signum[23] = {0,}; + snprintf(unknown_signum, 23, "%d", signum); + PUTS(fd, "Fatal Python error from unexpected signum: "); + PUTS(fd, unknown_signum); + PUTS(fd, "\n\n"); + } faulthandler_dump_traceback(fd, fatal_error.all_threads, fatal_error.interp); diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 97cb6e6e1efb1f..cacfad7335634c 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1870,8 +1870,7 @@ gc_is_tracked(PyObject *module, PyObject *obj) result = Py_True; else result = Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /*[clinic input] @@ -2252,6 +2251,20 @@ PyObject_IS_GC(PyObject *obj) return _PyObject_IS_GC(obj); } +void +_Py_ScheduleGC(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + if (gcstate->collecting == 1) { + return; + } + struct _ceval_state *ceval = &interp->ceval; + if (!_Py_atomic_load_relaxed(&ceval->gc_scheduled)) { + _Py_atomic_store_relaxed(&ceval->gc_scheduled, 1); + _Py_atomic_store_relaxed(&ceval->eval_breaker, 1); + } +} + void _PyObject_GC_Link(PyObject *op) { @@ -2269,12 +2282,19 @@ _PyObject_GC_Link(PyObject *op) !gcstate->collecting && !_PyErr_Occurred(tstate)) { - gcstate->collecting = 1; - gc_collect_generations(tstate); - gcstate->collecting = 0; + _Py_ScheduleGC(tstate->interp); } } +void +_Py_RunGC(PyThreadState *tstate) +{ + GCState *gcstate = &tstate->interp->gc; + gcstate->collecting = 1; + gc_collect_generations(tstate); + gcstate->collecting = 0; +} + static PyObject * gc_alloc(size_t basicsize, size_t presize) { diff --git a/Modules/getbuildinfo.c b/Modules/getbuildinfo.c index 7cb7397a22c8ab..a24750b76c09bc 100644 --- a/Modules/getbuildinfo.c +++ b/Modules/getbuildinfo.c @@ -31,12 +31,18 @@ #define GITBRANCH "" #endif +static int initialized = 0; +static char buildinfo[50 + sizeof(GITVERSION) + + ((sizeof(GITTAG) > sizeof(GITBRANCH)) ? + sizeof(GITTAG) : sizeof(GITBRANCH))]; + const char * Py_GetBuildInfo(void) { - static char buildinfo[50 + sizeof(GITVERSION) + - ((sizeof(GITTAG) > sizeof(GITBRANCH)) ? - sizeof(GITTAG) : sizeof(GITBRANCH))]; + if (initialized) { + return buildinfo; + } + initialized = 1; const char *revision = _Py_gitversion(); const char *sep = *revision ? ":" : ""; const char *gitid = _Py_gitidentifier(); diff --git a/Modules/getpath.c b/Modules/getpath.c index ceacf36d8968a1..13db010649fed8 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -125,8 +125,7 @@ getpath_isabs(PyObject *Py_UNUSED(self), PyObject *args) r = _Py_isabs(path) ? 
Py_True : Py_False; PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -153,11 +152,10 @@ getpath_hassuffix(PyObject *Py_UNUSED(self), PyObject *args) wcscmp(&path[len - suffixLen], suffix) != 0 #endif ) { - r = Py_False; + r = Py_NewRef(Py_False); } else { - r = Py_True; + r = Py_NewRef(Py_True); } - Py_INCREF(r); PyMem_Free((void *)suffix); } PyMem_Free((void *)path); @@ -187,8 +185,7 @@ getpath_isdir(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -213,8 +210,7 @@ getpath_isfile(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -247,8 +243,7 @@ getpath_isxfile(PyObject *Py_UNUSED(self), PyObject *args) #endif PyMem_Free((void *)path); } - Py_XINCREF(r); - return r; + return Py_XNewRef(r); } @@ -488,8 +483,7 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) goto done; } if (!S_ISLNK(st.st_mode)) { - Py_INCREF(pathobj); - r = pathobj; + r = Py_NewRef(pathobj); goto done; } wchar_t resolved[MAXPATHLEN+1]; @@ -504,8 +498,7 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) return r; #endif - Py_INCREF(pathobj); - return pathobj; + return Py_NewRef(pathobj); } @@ -591,8 +584,7 @@ wchar_to_dict(PyObject *dict, const char *key, const wchar_t *s) return 0; } } else { - u = Py_None; - Py_INCREF(u); + u = Py_NewRef(Py_None); } r = PyDict_SetItemString(dict, key, u) == 0; Py_DECREF(u); @@ -617,8 +609,7 @@ decode_to_dict(PyObject *dict, const char *key, const char *s) return 0; } } else { - u = Py_None; - Py_INCREF(u); + u = Py_NewRef(Py_None); } r = PyDict_SetItemString(dict, key, u) == 0; Py_DECREF(u); diff --git a/Modules/getpath.py b/Modules/getpath.py index e3558bc49e389f..ab0d2dc0636ad4 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -375,6 +375,25 @@ def search_up(prefix, *landmarks, test=isfile): pass if not base_executable: base_executable = joinpath(executable_dir, basename(executable)) + # It's possible "python" is executed from within a posix venv but that + # "python" is not available in the "home" directory as the standard + # `make install` does not create it and distros often do not provide it. + # + # In this case, try to fall back to known alternatives + if os_name != 'nt' and not isfile(base_executable): + base_exe = basename(executable) + for candidate in (DEFAULT_PROGRAM_NAME, f'python{VERSION_MAJOR}.{VERSION_MINOR}'): + candidate += EXE_SUFFIX if EXE_SUFFIX else '' + if base_exe == candidate: + continue + candidate = joinpath(executable_dir, candidate) + # Only set base_executable if the candidate exists. + # If no candidate succeeds, subsequent errors related to + # base_executable (like FileNotFoundError) remain in the + # context of the original executable name + if isfile(candidate): + base_executable = candidate + break break else: venv_prefix = None @@ -579,15 +598,28 @@ def search_up(prefix, *landmarks, test=isfile): # Detect exec_prefix by searching from executable for the platstdlib_dir if PLATSTDLIB_LANDMARK and not exec_prefix: if executable_dir: - exec_prefix = search_up(executable_dir, PLATSTDLIB_LANDMARK, test=isdir) - if not exec_prefix: - if EXEC_PREFIX: - exec_prefix = EXEC_PREFIX - if not isdir(joinpath(exec_prefix, PLATSTDLIB_LANDMARK)): - warn('Could not find platform dependent libraries ') + if os_name == 'nt': + # QUIRK: For compatibility and security, do not search for DLLs + # directory. 
The fallback below will cover it + exec_prefix = executable_dir + else: + exec_prefix = search_up(executable_dir, PLATSTDLIB_LANDMARK, test=isdir) + if not exec_prefix and EXEC_PREFIX: + exec_prefix = EXEC_PREFIX + if not exec_prefix or not isdir(joinpath(exec_prefix, PLATSTDLIB_LANDMARK)): + if os_name == 'nt': + # QUIRK: If DLLs is missing on Windows, don't warn, just assume + # that it's all the same as prefix. + # gh-98790: We set platstdlib_dir here to avoid adding "DLLs" into + # sys.path when it doesn't exist, which would give site-packages + # precedence over executable_dir, which is *probably* where our PYDs + # live. Ideally, whoever changes our layout will tell us what the + # layout is, but in the past this worked, so it should keep working. + platstdlib_dir = exec_prefix = prefix else: warn('Could not find platform dependent libraries ') + # Fallback: assume exec_prefix == prefix if not exec_prefix: exec_prefix = prefix @@ -647,9 +679,8 @@ def search_up(prefix, *landmarks, test=isfile): else: library_dir = executable_dir pythonpath.append(joinpath(library_dir, ZIP_LANDMARK)) - elif build_prefix or venv_prefix: + elif build_prefix: # QUIRK: POSIX uses the default prefix when in the build directory - # or a venv pythonpath.append(joinpath(PREFIX, ZIP_LANDMARK)) else: pythonpath.append(joinpath(prefix, ZIP_LANDMARK)) @@ -689,7 +720,8 @@ def search_up(prefix, *landmarks, test=isfile): pythonpath.append(platstdlib_dir) if stdlib_dir: pythonpath.append(stdlib_dir) - pythonpath.append(executable_dir) + if executable_dir not in pythonpath: + pythonpath.append(executable_dir) else: if stdlib_dir: pythonpath.append(stdlib_dir) diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 4a7a95730395e6..4b0a4d88c435c6 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -16,6 +16,7 @@ class itertools.groupby "groupbyobject *" "&groupby_type" class itertools._grouper "_grouperobject *" "&_grouper_type" class itertools.teedataobject "teedataobject *" "&teedataobject_type" class itertools._tee "teeobject *" "&tee_type" +class itertools.batched "batchedobject *" "&batched_type" class itertools.cycle "cycleobject *" "&cycle_type" class itertools.dropwhile "dropwhileobject *" "&dropwhile_type" class itertools.takewhile "takewhileobject *" "&takewhile_type" @@ -30,12 +31,13 @@ class itertools.filterfalse "filterfalseobject *" "&filterfalse_type" class itertools.count "countobject *" "&count_type" class itertools.pairwise "pairwiseobject *" "&pairwise_type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6498ed21fbe1bf94]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=1168b274011ce21b]*/ static PyTypeObject groupby_type; static PyTypeObject _grouper_type; static PyTypeObject teedataobject_type; static PyTypeObject tee_type; +static PyTypeObject batched_type; static PyTypeObject cycle_type; static PyTypeObject dropwhile_type; static PyTypeObject takewhile_type; @@ -51,6 +53,183 @@ static PyTypeObject pairwise_type; #include "clinic/itertoolsmodule.c.h" +/* batched object ************************************************************/ + +/* Note: The built-in zip() function includes a "strict" argument + that is needed because that function can silently truncate data + and there is no easy way for a user to detect that condition. + The same reasoning does not apply to batched() which never drops + data. Instead, it produces a shorter list which can be handled + as the user sees fit. 
+ */ + +typedef struct { + PyObject_HEAD + PyObject *it; + Py_ssize_t batch_size; +} batchedobject; + +/*[clinic input] +@classmethod +itertools.batched.__new__ as batched_new + iterable: object + n: Py_ssize_t +Batch data into lists of length n. The last batch may be shorter than n. + +Loops over the input iterable and accumulates data into lists +up to size n. The input is consumed lazily, just enough to +fill a list. The result is yielded as soon as a batch is full +or when the input iterable is exhausted. + + >>> for batch in batched('ABCDEFG', 3): + ... print(batch) + ... + ['A', 'B', 'C'] + ['D', 'E', 'F'] + ['G'] + +[clinic start generated code]*/ + +static PyObject * +batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n) +/*[clinic end generated code: output=7ebc954d655371b6 input=f28fd12cb52365f0]*/ +{ + PyObject *it; + batchedobject *bo; + + if (n < 1) { + /* We could define the n==0 case to return an empty iterator + but that is at odds with the idea that batching should + never throw-away input data. + */ + PyErr_SetString(PyExc_ValueError, "n must be at least one"); + return NULL; + } + it = PyObject_GetIter(iterable); + if (it == NULL) { + return NULL; + } + + /* create batchedobject structure */ + bo = (batchedobject *)type->tp_alloc(type, 0); + if (bo == NULL) { + Py_DECREF(it); + return NULL; + } + bo->batch_size = n; + bo->it = it; + return (PyObject *)bo; +} + +static void +batched_dealloc(batchedobject *bo) +{ + PyObject_GC_UnTrack(bo); + Py_XDECREF(bo->it); + Py_TYPE(bo)->tp_free(bo); +} + +static int +batched_traverse(batchedobject *bo, visitproc visit, void *arg) +{ + if (bo->it != NULL) { + Py_VISIT(bo->it); + } + return 0; +} + +static PyObject * +batched_next(batchedobject *bo) +{ + Py_ssize_t i; + Py_ssize_t n = bo->batch_size; + PyObject *it = bo->it; + PyObject *item; + PyObject *result; + + if (it == NULL) { + return NULL; + } + result = PyList_New(n); + if (result == NULL) { + return NULL; + } + iternextfunc iternext = *Py_TYPE(it)->tp_iternext; + PyObject **items = _PyList_ITEMS(result); + for (i=0 ; i < n ; i++) { + item = iternext(it); + if (item == NULL) { + goto null_item; + } + items[i] = item; + } + return result; + + null_item: + if (PyErr_Occurred()) { + if (!PyErr_ExceptionMatches(PyExc_StopIteration)) { + /* Input raised an exception other than StopIteration */ + Py_CLEAR(bo->it); + Py_DECREF(result); + return NULL; + } + PyErr_Clear(); + } + if (i == 0) { + Py_CLEAR(bo->it); + Py_DECREF(result); + return NULL; + } + /* Elements in result[i:] are still NULL */ + Py_SET_SIZE(result, i); + return result; +} + +static PyTypeObject batched_type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + "itertools.batched", /* tp_name */ + sizeof(batchedobject), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)batched_dealloc, /* tp_dealloc */ + 0, /* tp_vectorcall_offset */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_as_async */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ + batched_new__doc__, /* tp_doc */ + (traverseproc)batched_traverse, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)batched_next, /* tp_iternext */ + 0, /* tp_methods */ + 0, 
/* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + PyType_GenericAlloc, /* tp_alloc */ + batched_new, /* tp_new */ + PyObject_GC_Del, /* tp_free */ +}; + + /* pairwise object ***********************************************************/ typedef struct { @@ -222,8 +401,7 @@ itertools_groupby_impl(PyTypeObject *type, PyObject *it, PyObject *keyfunc) gbo->tgtkey = NULL; gbo->currkey = NULL; gbo->currvalue = NULL; - gbo->keyfunc = keyfunc; - Py_INCREF(keyfunc); + gbo->keyfunc = Py_NewRef(keyfunc); gbo->it = PyObject_GetIter(it); if (gbo->it == NULL) { Py_DECREF(gbo); @@ -265,8 +443,7 @@ groupby_step(groupbyobject *gbo) return -1; if (gbo->keyfunc == Py_None) { - newkey = newvalue; - Py_INCREF(newvalue); + newkey = Py_NewRef(newvalue); } else { newkey = PyObject_CallOneArg(gbo->keyfunc, newvalue); if (newkey == NULL) { @@ -446,10 +623,8 @@ _grouper_create(groupbyobject *parent, PyObject *tgtkey) igo = PyObject_GC_New(_grouperobject, &_grouper_type); if (igo == NULL) return NULL; - igo->parent = (PyObject *)parent; - Py_INCREF(parent); - igo->tgtkey = tgtkey; - Py_INCREF(tgtkey); + igo->parent = Py_NewRef(parent); + igo->tgtkey = Py_NewRef(tgtkey); parent->currgrouper = igo; /* borrowed reference */ PyObject_GC_Track(igo); @@ -600,8 +775,7 @@ teedataobject_newinternal(PyObject *it) tdo->running = 0; tdo->numread = 0; tdo->nextlink = NULL; - Py_INCREF(it); - tdo->it = it; + tdo->it = Py_NewRef(it); PyObject_GC_Track(tdo); return (PyObject *)tdo; } @@ -611,8 +785,7 @@ teedataobject_jumplink(teedataobject *tdo) { if (tdo->nextlink == NULL) tdo->nextlink = teedataobject_newinternal(tdo->it); - Py_XINCREF(tdo->nextlink); - return tdo->nextlink; + return Py_XNewRef(tdo->nextlink); } static PyObject * @@ -639,8 +812,7 @@ teedataobject_getitem(teedataobject *tdo, int i) tdo->numread++; tdo->values[i] = value; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } static int @@ -662,8 +834,7 @@ teedataobject_safe_decref(PyObject *obj) Py_REFCNT(obj) == 1) { PyObject *nextlink = ((teedataobject *)obj)->nextlink; ((teedataobject *)obj)->nextlink = NULL; - Py_DECREF(obj); - obj = nextlink; + Py_SETREF(obj, nextlink); } Py_XDECREF(obj); } @@ -748,8 +919,7 @@ itertools_teedataobject_impl(PyTypeObject *type, PyObject *it, if (!Py_IS_TYPE(next, &teedataobject_type)) goto err; assert(tdo->nextlink == NULL); - Py_INCREF(next); - tdo->nextlink = next; + tdo->nextlink = Py_NewRef(next); } } else { if (next != Py_None) @@ -847,8 +1017,7 @@ tee_copy(teeobject *to, PyObject *Py_UNUSED(ignored)) newto = PyObject_GC_New(teeobject, &tee_type); if (newto == NULL) return NULL; - Py_INCREF(to->dataobj); - newto->dataobj = to->dataobj; + newto->dataobj = (teedataobject*)Py_NewRef(to->dataobj); newto->index = to->index; newto->weakreflist = NULL; PyObject_GC_Track(newto); @@ -1164,8 +1333,7 @@ cycle_next(cycleobject *lz) lz->index++; if (lz->index >= PyList_GET_SIZE(lz->saved)) lz->index = 0; - Py_INCREF(item); - return item; + return Py_NewRef(item); } static PyObject * @@ -1301,8 +1469,7 @@ itertools_dropwhile_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; lz->start = 0; @@ -1464,8 +1631,7 @@ itertools_takewhile_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; 
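The new itertools.batched type defined above is consumed through the ordinary iterator protocol. As a rough illustration only (not part of the patch; the helper name demo_batched and the abbreviated error handling are assumptions), driving it from C might look like this:

#include <Python.h>

/* Illustrative only: iterate itertools.batched('ABCDEFG', 3) from C. */
static int
demo_batched(void)
{
    PyObject *itertools = PyImport_ImportModule("itertools");
    if (itertools == NULL) {
        return -1;
    }
    PyObject *batched = PyObject_GetAttrString(itertools, "batched");
    Py_DECREF(itertools);
    if (batched == NULL) {
        return -1;
    }
    PyObject *it = PyObject_CallFunction(batched, "si", "ABCDEFG", 3);
    Py_DECREF(batched);
    if (it == NULL) {
        return -1;
    }
    PyObject *batch;
    while ((batch = PyIter_Next(it)) != NULL) {
        /* prints ['A', 'B', 'C'], then ['D', 'E', 'F'], then ['G'] */
        PyObject_Print(batch, stdout, 0);
        fputc('\n', stdout);
        Py_DECREF(batch);
    }
    Py_DECREF(it);
    return PyErr_Occurred() ? -1 : 0;
}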
lz->stop = 0; @@ -1761,8 +1927,7 @@ islice_reduce(isliceobject *lz, PyObject *Py_UNUSED(ignored)) return Py_BuildValue("O(Nn)n", Py_TYPE(lz), empty_it, 0, 0); } if (lz->stop == -1) { - stop = Py_None; - Py_INCREF(stop); + stop = Py_NewRef(Py_None); } else { stop = PyLong_FromSsize_t(lz->stop); if (stop == NULL) @@ -1883,8 +2048,7 @@ itertools_starmap_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; return (PyObject *)lz; @@ -2416,8 +2580,7 @@ product_next(productobject *lz) goto empty; } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: lz->stopped = 1; @@ -2747,8 +2910,7 @@ combinations_next(combinationsobject *co) } } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: co->stopped = 1; @@ -3078,8 +3240,7 @@ cwr_next(cwrobject *co) } } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: co->stopped = 1; @@ -3436,8 +3597,7 @@ permutations_next(permutationsobject *po) if (i < 0) goto empty; } - Py_INCREF(result); - return result; + return Py_NewRef(result); empty: po->stopped = 1; @@ -3640,13 +3800,11 @@ itertools_accumulate_impl(PyTypeObject *type, PyObject *iterable, } if (binop != Py_None) { - Py_XINCREF(binop); - lz->binop = binop; + lz->binop = Py_XNewRef(binop); } lz->total = NULL; lz->it = it; - Py_XINCREF(initial); - lz->initial = initial; + lz->initial = Py_XNewRef(initial); return (PyObject *)lz; } @@ -3678,18 +3836,15 @@ accumulate_next(accumulateobject *lz) if (lz->initial != Py_None) { lz->total = lz->initial; - Py_INCREF(Py_None); - lz->initial = Py_None; - Py_INCREF(lz->total); - return lz->total; + lz->initial = Py_NewRef(Py_None); + return Py_NewRef(lz->total); } val = (*Py_TYPE(lz->it)->tp_iternext)(lz->it); if (val == NULL) return NULL; if (lz->total == NULL) { - Py_INCREF(val); - lz->total = val; + lz->total = Py_NewRef(val); return lz->total; } @@ -4007,8 +4162,7 @@ itertools_filterfalse_impl(PyTypeObject *type, PyObject *func, PyObject *seq) Py_DECREF(it); return NULL; } - Py_INCREF(func); - lz->func = func; + lz->func = Py_NewRef(func); lz->it = it; return (PyObject *)lz; @@ -4402,8 +4556,7 @@ repeat_new(PyTypeObject *type, PyObject *args, PyObject *kwds) ro = (repeatobject *)type->tp_alloc(type, 0); if (ro == NULL) return NULL; - Py_INCREF(element); - ro->element = element; + ro->element = Py_NewRef(element); ro->cnt = cnt; return (PyObject *)ro; } @@ -4430,8 +4583,7 @@ repeat_next(repeatobject *ro) return NULL; if (ro->cnt > 0) ro->cnt--; - Py_INCREF(ro->element); - return ro->element; + return Py_NewRef(ro->element); } static PyObject * @@ -4603,8 +4755,7 @@ zip_longest_new(PyTypeObject *type, PyObject *args, PyObject *kwds) lz->tuplesize = tuplesize; lz->numactive = tuplesize; lz->result = result; - Py_INCREF(fillvalue); - lz->fillvalue = fillvalue; + lz->fillvalue = Py_NewRef(fillvalue); return (PyObject *)lz; } @@ -4646,8 +4797,7 @@ zip_longest_next(ziplongestobject *lz) for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); if (it == NULL) { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); } else { item = PyIter_Next(it); if (item == NULL) { @@ -4657,8 +4807,7 @@ zip_longest_next(ziplongestobject *lz) Py_DECREF(result); return NULL; } else { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); PyTuple_SET_ITEM(lz->ittuple, i, NULL); Py_DECREF(it); } @@ -4680,8 +4829,7 @@ 
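For reference, the mechanical conversions in the surrounding hunks rely on three small reference-counting helpers. A minimal sketch of their behaviour follows; the my_/MY_ names are placeholders, not CPython API, and the real definitions live in CPython's public headers:

#include <Python.h>

static inline PyObject *
my_newref(PyObject *obj)
{
    Py_INCREF(obj);              /* Py_NewRef(): take a new strong reference */
    return obj;
}

static inline PyObject *
my_xnewref(PyObject *obj)
{
    Py_XINCREF(obj);             /* Py_XNewRef(): same, but obj may be NULL */
    return obj;
}

/* Py_SETREF(dst, src): store src into dst, then release the old reference.
   Saving the old pointer before the DECREF means dst never points at a
   freed object while a destructor runs arbitrary code. */
#define MY_SETREF(dst, src)                     \
    do {                                        \
        PyObject *_tmp = (PyObject *)(dst);     \
        (dst) = (src);                          \
        Py_DECREF(_tmp);                        \
    } while (0)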
zip_longest_next(ziplongestobject *lz) for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); if (it == NULL) { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); } else { item = PyIter_Next(it); if (item == NULL) { @@ -4691,8 +4839,7 @@ zip_longest_next(ziplongestobject *lz) Py_DECREF(result); return NULL; } else { - Py_INCREF(lz->fillvalue); - item = lz->fillvalue; + item = Py_NewRef(lz->fillvalue); PyTuple_SET_ITEM(lz->ittuple, i, NULL); Py_DECREF(it); } @@ -4815,6 +4962,7 @@ repeat(elem [,n]) --> elem, elem, elem, ... endlessly or up to n times\n\ \n\ Iterators terminating on the shortest input sequence:\n\ accumulate(p[, func]) --> p0, p0+p1, p0+p1+p2\n\ +batched(p, n) --> [p0, p1, ..., p_n-1], [p_n, p_n+1, ..., p_2n-1], ...\n\ chain(p, q, ...) --> p0, p1, ... plast, q0, q1, ...\n\ chain.from_iterable([p, q, ...]) --> p0, p1, ... plast, q0, q1, ...\n\ compress(data, selectors) --> (d[0] if s[0]), (d[1] if s[1]), ...\n\ @@ -4841,6 +4989,7 @@ itertoolsmodule_exec(PyObject *m) { PyTypeObject *typelist[] = { &accumulate_type, + &batched_type, &combinations_type, &cwr_type, &cycle_type, diff --git a/Modules/main.c b/Modules/main.c index aa523fc58d93fb..7edfeb3365b4c6 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -296,10 +296,10 @@ pymain_run_module(const wchar_t *modname, int set_argv0) Py_DECREF(module); return pymain_exit_err_print(); } - _Py_UnhandledKeyboardInterrupt = 0; + _PyRuntime.signals.unhandled_keyboard_interrupt = 0; result = PyObject_Call(runmodule, runargs, NULL); if (!result && PyErr_Occurred() == PyExc_KeyboardInterrupt) { - _Py_UnhandledKeyboardInterrupt = 1; + _PyRuntime.signals.unhandled_keyboard_interrupt = 1; } Py_DECREF(runpy); Py_DECREF(runmodule); @@ -696,7 +696,7 @@ Py_RunMain(void) pymain_free(); - if (_Py_UnhandledKeyboardInterrupt) { + if (_PyRuntime.signals.unhandled_keyboard_interrupt) { exitcode = exit_sigint(); } diff --git a/Modules/makesetup b/Modules/makesetup index 5c275ac9a04945..f000c9cd67310e 100755 --- a/Modules/makesetup +++ b/Modules/makesetup @@ -262,12 +262,15 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | esac # custom flags first, PY_STDMODULE_CFLAGS may contain -I with system libmpdec case $doconfig in - no) cc="$cc $cpps \$(PY_STDMODULE_CFLAGS) \$(CCSHARED)";; + no) + cc="$cc $cpps \$(PY_STDMODULE_CFLAGS) \$(CCSHARED)" + rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS_SHARED) \$(PYTHON_HEADERS); $cc -c $src -o $obj" + ;; *) - cc="$cc $cpps \$(PY_BUILTIN_MODULE_CFLAGS)";; + cc="$cc $cpps \$(PY_BUILTIN_MODULE_CFLAGS)" + rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS_STATIC) \$(PYTHON_HEADERS); $cc -c $src -o $obj" + ;; esac - # force rebuild when header file or module build flavor (static/shared) is changed - rule="$obj: $src \$(MODULE_${mods_upper}_DEPS) \$(MODULE_DEPS); $cc -c $src -o $obj" echo "$rule" >>$rulesf done case $doconfig in diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 48625c8c18d70e..49c0293d4f5ce3 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -2048,8 +2048,7 @@ factorial_odd_part(unsigned long n) inner = PyLong_FromLong(1); if (inner == NULL) return NULL; - outer = inner; - Py_INCREF(outer); + outer = Py_NewRef(inner); upper = 3; for (i = _Py_bit_length(n) - 2; i >= 0; i--) { @@ -2070,8 +2069,7 @@ factorial_odd_part(unsigned long n) Py_DECREF(partial); if (tmp == NULL) goto error; - Py_DECREF(inner); - inner = tmp; + Py_SETREF(inner, tmp); /* Now inner is the product of all odd integers j in the range (0, 
n/2**i], giving the inner product in the formula above. */ @@ -2079,8 +2077,7 @@ factorial_odd_part(unsigned long n) tmp = PyNumber_Multiply(outer, inner); if (tmp == NULL) goto error; - Py_DECREF(outer); - outer = tmp; + Py_SETREF(outer, tmp); } Py_DECREF(inner); return outer; @@ -2717,13 +2714,13 @@ math_dist_impl(PyObject *module, PyObject *p, PyObject *q) if (m != n) { PyErr_SetString(PyExc_ValueError, "both points must have the same number of dimensions"); - return NULL; - + goto error_exit; } if (n > NUM_STACK_ELEMS) { diffs = (double *) PyObject_Malloc(n * sizeof(double)); if (diffs == NULL) { - return PyErr_NoMemory(); + PyErr_NoMemory(); + goto error_exit; } } for (i=0 ; idata = mmap(NULL, map_size, - prot, flags, - fd, offset); + Py_BEGIN_ALLOW_THREADS + m_obj->data = mmap(NULL, map_size, prot, flags, fd, offset); + Py_END_ALLOW_THREADS if (devzero != -1) { close(devzero); diff --git a/Modules/nismodule.c b/Modules/nismodule.c index 39b991162b2761..ec7f6d8031e84b 100644 --- a/Modules/nismodule.c +++ b/Modules/nismodule.c @@ -458,8 +458,7 @@ nis_maps (PyObject *module, PyObject *args, PyObject *kwdict) if (!str || PyList_Append(list, str) < 0) { Py_XDECREF(str); - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } Py_DECREF(str); diff --git a/Modules/ossaudiodev.c b/Modules/ossaudiodev.c index 0568b6dc44103a..79f4ebad836c8b 100644 --- a/Modules/ossaudiodev.c +++ b/Modules/ossaudiodev.c @@ -20,7 +20,6 @@ #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif -#define NEEDS_PY_IDENTIFIER #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -535,16 +534,13 @@ oss_close(oss_audio_t *self, PyObject *unused) static PyObject * oss_self(PyObject *self, PyObject *unused) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * oss_exit(PyObject *self, PyObject *unused) { - _Py_IDENTIFIER(close); - - PyObject *ret = _PyObject_CallMethodIdNoArgs(self, &PyId_close); + PyObject *ret = PyObject_CallMethod(self, "close", NULL); if (!ret) return NULL; Py_DECREF(ret); @@ -1138,10 +1134,8 @@ PyInit_ossaudiodev(void) NULL, NULL); if (OSSAudioError) { /* Each call to PyModule_AddObject decrefs it; compensate: */ - Py_INCREF(OSSAudioError); - Py_INCREF(OSSAudioError); - PyModule_AddObject(m, "error", OSSAudioError); - PyModule_AddObject(m, "OSSAudioError", OSSAudioError); + PyModule_AddObject(m, "error", Py_NewRef(OSSAudioError)); + PyModule_AddObject(m, "OSSAudioError", Py_NewRef(OSSAudioError)); } /* Build 'control_labels' and 'control_names' lists and add them diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 369b1beae84e3f..02c0f401be4c9e 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -912,8 +912,7 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) _PyBytes_Resize(&self->allocated_buffer, transferred)) return NULL; - Py_INCREF(self->allocated_buffer); - return self->allocated_buffer; + return Py_NewRef(self->allocated_buffer); case TYPE_READ_FROM: assert(PyBytes_CheckExact(self->read_from.allocated_buffer)); @@ -940,14 +939,12 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) } // first item: message - Py_INCREF(self->read_from.allocated_buffer); PyTuple_SET_ITEM(self->read_from.result, 0, - self->read_from.allocated_buffer); + Py_NewRef(self->read_from.allocated_buffer)); // second item: address PyTuple_SET_ITEM(self->read_from.result, 1, addr); - Py_INCREF(self->read_from.result); - return self->read_from.result; + return Py_NewRef(self->read_from.result); case 
TYPE_READ_FROM_INTO: // unparse the address addr = unparse_address((SOCKADDR*)&self->read_from_into.address, @@ -970,8 +967,7 @@ _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait) // second item: address PyTuple_SET_ITEM(self->read_from_into.result, 1, addr); - Py_INCREF(self->read_from_into.result); - return self->read_from_into.result; + return Py_NewRef(self->read_from_into.result); default: return PyLong_FromUnsignedLong((unsigned long) transferred); } @@ -1674,7 +1670,7 @@ Overlapped_traverse(OverlappedObject *self, visitproc visit, void *arg) _overlapped.WSAConnect client_handle as ConnectSocket: HANDLE - address_as_bytes as AddressObj: object + address_as_bytes as AddressObj: object(subclass_of='&PyTuple_Type') / Bind a remote address to a connectionless (UDP) socket. @@ -1683,7 +1679,7 @@ Bind a remote address to a connectionless (UDP) socket. static PyObject * _overlapped_WSAConnect_impl(PyObject *module, HANDLE ConnectSocket, PyObject *AddressObj) -/*[clinic end generated code: output=ea0b4391e94dad63 input=169f8075e9ae7fa4]*/ +/*[clinic end generated code: output=ea0b4391e94dad63 input=7cf65313d49c015a]*/ { char AddressBuf[sizeof(struct sockaddr_in6)]; SOCKADDR *Address = (SOCKADDR*)AddressBuf; @@ -1717,7 +1713,7 @@ _overlapped.Overlapped.WSASendTo handle: HANDLE buf as bufobj: Py_buffer flags: DWORD - address_as_bytes as AddressObj: object + address_as_bytes as AddressObj: object(subclass_of='&PyTuple_Type') / Start overlapped sendto over a connectionless (UDP) socket. @@ -1727,7 +1723,7 @@ static PyObject * _overlapped_Overlapped_WSASendTo_impl(OverlappedObject *self, HANDLE handle, Py_buffer *bufobj, DWORD flags, PyObject *AddressObj) -/*[clinic end generated code: output=3cdedc4cfaeb70cd input=b7c1749a62e2e374]*/ +/*[clinic end generated code: output=3cdedc4cfaeb70cd input=31f44cd4ab92fc33]*/ { char AddressBuf[sizeof(struct sockaddr_in6)]; SOCKADDR *Address = (SOCKADDR*)AddressBuf; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 3810bc87c1fbab..95ecf1c7c4b28c 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -16,6 +16,9 @@ #ifdef MS_WINDOWS # include # include +# include // UNLEN +# include "osdefs.h" // SEP +# define HAVE_SYMLINK #endif #ifdef __VXWORKS__ @@ -91,6 +94,8 @@ # define HAVE_FUTIMENS_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) # define HAVE_UTIMENSAT_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) # define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) # define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) @@ -152,6 +157,18 @@ # define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) # endif +# ifdef HAVE_UTIMENSAT +# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) +# endif + +# ifdef HAVE_FUTIMENS +# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) +# endif + +# ifdef HAVE_PWRITEV +# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) +# endif + #endif #ifdef HAVE_FUTIMESAT @@ -175,6 +192,8 @@ # define HAVE_FUTIMENS_RUNTIME 1 # define HAVE_UTIMENSAT_RUNTIME 1 # define HAVE_PWRITEV_RUNTIME 1 +# define HAVE_MKFIFOAT_RUNTIME 1 +# define HAVE_MKNODAT_RUNTIME 1 #endif @@ -410,18 +429,7 @@ extern char *ctermid_r(char *); # ifdef HAVE_PROCESS_H # include # endif -# ifndef IO_REPARSE_TAG_SYMLINK -# 
define IO_REPARSE_TAG_SYMLINK (0xA000000CL) -# endif -# ifndef IO_REPARSE_TAG_MOUNT_POINT -# define IO_REPARSE_TAG_MOUNT_POINT (0xA0000003L) -# endif -# include "osdefs.h" // SEP # include -# include -# include // ShellExecute() -# include // UNLEN -# define HAVE_SYMLINK #endif /* _MSC_VER */ #ifndef MAXPATHLEN @@ -1116,7 +1124,7 @@ path_converter(PyObject *o, void *p) path_t *path = (path_t *)p; PyObject *bytes = NULL; Py_ssize_t length = 0; - int is_index, is_buffer, is_bytes, is_unicode; + int is_index, is_bytes, is_unicode; const char *narrow; #ifdef MS_WINDOWS PyObject *wo = NULL; @@ -1154,11 +1162,10 @@ path_converter(PyObject *o, void *p) /* Only call this here so that we don't treat the return value of os.fspath() as an fd or buffer. */ is_index = path->allow_fd && PyIndex_Check(o); - is_buffer = PyObject_CheckBuffer(o); is_bytes = PyBytes_Check(o); is_unicode = PyUnicode_Check(o); - if (!is_index && !is_buffer && !is_unicode && !is_bytes) { + if (!is_index && !is_unicode && !is_bytes) { /* Inline PyOS_FSPath() for better error messages. */ PyObject *func, *res; @@ -1187,8 +1194,7 @@ path_converter(PyObject *o, void *p) } /* still owns a reference to the original object */ - Py_DECREF(o); - o = res; + Py_SETREF(o, res); } if (is_unicode) { @@ -1218,29 +1224,7 @@ path_converter(PyObject *o, void *p) #endif } else if (is_bytes) { - bytes = o; - Py_INCREF(bytes); - } - else if (is_buffer) { - /* XXX Replace PyObject_CheckBuffer with PyBytes_Check in other code - after removing support of non-bytes buffer objects. */ - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "%s%s%s should be %s, not %.200s", - path->function_name ? path->function_name : "", - path->function_name ? ": " : "", - path->argument_name ? path->argument_name : "path", - path->allow_fd && path->nullable ? "string, bytes, os.PathLike, " - "integer or None" : - path->allow_fd ? "string, bytes, os.PathLike or integer" : - path->nullable ? "string, bytes, os.PathLike or None" : - "string, bytes or os.PathLike", - _PyType_Name(Py_TYPE(o)))) { - goto error_exit; - } - bytes = PyBytes_FromObject(o); - if (!bytes) { - goto error_exit; - } + bytes = Py_NewRef(o); } else if (is_index) { if (!_fd_converter(o, &path->fd)) { @@ -2258,8 +2242,7 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) for (i = 7; i <= 9; i++) { if (result->ob_item[i+3] == Py_None) { Py_DECREF(Py_None); - Py_INCREF(result->ob_item[i]); - result->ob_item[i+3] = result->ob_item[i]; + result->ob_item[i+3] = Py_NewRef(result->ob_item[i]); } } return (PyObject*)result; @@ -4045,14 +4028,12 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) Py_SETREF(v, PyUnicode_EncodeFSDefault(v)); } if (v == NULL) { - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_DECREF(list); - list = NULL; + Py_SETREF(list, NULL); break; } Py_DECREF(v); @@ -4122,8 +4103,8 @@ _posix_listdir(path_t *path, PyObject *list) const char *name; if (path->narrow) { name = path->narrow; - /* only return bytes if they specified a bytes-like object */ - return_str = !PyObject_CheckBuffer(path->object); + /* only return bytes if they specified a bytes object */ + return_str = !PyBytes_Check(path->object); } else { name = "."; @@ -4535,12 +4516,13 @@ If dir_fd is not None, it should be a file descriptor open to a directory, dir_fd may not be implemented on your platform. If it is unavailable, using it will raise a NotImplementedError. -The mode argument is ignored on Windows. 
+The mode argument is ignored on Windows. Where it is used, the current umask +value is first masked out. [clinic start generated code]*/ static PyObject * os_mkdir_impl(PyObject *module, path_t *path, int mode, int dir_fd) -/*[clinic end generated code: output=a70446903abe821f input=e965f68377e9b1ce]*/ +/*[clinic end generated code: output=a70446903abe821f input=a61722e1576fab03]*/ { int result; #ifdef HAVE_MKDIRAT @@ -5790,6 +5772,13 @@ os_execv_impl(PyObject *module, path_t *path, PyObject *argv) EXECV_CHAR **argvlist; Py_ssize_t argc; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!_PyInterpreterState_HasFeature(interp, Py_RTFLAGS_EXEC)) { + PyErr_SetString(PyExc_RuntimeError, + "exec not supported for isolated subinterpreters"); + return NULL; + } + /* execv has two arguments: (path, argv), where argv is a list or tuple of strings. */ @@ -5856,6 +5845,13 @@ os_execve_impl(PyObject *module, path_t *path, PyObject *argv, PyObject *env) EXECV_CHAR **envlist; Py_ssize_t argc, envc; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!_PyInterpreterState_HasFeature(interp, Py_RTFLAGS_EXEC)) { + PyErr_SetString(PyExc_RuntimeError, + "exec not supported for isolated subinterpreters"); + return NULL; + } + /* execve has three arguments: (path, argv, env), where argv is a list or tuple of strings and env is a dictionary like posix.environ. */ @@ -6777,7 +6773,7 @@ os_fork_impl(PyObject *module) { pid_t pid; PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->config._isolated_interpreter) { + if (!_PyInterpreterState_HasFeature(interp, Py_RTFLAGS_FORK)) { PyErr_SetString(PyExc_RuntimeError, "fork not supported for isolated subinterpreters"); return NULL; @@ -6891,8 +6887,7 @@ os_sched_param_impl(PyTypeObject *type, PyObject *sched_priority) res = PyStructSequence_New(type); if (!res) return NULL; - Py_INCREF(sched_priority); - PyStructSequence_SET_ITEM(res, 0, sched_priority); + PyStructSequence_SET_ITEM(res, 0, Py_NewRef(sched_priority)); return res; } @@ -7071,8 +7066,13 @@ static PyObject * os_sched_yield_impl(PyObject *module) /*[clinic end generated code: output=902323500f222cac input=e54d6f98189391d4]*/ { - if (sched_yield()) + int result; + Py_BEGIN_ALLOW_THREADS + result = sched_yield(); + Py_END_ALLOW_THREADS + if (result < 0) { return posix_error(); + } Py_RETURN_NONE; } @@ -8015,8 +8015,7 @@ os_kill_impl(PyObject *module, pid_t pid, Py_ssize_t signal) err = GetLastError(); result = PyErr_SetFromWindowsErr(err); } else { - Py_INCREF(Py_None); - result = Py_None; + result = Py_NewRef(Py_None); } CloseHandle(handle); @@ -8593,6 +8592,64 @@ os_pidfd_open_impl(PyObject *module, pid_t pid, unsigned int flags) #endif +#ifdef HAVE_SETNS +/*[clinic input] +os.setns + fd: fildes + A file descriptor to a namespace. + nstype: int = 0 + Type of namespace. + +Move the calling thread into different namespaces. +[clinic start generated code]*/ + +static PyObject * +os_setns_impl(PyObject *module, int fd, int nstype) +/*[clinic end generated code: output=5dbd055bfb66ecd0 input=42787871226bf3ee]*/ +{ + int res; + + Py_BEGIN_ALLOW_THREADS + res = setns(fd, nstype); + Py_END_ALLOW_THREADS + + if (res != 0) { + return posix_error(); + } + + Py_RETURN_NONE; +} +#endif + + +#ifdef HAVE_UNSHARE +/*[clinic input] +os.unshare + flags: int + Namespaces to be unshared. + +Disassociate parts of a process (or thread) execution context. 
+[clinic start generated code]*/ + +static PyObject * +os_unshare_impl(PyObject *module, int flags) +/*[clinic end generated code: output=1b3177906dd237ee input=9e065db3232b8b1b]*/ +{ + int res; + + Py_BEGIN_ALLOW_THREADS + res = unshare(flags); + Py_END_ALLOW_THREADS + + if (res != 0) { + return posix_error(); + } + + Py_RETURN_NONE; +} +#endif + + #if defined(HAVE_READLINK) || defined(MS_WINDOWS) /*[clinic input] os.readlink @@ -9782,7 +9839,7 @@ os_preadv_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, } while (n < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); #else do { -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) /* This entire function will be removed from the module dict when the API * is not available. */ @@ -9797,7 +9854,7 @@ os_preadv_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, Py_END_ALLOW_THREADS } while (n < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) #pragma clang diagnostic pop #endif @@ -10424,7 +10481,7 @@ os_pwritev_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, } while (result < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); #else -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) /* This entire function will be removed from the module dict when the API * is not available. */ @@ -10440,7 +10497,7 @@ os_pwritev_impl(PyObject *module, int fd, PyObject *buffers, Py_off_t offset, Py_END_ALLOW_THREADS } while (result < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); -#ifdef __APPLE__ +#if defined(__APPLE__) && defined(__clang__) #pragma clang diagnostic pop #endif @@ -10619,18 +10676,35 @@ os_mkfifo_impl(PyObject *module, path_t *path, int mode, int dir_fd) { int result; int async_err = 0; +#ifdef HAVE_MKFIFOAT + int mkfifoat_unavailable = 0; +#endif do { Py_BEGIN_ALLOW_THREADS #ifdef HAVE_MKFIFOAT - if (dir_fd != DEFAULT_DIR_FD) - result = mkfifoat(dir_fd, path->narrow, mode); - else + if (dir_fd != DEFAULT_DIR_FD) { + if (HAVE_MKFIFOAT_RUNTIME) { + result = mkfifoat(dir_fd, path->narrow, mode); + + } else { + mkfifoat_unavailable = 1; + result = 0; + } + } else #endif result = mkfifo(path->narrow, mode); Py_END_ALLOW_THREADS } while (result != 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); + +#ifdef HAVE_MKFIFOAT + if (mkfifoat_unavailable) { + argument_unavailable_error(NULL, "dir_fd"); + return NULL; + } +#endif + if (result != 0) return (!async_err) ? posix_error() : NULL; @@ -10671,18 +10745,33 @@ os_mknod_impl(PyObject *module, path_t *path, int mode, dev_t device, { int result; int async_err = 0; +#ifdef HAVE_MKNODAT + int mknodat_unavailable = 0; +#endif do { Py_BEGIN_ALLOW_THREADS #ifdef HAVE_MKNODAT - if (dir_fd != DEFAULT_DIR_FD) - result = mknodat(dir_fd, path->narrow, mode, device); - else + if (dir_fd != DEFAULT_DIR_FD) { + if (HAVE_MKNODAT_RUNTIME) { + result = mknodat(dir_fd, path->narrow, mode, device); + + } else { + mknodat_unavailable = 1; + result = 0; + } + } else #endif result = mknod(path->narrow, mode, device); Py_END_ALLOW_THREADS } while (result != 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); +#ifdef HAVE_MKNODAT + if (mknodat_unavailable) { + argument_unavailable_error(NULL, "dir_fd"); + return NULL; + } +#endif if (result != 0) return (!async_err) ? 
posix_error() : NULL; @@ -13032,15 +13121,13 @@ os_listxattr_impl(PyObject *module, path_t *path, int follow_symlinks) PyObject *attribute = PyUnicode_DecodeFSDefaultAndSize(start, trace - start); if (!attribute) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto exit; } error = PyList_Append(result, attribute); Py_DECREF(attribute); if (error) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto exit; } start = trace + 1; @@ -13534,6 +13621,25 @@ os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class) #endif } +/*[clinic input] +os.DirEntry.is_junction -> bool + defining_class: defining_class + / + +Return True if the entry is a junction; cached per entry. +[clinic start generated code]*/ + +static int +os_DirEntry_is_junction_impl(DirEntry *self, PyTypeObject *defining_class) +/*[clinic end generated code: output=7061a07b0ef2cd1f input=475cd36fb7d4723f]*/ +{ +#ifdef MS_WINDOWS + return self->win32_lstat.st_reparse_tag == IO_REPARSE_TAG_MOUNT_POINT; +#else + return 0; +#endif +} + static PyObject * DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks) { @@ -13601,8 +13707,7 @@ DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self) self->lstat = DirEntry_fetch_stat(module, self, 0); #endif } - Py_XINCREF(self->lstat); - return self->lstat; + return Py_XNewRef(self->lstat); } /*[clinic input] @@ -13638,8 +13743,7 @@ os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, } } - Py_XINCREF(self->stat); - return self->stat; + return Py_XNewRef(self->stat); } /* Set exception and return -1 on error, 0 for False, 1 for True */ @@ -13813,8 +13917,7 @@ static PyObject * os_DirEntry___fspath___impl(DirEntry *self) /*[clinic end generated code: output=6dd7f7ef752e6f4f input=3c49d0cf38df4fac]*/ { - Py_INCREF(self->path); - return self->path; + return Py_NewRef(self->path); } static PyMemberDef DirEntry_members[] = { @@ -13831,6 +13934,7 @@ static PyMethodDef DirEntry_methods[] = { OS_DIRENTRY_IS_DIR_METHODDEF OS_DIRENTRY_IS_FILE_METHODDEF OS_DIRENTRY_IS_SYMLINK_METHODDEF + OS_DIRENTRY_IS_JUNCTION_METHODDEF OS_DIRENTRY_STAT_METHODDEF OS_DIRENTRY_INODE_METHODDEF OS_DIRENTRY___FSPATH___METHODDEF @@ -14007,7 +14111,7 @@ DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, goto error; } - if (!path->narrow || !PyObject_CheckBuffer(path->object)) { + if (!path->narrow || !PyBytes_Check(path->object)) { entry->name = PyUnicode_DecodeFSDefaultAndSize(name, name_len); if (joined_path) entry->path = PyUnicode_DecodeFSDefault(joined_path); @@ -14022,8 +14126,7 @@ DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, goto error; if (path->fd != -1) { - entry->path = entry->name; - Py_INCREF(entry->path); + entry->path = Py_NewRef(entry->name); } else if (!entry->path) goto error; @@ -14214,8 +14317,7 @@ ScandirIterator_close(ScandirIterator *self, PyObject *args) static PyObject * ScandirIterator_enter(PyObject *self, PyObject *args) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -14423,8 +14525,7 @@ PyOS_FSPath(PyObject *path) PyObject *path_repr = NULL; if (PyUnicode_Check(path) || PyBytes_Check(path)) { - Py_INCREF(path); - return path; + return Py_NewRef(path); } func = _PyObject_LookupSpecial(path, &_Py_ID(__fspath__)); @@ -14925,6 +15026,8 @@ static PyMethodDef posix_methods[] = { OS__ADD_DLL_DIRECTORY_METHODDEF OS__REMOVE_DLL_DIRECTORY_METHODDEF OS_WAITSTATUS_TO_EXITCODE_METHODDEF + OS_SETNS_METHODDEF + OS_UNSHARE_METHODDEF 
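For context on the os.setns()/os.unshare() wrappers registered above: at the C level they are thin wrappers around the Linux system calls of the same name. A minimal sketch of direct use follows; the function name, the example path, and the choice of CLONE_NEWNET are illustrative only, and most namespace types require CAP_SYS_ADMIN:

#define _GNU_SOURCE
#include <sched.h>
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

/* Illustrative only: move the calling thread into another process's
   network namespace, which is what os.setns(fd, CLONE_NEWNET) wraps. */
static int
enter_net_namespace(const char *nspath)   /* e.g. "/proc/1234/ns/net" */
{
    int fd = open(nspath, O_RDONLY);
    if (fd < 0) {
        perror("open");
        return -1;
    }
    if (setns(fd, CLONE_NEWNET) != 0) {
        perror("setns");
        close(fd);
        return -1;
    }
    close(fd);
    return 0;
}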
{NULL, NULL} /* Sentinel */ }; @@ -15370,6 +15473,53 @@ all_ins(PyObject *m) #ifdef SCHED_FX if (PyModule_AddIntConstant(m, "SCHED_FX", SCHED_FSS)) return -1; #endif + +/* constants for namespaces */ +#if defined(HAVE_SETNS) || defined(HAVE_UNSHARE) +#ifdef CLONE_FS + if (PyModule_AddIntMacro(m, CLONE_FS)) return -1; +#endif +#ifdef CLONE_FILES + if (PyModule_AddIntMacro(m, CLONE_FILES)) return -1; +#endif +#ifdef CLONE_NEWNS + if (PyModule_AddIntMacro(m, CLONE_NEWNS)) return -1; +#endif +#ifdef CLONE_NEWCGROUP + if (PyModule_AddIntMacro(m, CLONE_NEWCGROUP)) return -1; +#endif +#ifdef CLONE_NEWUTS + if (PyModule_AddIntMacro(m, CLONE_NEWUTS)) return -1; +#endif +#ifdef CLONE_NEWIPC + if (PyModule_AddIntMacro(m, CLONE_NEWIPC)) return -1; +#endif +#ifdef CLONE_NEWUSER + if (PyModule_AddIntMacro(m, CLONE_NEWUSER)) return -1; +#endif +#ifdef CLONE_NEWPID + if (PyModule_AddIntMacro(m, CLONE_NEWPID)) return -1; +#endif +#ifdef CLONE_NEWNET + if (PyModule_AddIntMacro(m, CLONE_NEWNET)) return -1; +#endif +#ifdef CLONE_NEWTIME + if (PyModule_AddIntMacro(m, CLONE_NEWTIME)) return -1; +#endif +#ifdef CLONE_SYSVSEM + if (PyModule_AddIntMacro(m, CLONE_SYSVSEM)) return -1; +#endif +#ifdef CLONE_THREAD + if (PyModule_AddIntMacro(m, CLONE_THREAD)) return -1; +#endif +#ifdef CLONE_SIGHAND + if (PyModule_AddIntMacro(m, CLONE_SIGHAND)) return -1; +#endif +#ifdef CLONE_VM + if (PyModule_AddIntMacro(m, CLONE_VM)) return -1; +#endif +#endif + #endif #ifdef USE_XATTRS @@ -15525,6 +15675,14 @@ PROBE(probe_fdopendir, HAVE_FDOPENDIR_RUNTIME) PROBE(probe_mkdirat, HAVE_MKDIRAT_RUNTIME) #endif +#ifdef HAVE_MKFIFOAT +PROBE(probe_mkfifoat, HAVE_MKFIFOAT_RUNTIME) +#endif + +#ifdef HAVE_MKNODAT +PROBE(probe_mknodat, HAVE_MKNODAT_RUNTIME) +#endif + #ifdef HAVE_RENAMEAT PROBE(probe_renameat, HAVE_RENAMEAT_RUNTIME) #endif @@ -15658,11 +15816,11 @@ static const struct have_function { #endif #ifdef HAVE_MKFIFOAT - { "HAVE_MKFIFOAT", NULL }, + { "HAVE_MKFIFOAT", probe_mkfifoat }, #endif #ifdef HAVE_MKNODAT - { "HAVE_MKNODAT", NULL }, + { "HAVE_MKNODAT", probe_mknodat }, #endif #ifdef HAVE_OPENAT @@ -15732,8 +15890,7 @@ posixmodule_exec(PyObject *m) if (setup_confname_tables(m)) return -1; - Py_INCREF(PyExc_OSError); - PyModule_AddObject(m, "error", PyExc_OSError); + PyModule_AddObject(m, "error", Py_NewRef(PyExc_OSError)); #if defined(HAVE_WAITID) && !defined(__APPLE__) waitid_result_desc.name = MODNAME ".waitid_result"; @@ -15741,8 +15898,7 @@ posixmodule_exec(PyObject *m) if (WaitidResultType == NULL) { return -1; } - Py_INCREF(WaitidResultType); - PyModule_AddObject(m, "waitid_result", WaitidResultType); + PyModule_AddObject(m, "waitid_result", Py_NewRef(WaitidResultType)); state->WaitidResultType = WaitidResultType; #endif @@ -15754,8 +15910,7 @@ posixmodule_exec(PyObject *m) if (StatResultType == NULL) { return -1; } - Py_INCREF(StatResultType); - PyModule_AddObject(m, "stat_result", StatResultType); + PyModule_AddObject(m, "stat_result", Py_NewRef(StatResultType)); state->StatResultType = StatResultType; structseq_new = ((PyTypeObject *)StatResultType)->tp_new; ((PyTypeObject *)StatResultType)->tp_new = statresult_new; @@ -15765,8 +15920,7 @@ posixmodule_exec(PyObject *m) if (StatVFSResultType == NULL) { return -1; } - Py_INCREF(StatVFSResultType); - PyModule_AddObject(m, "statvfs_result", StatVFSResultType); + PyModule_AddObject(m, "statvfs_result", Py_NewRef(StatVFSResultType)); state->StatVFSResultType = StatVFSResultType; #ifdef NEED_TICKS_PER_SECOND # if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) @@ -15784,8 
+15938,7 @@ posixmodule_exec(PyObject *m) if (SchedParamType == NULL) { return -1; } - Py_INCREF(SchedParamType); - PyModule_AddObject(m, "sched_param", SchedParamType); + PyModule_AddObject(m, "sched_param", Py_NewRef(SchedParamType)); state->SchedParamType = SchedParamType; ((PyTypeObject *)SchedParamType)->tp_new = os_sched_param; #endif @@ -15795,8 +15948,7 @@ posixmodule_exec(PyObject *m) if (TerminalSizeType == NULL) { return -1; } - Py_INCREF(TerminalSizeType); - PyModule_AddObject(m, "terminal_size", TerminalSizeType); + PyModule_AddObject(m, "terminal_size", Py_NewRef(TerminalSizeType)); state->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ @@ -15810,8 +15962,7 @@ posixmodule_exec(PyObject *m) if (DirEntryType == NULL) { return -1; } - Py_INCREF(DirEntryType); - PyModule_AddObject(m, "DirEntry", DirEntryType); + PyModule_AddObject(m, "DirEntry", Py_NewRef(DirEntryType)); state->DirEntryType = DirEntryType; times_result_desc.name = MODNAME ".times_result"; @@ -15819,16 +15970,15 @@ posixmodule_exec(PyObject *m) if (TimesResultType == NULL) { return -1; } - Py_INCREF(TimesResultType); - PyModule_AddObject(m, "times_result", TimesResultType); + PyModule_AddObject(m, "times_result", Py_NewRef(TimesResultType)); state->TimesResultType = TimesResultType; PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { return -1; } - Py_INCREF(UnameResultType); - PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); + ; + PyModule_AddObject(m, "uname_result", Py_NewRef(UnameResultType)); state->UnameResultType = (PyObject *)UnameResultType; if ((state->billion = PyLong_FromLong(1000000000)) == NULL) diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 165cb0effae2e4..0e0a9cf7cc2c5c 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -959,8 +959,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, new_parser->itself = XML_ExternalEntityParserCreate(self->itself, context, encoding); new_parser->handlers = 0; - new_parser->intern = self->intern; - Py_XINCREF(new_parser->intern); + new_parser->intern = Py_XNewRef(self->intern); if (self->buffer != NULL) { new_parser->buffer = PyMem_Malloc(new_parser->buffer_size); @@ -991,8 +990,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, for (i = 0; handler_info[i].name != NULL; i++) { PyObject *handler = self->handlers[i]; if (handler != NULL) { - Py_INCREF(handler); - new_parser->handlers[i] = handler; + new_parser->handlers[i] = Py_NewRef(handler); handler_info[i].setter(new_parser->itself, handler_info[i].handler); } @@ -1148,8 +1146,7 @@ newxmlparseobject(pyexpat_state *state, const char *encoding, self->in_callback = 0; self->ns_prefixes = 0; self->handlers = NULL; - self->intern = intern; - Py_XINCREF(self->intern); + self->intern = Py_XNewRef(intern); /* namespace_separator is either NULL or contains one char + \0 */ self->itself = XML_ParserCreate_MM(encoding, &ExpatMemoryHandler, @@ -1232,8 +1229,7 @@ xmlparse_handler_getter(xmlparseobject *self, struct HandlerInfo *hi) PyObject *result = self->handlers[handlernum]; if (result == NULL) result = Py_None; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static int @@ -1365,9 +1361,7 @@ xmlparse_buffer_size_setter(xmlparseobject *self, PyObject *v, void *closure) /* check maximum */ if (new_buffer_size > INT_MAX) { - char errmsg[100]; - sprintf(errmsg, "buffer_size must not be greater than %i", INT_MAX); - 
PyErr_SetString(PyExc_ValueError, errmsg); + PyErr_Format(PyExc_ValueError, "buffer_size must not be greater than %i", INT_MAX); return -1; } @@ -1725,7 +1719,7 @@ add_error(PyObject *errors_module, PyObject *codes_dict, const int error_code = (int)error_index; /* NOTE: This keeps the source of truth regarding error - * messages with libexpat and (by definiton) in bulletproof sync + * messages with libexpat and (by definition) in bulletproof sync * with the other uses of the XML_ErrorString function * elsewhere within this file. pyexpat's copy of the messages * only acts as a fallback in case of outdated runtime libexpat, @@ -1796,15 +1790,13 @@ add_errors_module(PyObject *mod) goto error; } - Py_INCREF(codes_dict); - if (PyModule_AddObject(errors_module, "codes", codes_dict) < 0) { + if (PyModule_AddObject(errors_module, "codes", Py_NewRef(codes_dict)) < 0) { Py_DECREF(codes_dict); goto error; } Py_CLEAR(codes_dict); - Py_INCREF(rev_codes_dict); - if (PyModule_AddObject(errors_module, "messages", rev_codes_dict) < 0) { + if (PyModule_AddObject(errors_module, "messages", Py_NewRef(rev_codes_dict)) < 0) { Py_DECREF(rev_codes_dict); goto error; } diff --git a/Modules/readline.c b/Modules/readline.c index 1b616fc4f3b4e1..fdb6356e1c84b5 100644 --- a/Modules/readline.c +++ b/Modules/readline.c @@ -402,8 +402,7 @@ set_hook(const char *funcname, PyObject **hook_var, PyObject *function) Py_CLEAR(*hook_var); } else if (PyCallable_Check(function)) { - Py_INCREF(function); - Py_XSETREF(*hook_var, function); + Py_XSETREF(*hook_var, Py_NewRef(function)); } else { PyErr_Format(PyExc_TypeError, @@ -524,8 +523,7 @@ static PyObject * readline_get_begidx_impl(PyObject *module) /*[clinic end generated code: output=362616ee8ed1b2b1 input=e083b81c8eb4bac3]*/ { - Py_INCREF(readlinestate_global->begidx); - return readlinestate_global->begidx; + return Py_NewRef(readlinestate_global->begidx); } /* Get the ending index for the scope of the tab-completion */ @@ -540,8 +538,7 @@ static PyObject * readline_get_endidx_impl(PyObject *module) /*[clinic end generated code: output=7f763350b12d7517 input=d4c7e34a625fd770]*/ { - Py_INCREF(readlinestate_global->endidx); - return readlinestate_global->endidx; + return Py_NewRef(readlinestate_global->endidx); } /* Set the tab-completion word-delimiters that readline uses */ @@ -784,8 +781,7 @@ readline_get_completer_impl(PyObject *module) if (readlinestate_global->completer == NULL) { Py_RETURN_NONE; } - Py_INCREF(readlinestate_global->completer); - return readlinestate_global->completer; + return Py_NewRef(readlinestate_global->completer); } /* Private function to get current length of history. XXX It may be @@ -1258,9 +1254,9 @@ setup_readline(readlinestate *mod_state) rl_attempted_completion_function = flex_complete; /* Set Python word break characters */ completer_word_break_characters = - rl_completer_word_break_characters = strdup(" \t\n`~!@#$%^&*()-=+[{]}\\|;:'\",<>/?"); /* All nonalphanums except '.' 
*/ + rl_completer_word_break_characters = completer_word_break_characters; mod_state->begidx = PyLong_FromLong(0L); mod_state->endidx = PyLong_FromLong(0L); diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 4eea928a2683ae..df4043de08dacc 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -1652,8 +1652,7 @@ select_epoll___enter___impl(pyEpoll_Object *self) if (self->epfd < 0) return pyepoll_err_closed(); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 0f30b4da036313..c539787e5829dd 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -272,9 +272,8 @@ report_wakeup_write_error(void *data) errno = (int) (intptr_t) data; PyErr_Fetch(&exc, &val, &tb); PyErr_SetFromErrno(PyExc_OSError); - PySys_WriteStderr("Exception ignored when trying to write to the " - "signal wakeup fd:\n"); - PyErr_WriteUnraisable(NULL); + _PyErr_WriteUnraisableMsg("when trying to write to the signal wakeup fd", + NULL); PyErr_Restore(exc, val, tb); errno = save_errno; return 0; @@ -284,15 +283,15 @@ report_wakeup_write_error(void *data) static int report_wakeup_send_error(void* data) { + int send_errno = (int) (intptr_t) data; + PyObject *exc, *val, *tb; PyErr_Fetch(&exc, &val, &tb); /* PyErr_SetExcFromWindowsErr() invokes FormatMessage() which recognizes the error codes used by both GetLastError() and WSAGetLastError */ - PyErr_SetExcFromWindowsErr(PyExc_OSError, (int) (intptr_t) data); - PySys_WriteStderr("Exception ignored when trying to send to the " - "signal wakeup fd:\n"); - PyErr_WriteUnraisable(NULL); + PyErr_SetExcFromWindowsErr(PyExc_OSError, send_errno); + _PyErr_WriteUnraisableMsg("when trying to send to the signal wakeup fd", NULL); PyErr_Restore(exc, val, tb); return 0; } @@ -627,13 +626,14 @@ signal.strsignal Return the system description of the given signal. -The return values can be such as "Interrupt", "Segmentation fault", etc. -Returns None if the signal is not recognized. +Returns the description of signal *signalnum*, such as "Interrupt" +for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no +description. Raises :exc:`ValueError` if *signalnum* is invalid. [clinic start generated code]*/ static PyObject * signal_strsignal_impl(PyObject *module, int signalnum) -/*[clinic end generated code: output=44e12e1e3b666261 input=b77914b03f856c74]*/ +/*[clinic end generated code: output=44e12e1e3b666261 input=238b335847778bc0]*/ { const char *res; @@ -1798,6 +1798,19 @@ int PyErr_CheckSignals(void) { PyThreadState *tstate = _PyThreadState_GET(); + + /* Opportunistically check if the GC is scheduled to run and run it + if we have a request. This is done here because native code needs + to call this API if is going to run for some time without executing + Python code to ensure signals are handled. 
Checking for the GC here + allows long running native code to clean cycles created using the C-API + even if it doesn't run the evaluation loop */ + struct _ceval_state *interp_ceval_state = &tstate->interp->ceval; + if (_Py_atomic_load_relaxed(&interp_ceval_state->gc_scheduled)) { + _Py_atomic_store_relaxed(&interp_ceval_state->gc_scheduled, 0); + _Py_RunGC(tstate); + } + if (!_Py_ThreadCanHandleSignals(tstate->interp)) { return 0; } diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 869bacde924d83..4ecb88cef7224e 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -247,6 +247,10 @@ shutdown(how) -- shut down traffic in one or both directions\n\ #include #endif +#ifdef HAVE_NET_ETHERNET_H +#include +#endif + /* Generic socket object definitions and includes */ #define PySocket_BUILDING_SOCKET #include "socketmodule.h" @@ -4106,8 +4110,7 @@ makeval_recvmsg(ssize_t received, void *data) if (received < PyBytes_GET_SIZE(*buf)) _PyBytes_Resize(buf, received); - Py_XINCREF(*buf); - return *buf; + return Py_XNewRef(*buf); } /* s.recvmsg(bufsize[, ancbufsize[, flags]]) method */ @@ -4386,8 +4389,7 @@ sock_sendall(PySocketSockObject *s, PyObject *args) } while (len > 0); PyBuffer_Release(&pbuf); - Py_INCREF(Py_None); - res = Py_None; + res = Py_NewRef(Py_None); done: PyBuffer_Release(&pbuf); @@ -7342,29 +7344,22 @@ PyInit__socket(void) if (m == NULL) return NULL; - Py_INCREF(PyExc_OSError); - PyModule_AddObject(m, "error", PyExc_OSError); + PyModule_AddObject(m, "error", Py_NewRef(PyExc_OSError)); socket_herror = PyErr_NewException("socket.herror", PyExc_OSError, NULL); if (socket_herror == NULL) return NULL; - Py_INCREF(socket_herror); - PyModule_AddObject(m, "herror", socket_herror); + PyModule_AddObject(m, "herror", Py_NewRef(socket_herror)); socket_gaierror = PyErr_NewException("socket.gaierror", PyExc_OSError, NULL); if (socket_gaierror == NULL) return NULL; - Py_INCREF(socket_gaierror); - PyModule_AddObject(m, "gaierror", socket_gaierror); + PyModule_AddObject(m, "gaierror", Py_NewRef(socket_gaierror)); PyModule_AddObjectRef(m, "timeout", PyExc_TimeoutError); - Py_INCREF((PyObject *)&sock_type); - if (PyModule_AddObject(m, "SocketType", - (PyObject *)&sock_type) != 0) + if (PyModule_AddObject(m, "SocketType", Py_NewRef(&sock_type)) != 0) return NULL; - Py_INCREF((PyObject *)&sock_type); - if (PyModule_AddObject(m, "socket", - (PyObject *)&sock_type) != 0) + if (PyModule_AddObject(m, "socket", Py_NewRef(&sock_type)) != 0) return NULL; #ifdef ENABLE_IPV6 @@ -7372,8 +7367,7 @@ PyInit__socket(void) #else has_ipv6 = Py_False; #endif - Py_INCREF(has_ipv6); - PyModule_AddObject(m, "has_ipv6", has_ipv6); + PyModule_AddObject(m, "has_ipv6", Py_NewRef(has_ipv6)); /* Export C API */ PySocketModule_APIObject *capi = sock_get_api(); @@ -7711,6 +7705,25 @@ PyInit__socket(void) PyModule_AddIntMacro(m, ALG_OP_VERIFY); #endif +/* IEEE 802.3 protocol numbers required for a standard TCP/IP network stack */ +#ifdef ETHERTYPE_ARP + PyModule_AddIntMacro(m, ETHERTYPE_ARP); +#endif +#ifdef ETHERTYPE_IP + PyModule_AddIntMacro(m, ETHERTYPE_IP); +#endif +#ifdef ETHERTYPE_IPV6 + PyModule_AddIntMacro(m, ETHERTYPE_IPV6); +#endif +#ifdef ETHERTYPE_VLAN + PyModule_AddIntMacro(m, ETHERTYPE_VLAN); +#endif + +/* Linux pseudo-protocol for sniffing every packet */ +#ifdef ETH_P_ALL + PyModule_AddIntMacro(m, ETH_P_ALL); +#endif + /* Socket types */ PyModule_AddIntMacro(m, SOCK_STREAM); PyModule_AddIntMacro(m, SOCK_DGRAM); diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c index 
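The PyErr_CheckSignals() change above matters for extension code that runs for a long time without entering the eval loop: such code should call the function periodically so that pending signals (and, with this change, a scheduled GC run) are serviced. A hedged sketch of that usual pattern, with an illustrative function name and check interval:

#include <Python.h>

/* Illustrative long-running native loop that periodically lets the
   interpreter handle pending signals (and now a scheduled GC run). */
static PyObject *
crunch(PyObject *module, PyObject *Py_UNUSED(args))
{
    for (long i = 0; i < 100000000L; i++) {
        /* ... pure C work here, no Python objects involved ... */
        if ((i & 0xFFFF) == 0 && PyErr_CheckSignals() < 0) {
            return NULL;   /* e.g. KeyboardInterrupt was set as the exception */
        }
    }
    Py_RETURN_NONE;
}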
c25ecc2b5dc7a6..4ef1d8cde07db6 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -56,8 +56,7 @@ _symtable_symtable_impl(PyObject *module, PyObject *source, if (st == NULL) { return NULL; } - t = (PyObject *)st->st_top; - Py_INCREF(t); + t = Py_NewRef(st->st_top); _PySymtable_Free(st); return t; } diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index c409fe968f8894..23833b72850313 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -54,6 +54,13 @@ Revision history: #include +/*[clinic input] +module syslog +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=478f4ac94a1d4cae]*/ + +#include "clinic/syslogmodule.c.h" + /* only one instance, only one syslog, so globals should be ok */ static PyObject *S_ident_o = NULL; /* identifier, held by openlog() */ static char S_log_open = 0; @@ -113,19 +120,23 @@ syslog_get_argv(void) } +/*[clinic input] +syslog.openlog + + ident: unicode = NULL + logoption as logopt: long = 0 + facility: long(c_default="LOG_USER") = LOG_USER + +Set logging options of subsequent syslog() calls. +[clinic start generated code]*/ + static PyObject * -syslog_openlog(PyObject * self, PyObject * args, PyObject *kwds) +syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt, + long facility) +/*[clinic end generated code: output=5476c12829b6eb75 input=8a987a96a586eee7]*/ { - long logopt = 0; - long facility = LOG_USER; - PyObject *ident = NULL; - static char *keywords[] = {"ident", "logoption", "facility", 0}; const char *ident_str = NULL; - if (!PyArg_ParseTupleAndKeywords(args, kwds, - "|Ull:openlog", keywords, &ident, &logopt, &facility)) - return NULL; - if (ident) { Py_INCREF(ident); } @@ -158,64 +169,65 @@ syslog_openlog(PyObject * self, PyObject * args, PyObject *kwds) } -static PyObject * -syslog_syslog(PyObject * self, PyObject * args) -{ - PyObject *message_object; - const char *message; - int priority = LOG_INFO; - if (!PyArg_ParseTuple(args, "iU;[priority,] message string", - &priority, &message_object)) { - PyErr_Clear(); - if (!PyArg_ParseTuple(args, "U;[priority,] message string", - &message_object)) - return NULL; - } +/*[clinic input] +syslog.syslog - message = PyUnicode_AsUTF8(message_object); - if (message == NULL) - return NULL; + [ + priority: int(c_default="LOG_INFO") = LOG_INFO + ] + + message: str + + / + +Send the string message to the system logger. +[clinic start generated code]*/ +static PyObject * +syslog_syslog_impl(PyObject *module, int group_left_1, int priority, + const char *message) +/*[clinic end generated code: output=c3dbc73445a0e078 input=ac83d92b12ea3d4e]*/ +{ if (PySys_Audit("syslog.syslog", "is", priority, message) < 0) { return NULL; } /* if log is not opened, open it now */ if (!S_log_open) { - PyObject *openargs; - - /* Continue even if PyTuple_New fails, because openlog(3) is optional. - * So, we can still do logging in the unlikely event things are so hosed - * that we can't do this tuple. - */ - if ((openargs = PyTuple_New(0))) { - PyObject *openlog_ret = syslog_openlog(self, openargs, NULL); - Py_DECREF(openargs); - if (openlog_ret == NULL) { - return NULL; - } - Py_DECREF(openlog_ret); - } - else { + PyObject *openlog_ret = syslog_openlog_impl(module, NULL, 0, LOG_USER); + if (openlog_ret == NULL) { return NULL; } + Py_DECREF(openlog_ret); } /* Incref ident, because it can be decrefed if syslog.openlog() is * called when the GIL is released. 
*/ - PyObject *ident = S_ident_o; - Py_XINCREF(ident); + PyObject *ident = Py_XNewRef(S_ident_o); +#ifdef __APPLE__ + // gh-98178: On macOS, libc syslog() is not thread-safe + syslog(priority, "%s", message); +#else Py_BEGIN_ALLOW_THREADS; syslog(priority, "%s", message); Py_END_ALLOW_THREADS; +#endif Py_XDECREF(ident); Py_RETURN_NONE; } + +/*[clinic input] +syslog.closelog + +Reset the syslog module values and call the system library closelog(). +[clinic start generated code]*/ + static PyObject * -syslog_closelog(PyObject *self, PyObject *unused) +syslog_closelog_impl(PyObject *module) +/*[clinic end generated code: output=97890a80a24b1b84 input=fb77a54d447acf07]*/ { if (PySys_Audit("syslog.closelog", NULL) < 0) { return NULL; @@ -228,51 +240,67 @@ syslog_closelog(PyObject *self, PyObject *unused) Py_RETURN_NONE; } -static PyObject * -syslog_setlogmask(PyObject *self, PyObject *args) -{ - long maskpri, omaskpri; +/*[clinic input] +syslog.setlogmask -> long - if (!PyArg_ParseTuple(args, "l;mask for priority", &maskpri)) - return NULL; - if (PySys_Audit("syslog.setlogmask", "(O)", args ? args : Py_None) < 0) { - return NULL; + maskpri: long + / + +Set the priority mask to maskpri and return the previous mask value. +[clinic start generated code]*/ + +static long +syslog_setlogmask_impl(PyObject *module, long maskpri) +/*[clinic end generated code: output=d6ed163917b434bf input=adff2c2b76c7629c]*/ +{ + if (PySys_Audit("syslog.setlogmask", "l", maskpri) < 0) { + return -1; } - omaskpri = setlogmask(maskpri); - return PyLong_FromLong(omaskpri); + + return setlogmask(maskpri); } -static PyObject * -syslog_log_mask(PyObject *self, PyObject *args) +/*[clinic input] +syslog.LOG_MASK -> long + + pri: long + / + +Calculates the mask for the individual priority pri. +[clinic start generated code]*/ + +static long +syslog_LOG_MASK_impl(PyObject *module, long pri) +/*[clinic end generated code: output=c4a5bbfcc74c7c94 input=534829cb7fb5f7d2]*/ { - long mask; - long pri; - if (!PyArg_ParseTuple(args, "l:LOG_MASK", &pri)) - return NULL; - mask = LOG_MASK(pri); - return PyLong_FromLong(mask); + return LOG_MASK(pri); } -static PyObject * -syslog_log_upto(PyObject *self, PyObject *args) +/*[clinic input] +syslog.LOG_UPTO -> long + + pri: long + / + +Calculates the mask for all priorities up to and including pri. 
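The setlogmask()/LOG_MASK()/LOG_UPTO() functions being converted to Argument Clinic here map directly onto the libc calls of the same names. A minimal sketch of the underlying behaviour, with an illustrative function name:

#include <syslog.h>

/* Illustrative only: what syslog.setlogmask(syslog.LOG_UPTO(LOG_INFO))
   amounts to at the libc level. */
static void
silence_debug_messages(void)
{
    int old_mask = setlogmask(LOG_UPTO(LOG_INFO));
    syslog(LOG_DEBUG, "dropped by the mask");
    syslog(LOG_WARNING, "still delivered");
    setlogmask(old_mask);    /* restore the previous priority mask */
}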
+[clinic start generated code]*/ + +static long +syslog_LOG_UPTO_impl(PyObject *module, long pri) +/*[clinic end generated code: output=9eab083c90601d7e input=5e906d6c406b7458]*/ { - long mask; - long pri; - if (!PyArg_ParseTuple(args, "l:LOG_UPTO", &pri)) - return NULL; - mask = LOG_UPTO(pri); - return PyLong_FromLong(mask); + return LOG_UPTO(pri); } /* List of functions defined in the module */ static PyMethodDef syslog_methods[] = { - {"openlog", _PyCFunction_CAST(syslog_openlog), METH_VARARGS | METH_KEYWORDS}, - {"closelog", syslog_closelog, METH_NOARGS}, - {"syslog", syslog_syslog, METH_VARARGS}, - {"setlogmask", syslog_setlogmask, METH_VARARGS}, - {"LOG_MASK", syslog_log_mask, METH_VARARGS}, - {"LOG_UPTO", syslog_log_upto, METH_VARARGS}, + SYSLOG_OPENLOG_METHODDEF + SYSLOG_CLOSELOG_METHODDEF + SYSLOG_SYSLOG_METHODDEF + SYSLOG_SETLOGMASK_METHODDEF + SYSLOG_LOG_MASK_METHODDEF + SYSLOG_LOG_UPTO_METHODDEF {NULL, NULL, 0} }; diff --git a/Modules/termios.c b/Modules/termios.c index 354e5ca18d04d8..fcc8f042679870 100644 --- a/Modules/termios.c +++ b/Modules/termios.c @@ -82,7 +82,12 @@ termios_tcgetattr_impl(PyObject *module, int fd) { termiosmodulestate *state = PyModule_GetState(module); struct termios mode; - if (tcgetattr(fd, &mode) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcgetattr(fd, &mode); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -169,7 +174,12 @@ termios_tcsetattr_impl(PyObject *module, int fd, int when, PyObject *term) /* Get the old mode, in case there are any hidden fields... */ termiosmodulestate *state = PyModule_GetState(module); struct termios mode; - if (tcgetattr(fd, &mode) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcgetattr(fd, &mode); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -211,7 +221,12 @@ termios_tcsetattr_impl(PyObject *module, int fd, int when, PyObject *term) return PyErr_SetFromErrno(state->TermiosError); if (cfsetospeed(&mode, (speed_t) ospeed) == -1) return PyErr_SetFromErrno(state->TermiosError); - if (tcsetattr(fd, when, &mode) == -1) + + Py_BEGIN_ALLOW_THREADS + r = tcsetattr(fd, when, &mode); + Py_END_ALLOW_THREADS + + if (r == -1) return PyErr_SetFromErrno(state->TermiosError); Py_RETURN_NONE; @@ -235,7 +250,13 @@ termios_tcsendbreak_impl(PyObject *module, int fd, int duration) /*[clinic end generated code: output=5945f589b5d3ac66 input=dc2f32417691f8ed]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcsendbreak(fd, duration) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcsendbreak(fd, duration); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -256,7 +277,13 @@ termios_tcdrain_impl(PyObject *module, int fd) /*[clinic end generated code: output=5fd86944c6255955 input=c99241b140b32447]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcdrain(fd) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcdrain(fd); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -282,7 +309,13 @@ termios_tcflush_impl(PyObject *module, int fd, int queue) /*[clinic end generated code: output=2424f80312ec2f21 input=0f7d08122ddc07b5]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcflush(fd, queue) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcflush(fd, queue); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -308,7 +341,13 @@ termios_tcflow_impl(PyObject 
*module, int fd, int action) /*[clinic end generated code: output=afd10928e6ea66eb input=c6aff0640b6efd9c]*/ { termiosmodulestate *state = PyModule_GetState(module); - if (tcflow(fd, action) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = tcflow(fd, action); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -333,7 +372,13 @@ termios_tcgetwinsize_impl(PyObject *module, int fd) #if defined(TIOCGWINSZ) termiosmodulestate *state = PyModule_GetState(module); struct winsize w; - if (ioctl(fd, TIOCGWINSZ, &w) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGWINSZ, &w); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -352,7 +397,12 @@ termios_tcgetwinsize_impl(PyObject *module, int fd) #elif defined(TIOCGSIZE) termiosmodulestate *state = PyModule_GetState(module); struct ttysize s; - if (ioctl(fd, TIOCGSIZE, &s) == -1) { + int r; + + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGSIZE, &s); + Py_END_ALLOW_THREADS + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -433,15 +483,25 @@ termios_tcsetwinsize_impl(PyObject *module, int fd, PyObject *winsz) return NULL; } - if (ioctl(fd, TIOCSWINSZ, &w) == -1) { + int r; + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCSWINSZ, &w); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } Py_RETURN_NONE; #elif defined(TIOCGSIZE) && defined(TIOCSSIZE) struct ttysize s; + int r; /* Get the old ttysize because it might have more fields. */ - if (ioctl(fd, TIOCGSIZE, &s) == -1) { + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCGSIZE, &s); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } @@ -453,7 +513,11 @@ termios_tcsetwinsize_impl(PyObject *module, int fd, PyObject *winsz) return NULL; } - if (ioctl(fd, TIOCSSIZE, &s) == -1) { + Py_BEGIN_ALLOW_THREADS + r = ioctl(fd, TIOCSSIZE, &s); + Py_END_ALLOW_THREADS + + if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); } diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index 32014707bce747..59fccd4b834dd3 100644 --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -159,8 +159,7 @@ unicodedata_UCD_decimal_impl(PyObject *self, int chr, return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyLong_FromLong(rc); @@ -194,8 +193,7 @@ unicodedata_UCD_digit_impl(PyObject *self, int chr, PyObject *default_value) return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyLong_FromLong(rc); @@ -246,8 +244,7 @@ unicodedata_UCD_numeric_impl(PyObject *self, int chr, return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } return PyFloat_FromDouble(rc); @@ -917,8 +914,7 @@ unicodedata_UCD_is_normalized_impl(PyObject *self, PyObject *form, result = (m == YES) ? Py_True : Py_False; } - Py_INCREF(result); - return result; + return Py_NewRef(result); } @@ -943,39 +939,34 @@ unicodedata_UCD_normalize_impl(PyObject *self, PyObject *form, if (PyUnicode_GET_LENGTH(input) == 0) { /* Special case empty input strings, since resizing them later would cause internal errors. 
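
The termios hunks above all apply the same conversion: run the potentially blocking tc*() or ioctl() call between Py_BEGIN_ALLOW_THREADS and Py_END_ALLOW_THREADS, capture the result in a local `r`, and only inspect errno via PyErr_SetFromErrno() after the GIL has been re-acquired. A hedged sketch of that pattern, using a made-up function and PyExc_OSError in place of the module's TermiosError:

#include <Python.h>
#include <termios.h>

/* Sketch only: capture-then-check shape used throughout Modules/termios.c. */
static PyObject *
example_drain(PyObject *Py_UNUSED(module), PyObject *args)
{
    int fd;
    if (!PyArg_ParseTuple(args, "i", &fd)) {
        return NULL;
    }
    int r;
    Py_BEGIN_ALLOW_THREADS
    r = tcdrain(fd);            /* may block until queued output is sent */
    Py_END_ALLOW_THREADS
    if (r == -1) {
        /* errno is only consulted once the GIL is held again */
        return PyErr_SetFromErrno(PyExc_OSError);
    }
    Py_RETURN_NONE;
}
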
*/ - Py_INCREF(input); - return input; + return Py_NewRef(input); } if (PyUnicode_CompareWithASCIIString(form, "NFC") == 0) { if (is_normalized_quickcheck(self, input, true, false, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfc_nfkc(self, input, 0); } if (PyUnicode_CompareWithASCIIString(form, "NFKC") == 0) { if (is_normalized_quickcheck(self, input, true, true, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfc_nfkc(self, input, 1); } if (PyUnicode_CompareWithASCIIString(form, "NFD") == 0) { if (is_normalized_quickcheck(self, input, false, false, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfd_nfkd(self, input, 0); } if (PyUnicode_CompareWithASCIIString(form, "NFKD") == 0) { if (is_normalized_quickcheck(self, input, false, true, true) == YES) { - Py_INCREF(input); - return input; + return Py_NewRef(input); } return nfd_nfkd(self, input, 1); } @@ -1370,8 +1361,7 @@ unicodedata_UCD_name_impl(PyObject *self, int chr, PyObject *default_value) return NULL; } else { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } } diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c index e234504e331945..5f5297ba6337af 100644 --- a/Modules/xxlimited.c +++ b/Modules/xxlimited.c @@ -155,8 +155,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -210,18 +209,15 @@ Xxo_demo(XxoObject *self, PyTypeObject *defining_class, /* Test if the argument is "str" */ if (PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } /* test if the argument is of the Xxo class */ if (PyObject_TypeCheck(o, defining_class)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { diff --git a/Modules/xxlimited_35.c b/Modules/xxlimited_35.c index 8d29c71951768a..361c7e76d77f50 100644 --- a/Modules/xxlimited_35.c +++ b/Modules/xxlimited_35.c @@ -64,11 +64,9 @@ Xxo_demo(XxoObject *self, PyObject *args) return NULL; /* Test availability of fast type checks */ if (o != NULL && PyUnicode_Check(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { @@ -83,8 +81,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -176,8 +173,7 @@ xx_roj(PyObject *self, PyObject *args) long b; if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } diff --git a/Modules/xxmodule.c b/Modules/xxmodule.c index a6e5071d1d6303..a676fdb4ec773a 100644 --- a/Modules/xxmodule.c +++ b/Modules/xxmodule.c @@ -52,8 +52,7 @@ Xxo_demo(XxoObject *self, PyObject *args) { if (!PyArg_ParseTuple(args, ":demo")) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef Xxo_methods[] = { @@ -68,8 +67,7 @@ Xxo_getattro(XxoObject *self, PyObject *name) if (self->x_attr != NULL) { PyObject *v = PyDict_GetItemWithError(self->x_attr, name); if (v != NULL) { - Py_INCREF(v); - 
return v; + return Py_NewRef(v); } else if (PyErr_Occurred()) { return NULL; @@ -195,8 +193,7 @@ xx_bug(PyObject *self, PyObject *args) printf("\n"); /* Py_DECREF(item); */ - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } /* Test bad format character */ @@ -208,8 +205,7 @@ xx_roj(PyObject *self, PyObject *args) long b; if (!PyArg_ParseTuple(args, "O#:roj", &a, &b)) return NULL; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } @@ -266,8 +262,7 @@ static PyTypeObject Str_Type = { static PyObject * null_richcompare(PyObject *self, PyObject *other, int op) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; + return Py_NewRef(Py_NotImplemented); } static PyTypeObject Null_Type = { diff --git a/Modules/xxsubtype.c b/Modules/xxsubtype.c index 12306f2fc5247c..8512baf7cd0a2d 100644 --- a/Modules/xxsubtype.c +++ b/Modules/xxsubtype.c @@ -39,8 +39,7 @@ spamlist_setstate(spamlistobject *self, PyObject *args) if (!PyArg_ParseTuple(args, "i:setstate", &state)) return NULL; self->state = state; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyObject * @@ -53,12 +52,9 @@ spamlist_specialmeth(PyObject *self, PyObject *args, PyObject *kw) self = Py_None; if (kw == NULL) kw = Py_None; - Py_INCREF(self); - PyTuple_SET_ITEM(result, 0, self); - Py_INCREF(args); - PyTuple_SET_ITEM(result, 1, args); - Py_INCREF(kw); - PyTuple_SET_ITEM(result, 2, kw); + PyTuple_SET_ITEM(result, 0, Py_NewRef(self)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(args)); + PyTuple_SET_ITEM(result, 2, Py_NewRef(kw)); } return result; } @@ -164,8 +160,7 @@ spamdict_setstate(spamdictobject *self, PyObject *args) if (!PyArg_ParseTuple(args, "i:setstate", &state)) return NULL; self->state = state; - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static PyMethodDef spamdict_methods[] = { @@ -279,14 +274,12 @@ xxsubtype_exec(PyObject* m) if (PyType_Ready(&spamdict_type) < 0) return -1; - Py_INCREF(&spamlist_type); if (PyModule_AddObject(m, "spamlist", - (PyObject *) &spamlist_type) < 0) + Py_NewRef(&spamlist_type)) < 0) return -1; - Py_INCREF(&spamdict_type); if (PyModule_AddObject(m, "spamdict", - (PyObject *) &spamdict_type) < 0) + Py_NewRef(&spamdict_type)) < 0) return -1; return 0; } diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c index 2fc39a3bd56201..1cdfd01320288b 100644 --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -8,6 +8,11 @@ #include "Python.h" #include "structmember.h" // PyMemberDef #include "zlib.h" +#include "stdbool.h" + +#if defined(ZLIB_VERNUM) && ZLIB_VERNUM < 0x1221 +#error "At least zlib version 1.2.2.1 is required" +#endif // Blocks output buffer wrappers #include "pycore_blocks_output_buffer.h" @@ -171,9 +176,6 @@ OutputBuffer_WindowOnError(_BlocksOutputBuffer *buffer, _Uint32Window *window) } } while (0) #define LEAVE_ZLIB(obj) PyThread_release_lock((obj)->lock); -#if defined(ZLIB_VERNUM) && ZLIB_VERNUM >= 0x1221 -# define AT_LEAST_ZLIB_1_2_2_1 -#endif /* The following parameters are copied from zutil.h, version 0.95 */ #define DEFLATED 8 @@ -185,12 +187,14 @@ OutputBuffer_WindowOnError(_BlocksOutputBuffer *buffer, _Uint32Window *window) /* Initial buffer size. 
*/ #define DEF_BUF_SIZE (16*1024) +#define DEF_MAX_INITIAL_BUF_SIZE (16 * 1024 * 1024) static PyModuleDef zlibmodule; typedef struct { PyTypeObject *Comptype; PyTypeObject *Decomptype; + PyTypeObject *ZlibDecompressorType; PyObject *ZlibError; } zlibstate; @@ -209,7 +213,7 @@ typedef struct PyObject *unused_data; PyObject *unconsumed_tail; char eof; - int is_initialised; + bool is_initialised; PyObject *zdict; PyThread_type_lock lock; } compobject; @@ -320,7 +324,7 @@ static PyObject * zlib_compress_impl(PyObject *module, Py_buffer *data, int level, int wbits) /*[clinic end generated code: output=46bd152fadd66df2 input=c4d06ee5782a7e3f]*/ { - PyObject *RetVal; + PyObject *return_value; int flush; z_stream zst; _BlocksOutputBuffer buffer = {.list = NULL}; @@ -387,11 +391,11 @@ zlib_compress_impl(PyObject *module, Py_buffer *data, int level, int wbits) err = deflateEnd(&zst); if (err == Z_OK) { - RetVal = OutputBuffer_Finish(&buffer, zst.avail_out); - if (RetVal == NULL) { + return_value = OutputBuffer_Finish(&buffer, zst.avail_out); + if (return_value == NULL) { goto error; } - return RetVal; + return return_value; } else zlib_error(state, zst, err, "while finishing compression"); @@ -419,7 +423,7 @@ zlib_decompress_impl(PyObject *module, Py_buffer *data, int wbits, Py_ssize_t bufsize) /*[clinic end generated code: output=77c7e35111dc8c42 input=a9ac17beff1f893f]*/ { - PyObject *RetVal; + PyObject *return_value; Byte *ibuf; Py_ssize_t ibuflen; int err, flush; @@ -514,9 +518,9 @@ zlib_decompress_impl(PyObject *module, Py_buffer *data, int wbits, goto error; } - RetVal = OutputBuffer_WindowFinish(&buffer, &window, zst.avail_out); - if (RetVal != NULL) { - return RetVal; + return_value = OutputBuffer_WindowFinish(&buffer, &window, zst.avail_out); + if (return_value != NULL) { + return return_value; } error: @@ -667,26 +671,17 @@ zlib_decompressobj_impl(PyObject *module, int wbits, PyObject *zdict) self->zst.next_in = NULL; self->zst.avail_in = 0; if (zdict != NULL) { - Py_INCREF(zdict); - self->zdict = zdict; + self->zdict = Py_NewRef(zdict); } int err = inflateInit2(&self->zst, wbits); switch (err) { case Z_OK: self->is_initialised = 1; if (self->zdict != NULL && wbits < 0) { -#ifdef AT_LEAST_ZLIB_1_2_2_1 if (set_inflate_zdict(state, self) < 0) { Py_DECREF(self); return NULL; } -#else - PyErr_Format(state->ZlibError, - "zlib version %s does not allow raw inflate with dictionary", - ZLIB_VERSION); - Py_DECREF(self); - return NULL; -#endif } return (PyObject *)self; case Z_STREAM_ERROR: @@ -753,7 +748,7 @@ zlib_Compress_compress_impl(compobject *self, PyTypeObject *cls, Py_buffer *data) /*[clinic end generated code: output=6731b3f0ff357ca6 input=04d00f65ab01d260]*/ { - PyObject *RetVal; + PyObject *return_value; int err; _BlocksOutputBuffer buffer = {.list = NULL}; zlibstate *state = PyType_GetModuleState(cls); @@ -791,17 +786,17 @@ zlib_Compress_compress_impl(compobject *self, PyTypeObject *cls, } while (ibuflen != 0); - RetVal = OutputBuffer_Finish(&buffer, self->zst.avail_out); - if (RetVal != NULL) { + return_value = OutputBuffer_Finish(&buffer, self->zst.avail_out); + if (return_value != NULL) { goto success; } error: OutputBuffer_OnError(&buffer); - RetVal = NULL; + return_value = NULL; success: LEAVE_ZLIB(self); - return RetVal; + return return_value; } /* Helper for objdecompress() and flush(). 
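
Because the module now rejects old zlib at build time (the #error guard added near the top of zlibmodule.c above), the run-time "does not allow raw inflate with dictionary" fallback removed from zlib_decompressobj_impl() is no longer reachable. ZLIB_VERNUM packs the version as roughly one hex digit per component, so 0x1221 corresponds to 1.2.2.1. A standalone sketch of the same compile-time check (illustrative, not part of the patch):

#include <stdio.h>
#include <zlib.h>

#if defined(ZLIB_VERNUM) && ZLIB_VERNUM < 0x1221
#  error "At least zlib version 1.2.2.1 is required"
#endif

int main(void)
{
    /* e.g. zlib 1.2.11 reports ZLIB_VERNUM=0x12b0 */
    printf("zlib %s (ZLIB_VERNUM=0x%x)\n", ZLIB_VERSION, ZLIB_VERNUM);
    return 0;
}
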
Saves any unconsumed input data in @@ -875,7 +870,7 @@ zlib_Decompress_decompress_impl(compobject *self, PyTypeObject *cls, { int err = Z_OK; Py_ssize_t ibuflen; - PyObject *RetVal; + PyObject *return_value; _BlocksOutputBuffer buffer = {.list = NULL}; PyObject *module = PyType_GetModule(cls); @@ -953,17 +948,17 @@ zlib_Decompress_decompress_impl(compobject *self, PyTypeObject *cls, goto abort; } - RetVal = OutputBuffer_Finish(&buffer, self->zst.avail_out); - if (RetVal != NULL) { + return_value = OutputBuffer_Finish(&buffer, self->zst.avail_out); + if (return_value != NULL) { goto success; } abort: OutputBuffer_OnError(&buffer); - RetVal = NULL; + return_value = NULL; success: LEAVE_ZLIB(self); - return RetVal; + return return_value; } /*[clinic input] @@ -985,7 +980,7 @@ zlib_Compress_flush_impl(compobject *self, PyTypeObject *cls, int mode) /*[clinic end generated code: output=c7efd13efd62add2 input=286146e29442eb6c]*/ { int err; - PyObject *RetVal; + PyObject *return_value; _BlocksOutputBuffer buffer = {.list = NULL}; zlibstate *state = PyType_GetModuleState(cls); @@ -1042,17 +1037,17 @@ zlib_Compress_flush_impl(compobject *self, PyTypeObject *cls, int mode) goto error; } - RetVal = OutputBuffer_Finish(&buffer, self->zst.avail_out); - if (RetVal != NULL) { + return_value = OutputBuffer_Finish(&buffer, self->zst.avail_out); + if (return_value != NULL) { goto success; } error: OutputBuffer_OnError(&buffer); - RetVal = NULL; + return_value = NULL; success: LEAVE_ZLIB(self); - return RetVal; + return return_value; } #ifdef HAVE_ZLIB_COPY @@ -1071,14 +1066,14 @@ zlib_Compress_copy_impl(compobject *self, PyTypeObject *cls) { zlibstate *state = PyType_GetModuleState(cls); - compobject *retval = newcompobject(state->Comptype); - if (!retval) return NULL; + compobject *return_value = newcompobject(state->Comptype); + if (!return_value) return NULL; /* Copy the zstream state * We use ENTER_ZLIB / LEAVE_ZLIB to make this thread-safe */ ENTER_ZLIB(self); - int err = deflateCopy(&retval->zst, &self->zst); + int err = deflateCopy(&return_value->zst, &self->zst); switch (err) { case Z_OK: break; @@ -1093,23 +1088,20 @@ zlib_Compress_copy_impl(compobject *self, PyTypeObject *cls) zlib_error(state, self->zst, err, "while copying compression object"); goto error; } - Py_INCREF(self->unused_data); - Py_XSETREF(retval->unused_data, self->unused_data); - Py_INCREF(self->unconsumed_tail); - Py_XSETREF(retval->unconsumed_tail, self->unconsumed_tail); - Py_XINCREF(self->zdict); - Py_XSETREF(retval->zdict, self->zdict); - retval->eof = self->eof; + Py_XSETREF(return_value->unused_data, Py_NewRef(self->unused_data)); + Py_XSETREF(return_value->unconsumed_tail, Py_NewRef(self->unconsumed_tail)); + Py_XSETREF(return_value->zdict, Py_XNewRef(self->zdict)); + return_value->eof = self->eof; /* Mark it as being initialized */ - retval->is_initialised = 1; + return_value->is_initialised = 1; LEAVE_ZLIB(self); - return (PyObject *)retval; + return (PyObject *)return_value; error: LEAVE_ZLIB(self); - Py_XDECREF(retval); + Py_XDECREF(return_value); return NULL; } @@ -1158,14 +1150,14 @@ zlib_Decompress_copy_impl(compobject *self, PyTypeObject *cls) { zlibstate *state = PyType_GetModuleState(cls); - compobject *retval = newcompobject(state->Decomptype); - if (!retval) return NULL; + compobject *return_value = newcompobject(state->Decomptype); + if (!return_value) return NULL; /* Copy the zstream state * We use ENTER_ZLIB / LEAVE_ZLIB to make this thread-safe */ ENTER_ZLIB(self); - int err = inflateCopy(&retval->zst, 
&self->zst); + int err = inflateCopy(&return_value->zst, &self->zst); switch (err) { case Z_OK: break; @@ -1181,23 +1173,20 @@ zlib_Decompress_copy_impl(compobject *self, PyTypeObject *cls) goto error; } - Py_INCREF(self->unused_data); - Py_XSETREF(retval->unused_data, self->unused_data); - Py_INCREF(self->unconsumed_tail); - Py_XSETREF(retval->unconsumed_tail, self->unconsumed_tail); - Py_XINCREF(self->zdict); - Py_XSETREF(retval->zdict, self->zdict); - retval->eof = self->eof; + Py_XSETREF(return_value->unused_data, Py_NewRef(self->unused_data)); + Py_XSETREF(return_value->unconsumed_tail, Py_NewRef(self->unconsumed_tail)); + Py_XSETREF(return_value->zdict, Py_XNewRef(self->zdict)); + return_value->eof = self->eof; /* Mark it as being initialized */ - retval->is_initialised = 1; + return_value->is_initialised = 1; LEAVE_ZLIB(self); - return (PyObject *)retval; + return (PyObject *)return_value; error: LEAVE_ZLIB(self); - Py_XDECREF(retval); + Py_XDECREF(return_value); return NULL; } @@ -1252,7 +1241,7 @@ zlib_Decompress_flush_impl(compobject *self, PyTypeObject *cls, { int err, flush; Py_buffer data; - PyObject *RetVal; + PyObject *return_value; Py_ssize_t ibuflen; _BlocksOutputBuffer buffer = {.list = NULL}; _Uint32Window window; // output buffer's UINT32_MAX sliding window @@ -1306,13 +1295,6 @@ zlib_Decompress_flush_impl(compobject *self, PyTypeObject *cls, case Z_STREAM_END: break; default: - if (err == Z_NEED_DICT && self->zdict != NULL) { - if (set_inflate_zdict(state, self) < 0) { - goto abort; - } - else - break; - } goto save; } @@ -1336,18 +1318,456 @@ zlib_Decompress_flush_impl(compobject *self, PyTypeObject *cls, } } - RetVal = OutputBuffer_WindowFinish(&buffer, &window, self->zst.avail_out); - if (RetVal != NULL) { + return_value = OutputBuffer_WindowFinish(&buffer, &window, self->zst.avail_out); + if (return_value != NULL) { goto success; } abort: OutputBuffer_WindowOnError(&buffer, &window); - RetVal = NULL; + return_value = NULL; success: PyBuffer_Release(&data); LEAVE_ZLIB(self); - return RetVal; + return return_value; +} + + +typedef struct { + PyObject_HEAD + z_stream zst; + PyObject *zdict; + PyThread_type_lock lock; + PyObject *unused_data; + uint8_t *input_buffer; + Py_ssize_t input_buffer_size; + /* zst>avail_in is only 32 bit, so we store the true length + separately. 
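
The struct comment above points out that z_stream.avail_in is only 32 bits wide; that is why the new decompressor keeps the true input length in a separate Py_ssize_t field (avail_in_real, declared just below) and hands inflate() at most UINT32_MAX bytes at a time. The module's arrange_input_buffer() helper, used later in decompress_buf(), performs this clamping for the real code; the following is only a simplified, hypothetical sketch of that step:

#include <stdint.h>
#include <Python.h>
#include <zlib.h>

/* Hypothetical helper: give zlib at most UINT32_MAX bytes per round and
 * keep the remainder accounted for in a Py_ssize_t counter (assumed >= 0). */
static void
example_clamp_input(z_stream *zst, Py_ssize_t *remaining)
{
    zst->avail_in = (uInt)Py_MIN((size_t)*remaining, (size_t)UINT32_MAX);
    *remaining -= (Py_ssize_t)zst->avail_in;
}
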
Conversion and looping is encapsulated in + decompress_buf() */ + Py_ssize_t avail_in_real; + bool is_initialised; + char eof; /* T_BOOL expects a char */ + char needs_input; +} ZlibDecompressor; + +/*[clinic input] +class zlib.ZlibDecompressor "ZlibDecompressor *" "&ZlibDecompressorType" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=0658178ab94645df]*/ + +static void +ZlibDecompressor_dealloc(ZlibDecompressor *self) +{ + PyObject *type = (PyObject *)Py_TYPE(self); + PyThread_free_lock(self->lock); + if (self->is_initialised) { + inflateEnd(&self->zst); + } + PyMem_Free(self->input_buffer); + Py_CLEAR(self->unused_data); + Py_CLEAR(self->zdict); + PyObject_Free(self); + Py_DECREF(type); +} + +static int +set_inflate_zdict_ZlibDecompressor(zlibstate *state, ZlibDecompressor *self) +{ + Py_buffer zdict_buf; + if (PyObject_GetBuffer(self->zdict, &zdict_buf, PyBUF_SIMPLE) == -1) { + return -1; + } + if ((size_t)zdict_buf.len > UINT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "zdict length does not fit in an unsigned int"); + PyBuffer_Release(&zdict_buf); + return -1; + } + int err; + err = inflateSetDictionary(&self->zst, + zdict_buf.buf, (unsigned int)zdict_buf.len); + PyBuffer_Release(&zdict_buf); + if (err != Z_OK) { + zlib_error(state, self->zst, err, "while setting zdict"); + return -1; + } + return 0; +} + +static Py_ssize_t +arrange_output_buffer_with_maximum(uint32_t *avail_out, + uint8_t **next_out, + PyObject **buffer, + Py_ssize_t length, + Py_ssize_t max_length) +{ + Py_ssize_t occupied; + + if (*buffer == NULL) { + if (!(*buffer = PyBytes_FromStringAndSize(NULL, length))) + return -1; + occupied = 0; + } + else { + occupied = *next_out - (uint8_t *)PyBytes_AS_STRING(*buffer); + + if (length == occupied) { + Py_ssize_t new_length; + assert(length <= max_length); + /* can not scale the buffer over max_length */ + if (length == max_length) + return -2; + if (length <= (max_length >> 1)) + new_length = length << 1; + else + new_length = max_length; + if (_PyBytes_Resize(buffer, new_length) < 0) + return -1; + length = new_length; + } + } + + *avail_out = (uint32_t)Py_MIN((size_t)(length - occupied), UINT32_MAX); + *next_out = (uint8_t *)PyBytes_AS_STRING(*buffer) + occupied; + + return length; +} + +/* Decompress data of length self->avail_in_real in self->state.next_in. The + output buffer is allocated dynamically and returned. If the max_length is + of sufficiently low size, max_length is allocated immediately. At most + max_length bytes are returned, so some of the input may not be consumed. + self->state.next_in and self->avail_in_real are updated to reflect the + consumed input. */ +static PyObject* +decompress_buf(ZlibDecompressor *self, Py_ssize_t max_length) +{ + /* data_size is strictly positive, but because we repeatedly have to + compare against max_length and PyBytes_GET_SIZE we declare it as + signed */ + PyObject *return_value = NULL; + Py_ssize_t hard_limit; + Py_ssize_t obuflen; + zlibstate *state = PyType_GetModuleState(Py_TYPE(self)); + + int err = Z_OK; + + /* When sys.maxsize is passed as default use DEF_BUF_SIZE as start buffer. + In this particular case the data may not necessarily be very big, so + it is better to grow dynamically.*/ + if ((max_length < 0) || max_length == PY_SSIZE_T_MAX) { + hard_limit = PY_SSIZE_T_MAX; + obuflen = DEF_BUF_SIZE; + } else { + /* Assume that decompressor is used in file decompression with a fixed + block size of max_length. 
In that case we will reach max_length almost + always (except at the end of the file). So it makes sense to allocate + max_length. */ + hard_limit = max_length; + obuflen = max_length; + if (obuflen > DEF_MAX_INITIAL_BUF_SIZE){ + // Safeguard against memory overflow. + obuflen = DEF_MAX_INITIAL_BUF_SIZE; + } + } + + do { + arrange_input_buffer(&(self->zst), &(self->avail_in_real)); + + do { + obuflen = arrange_output_buffer_with_maximum(&(self->zst.avail_out), + &(self->zst.next_out), + &return_value, + obuflen, + hard_limit); + if (obuflen == -1){ + PyErr_SetString(PyExc_MemoryError, + "Insufficient memory for buffer allocation"); + goto error; + } + else if (obuflen == -2) { + break; + } + Py_BEGIN_ALLOW_THREADS + err = inflate(&self->zst, Z_SYNC_FLUSH); + Py_END_ALLOW_THREADS + switch (err) { + case Z_OK: /* fall through */ + case Z_BUF_ERROR: /* fall through */ + case Z_STREAM_END: + break; + default: + if (err == Z_NEED_DICT) { + goto error; + } + else { + break; + } + } + } while (self->zst.avail_out == 0); + } while(err != Z_STREAM_END && self->avail_in_real != 0); + + if (err == Z_STREAM_END) { + self->eof = 1; + self->is_initialised = 0; + /* Unlike the Decompress object we call inflateEnd here as there are no + backwards compatibility issues */ + err = inflateEnd(&self->zst); + if (err != Z_OK) { + zlib_error(state, self->zst, err, "while finishing decompression"); + goto error; + } + } else if (err != Z_OK && err != Z_BUF_ERROR) { + zlib_error(state, self->zst, err, "while decompressing data"); + } + + self->avail_in_real += self->zst.avail_in; + + if (_PyBytes_Resize(&return_value, self->zst.next_out - + (uint8_t *)PyBytes_AS_STRING(return_value)) != 0) { + goto error; + } + + goto success; +error: + Py_CLEAR(return_value); +success: + return return_value; +} + + +static PyObject * +decompress(ZlibDecompressor *self, uint8_t *data, + size_t len, Py_ssize_t max_length) +{ + bool input_buffer_in_use; + PyObject *result; + + /* Prepend unconsumed input if necessary */ + if (self->zst.next_in != NULL) { + size_t avail_now, avail_total; + + /* Number of bytes we can append to input buffer */ + avail_now = (self->input_buffer + self->input_buffer_size) + - (self->zst.next_in + self->avail_in_real); + + /* Number of bytes we can append if we move existing + contents to beginning of buffer (overwriting + consumed input) */ + avail_total = self->input_buffer_size - self->avail_in_real; + + if (avail_total < len) { + size_t offset = self->zst.next_in - self->input_buffer; + uint8_t *tmp; + size_t new_size = self->input_buffer_size + len - avail_now; + + /* Assign to temporary variable first, so we don't + lose address of allocated buffer if realloc fails */ + tmp = PyMem_Realloc(self->input_buffer, new_size); + if (tmp == NULL) { + PyErr_SetNone(PyExc_MemoryError); + return NULL; + } + self->input_buffer = tmp; + self->input_buffer_size = new_size; + + self->zst.next_in = self->input_buffer + offset; + } + else if (avail_now < len) { + memmove(self->input_buffer, self->zst.next_in, + self->avail_in_real); + self->zst.next_in = self->input_buffer; + } + memcpy((void*)(self->zst.next_in + self->avail_in_real), data, len); + self->avail_in_real += len; + input_buffer_in_use = 1; + } + else { + self->zst.next_in = data; + self->avail_in_real = len; + input_buffer_in_use = 0; + } + + result = decompress_buf(self, max_length); + if(result == NULL) { + self->zst.next_in = NULL; + return NULL; + } + + if (self->eof) { + self->needs_input = 0; + + if (self->avail_in_real > 0) { + PyObject 
*unused_data = PyBytes_FromStringAndSize( + (char *)self->zst.next_in, self->avail_in_real); + if (unused_data == NULL) { + goto error; + } + Py_XSETREF(self->unused_data, unused_data); + } + } + else if (self->avail_in_real == 0) { + self->zst.next_in = NULL; + self->needs_input = 1; + } + else { + self->needs_input = 0; + + /* If we did not use the input buffer, we now have + to copy the tail from the caller's buffer into the + input buffer */ + if (!input_buffer_in_use) { + + /* Discard buffer if it's too small + (resizing it may needlessly copy the current contents) */ + if (self->input_buffer != NULL && + self->input_buffer_size < self->avail_in_real) { + PyMem_Free(self->input_buffer); + self->input_buffer = NULL; + } + + /* Allocate if necessary */ + if (self->input_buffer == NULL) { + self->input_buffer = PyMem_Malloc(self->avail_in_real); + if (self->input_buffer == NULL) { + PyErr_SetNone(PyExc_MemoryError); + goto error; + } + self->input_buffer_size = self->avail_in_real; + } + + /* Copy tail */ + memcpy(self->input_buffer, self->zst.next_in, self->avail_in_real); + self->zst.next_in = self->input_buffer; + } + } + return result; + +error: + Py_XDECREF(result); + return NULL; +} + +/*[clinic input] +zlib.ZlibDecompressor.decompress + + data: Py_buffer + max_length: Py_ssize_t=-1 + +Decompress *data*, returning uncompressed data as bytes. + +If *max_length* is nonnegative, returns at most *max_length* bytes of +decompressed data. If this limit is reached and further output can be +produced, *self.needs_input* will be set to ``False``. In this case, the next +call to *decompress()* may provide *data* as b'' to obtain more of the output. + +If all of the input data was decompressed and returned (either because this +was less than *max_length* bytes, or because *max_length* was negative), +*self.needs_input* will be set to True. + +Attempting to decompress data after the end of stream is reached raises an +EOFError. Any data found after the end of the stream is ignored and saved in +the unused_data attribute. +[clinic start generated code]*/ + +static PyObject * +zlib_ZlibDecompressor_decompress_impl(ZlibDecompressor *self, + Py_buffer *data, Py_ssize_t max_length) +/*[clinic end generated code: output=990d32787b775f85 input=0b29d99715250b96]*/ + +{ + PyObject *result = NULL; + + ENTER_ZLIB(self); + if (self->eof) { + PyErr_SetString(PyExc_EOFError, "End of stream already reached"); + } + else { + result = decompress(self, data->buf, data->len, max_length); + } + LEAVE_ZLIB(self); + return result; +} + +PyDoc_STRVAR(ZlibDecompressor__new____doc__, +"_ZlibDecompressor(wbits=15, zdict=b\'\')\n" +"--\n" +"\n" +"Create a decompressor object for decompressing data incrementally.\n" +"\n" +" wbits = 15\n" +" zdict\n" +" The predefined compression dictionary. This is a sequence of bytes\n" +" (such as a bytes object) containing subsequences that are expected\n" +" to occur frequently in the data that is to be compressed. Those\n" +" subsequences that are expected to be most common should come at the\n" +" end of the dictionary. 
This must be the same dictionary as used by the\n" +" compressor that produced the input data.\n" +"\n"); + +static PyObject * +ZlibDecompressor__new__(PyTypeObject *cls, + PyObject *args, + PyObject *kwargs) +{ + static char *keywords[] = {"wbits", "zdict", NULL}; + static const char * const format = "|iO:_ZlibDecompressor"; + int wbits = MAX_WBITS; + PyObject *zdict = NULL; + zlibstate *state = PyType_GetModuleState(cls); + + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, format, keywords, &wbits, &zdict)) { + return NULL; + } + ZlibDecompressor *self = PyObject_New(ZlibDecompressor, cls); + self->eof = 0; + self->needs_input = 1; + self->avail_in_real = 0; + self->input_buffer = NULL; + self->input_buffer_size = 0; + self->zdict = Py_XNewRef(zdict); + self->zst.opaque = NULL; + self->zst.zalloc = PyZlib_Malloc; + self->zst.zfree = PyZlib_Free; + self->zst.next_in = NULL; + self->zst.avail_in = 0; + self->unused_data = PyBytes_FromStringAndSize(NULL, 0); + if (self->unused_data == NULL) { + Py_CLEAR(self); + return NULL; + } + self->lock = PyThread_allocate_lock(); + if (self->lock == NULL) { + Py_DECREF(self); + PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock"); + return NULL; + } + int err = inflateInit2(&(self->zst), wbits); + switch (err) { + case Z_OK: + self->is_initialised = 1; + if (self->zdict != NULL && wbits < 0) { + if (set_inflate_zdict_ZlibDecompressor(state, self) < 0) { + Py_DECREF(self); + return NULL; + } + } + return (PyObject *)self; + case Z_STREAM_ERROR: + Py_DECREF(self); + PyErr_SetString(PyExc_ValueError, "Invalid initialization option"); + return NULL; + case Z_MEM_ERROR: + Py_DECREF(self); + PyErr_SetString(PyExc_MemoryError, + "Can't allocate memory for decompression object"); + return NULL; + default: + zlib_error(state, self->zst, err, "while creating decompression object"); + Py_DECREF(self); + return NULL; + } } #include "clinic/zlibmodule.c.h" @@ -1372,6 +1792,11 @@ static PyMethodDef Decomp_methods[] = {NULL, NULL} }; +static PyMethodDef ZlibDecompressor_methods[] = { + ZLIB_ZLIBDECOMPRESSOR_DECOMPRESS_METHODDEF + {NULL} +}; + #define COMP_OFF(x) offsetof(compobject, x) static PyMemberDef Decomp_members[] = { {"unused_data", T_OBJECT, COMP_OFF(unused_data), READONLY}, @@ -1380,6 +1805,26 @@ static PyMemberDef Decomp_members[] = { {NULL}, }; +PyDoc_STRVAR(ZlibDecompressor_eof__doc__, +"True if the end-of-stream marker has been reached."); + +PyDoc_STRVAR(ZlibDecompressor_unused_data__doc__, +"Data found after the end of the compressed stream."); + +PyDoc_STRVAR(ZlibDecompressor_needs_input_doc, +"True if more input is needed before more decompressed data can be produced."); + +static PyMemberDef ZlibDecompressor_members[] = { + {"eof", T_BOOL, offsetof(ZlibDecompressor, eof), + READONLY, ZlibDecompressor_eof__doc__}, + {"unused_data", T_OBJECT_EX, offsetof(ZlibDecompressor, unused_data), + READONLY, ZlibDecompressor_unused_data__doc__}, + {"needs_input", T_BOOL, offsetof(ZlibDecompressor, needs_input), READONLY, + ZlibDecompressor_needs_input_doc}, + {NULL}, +}; + + /*[clinic input] zlib.adler32 @@ -1497,6 +1942,25 @@ static PyType_Spec Decomptype_spec = { .slots = Decomptype_slots, }; +static PyType_Slot ZlibDecompressor_type_slots[] = { + {Py_tp_dealloc, ZlibDecompressor_dealloc}, + {Py_tp_members, ZlibDecompressor_members}, + {Py_tp_new, ZlibDecompressor__new__}, + {Py_tp_doc, (char *)ZlibDecompressor__new____doc__}, + {Py_tp_methods, ZlibDecompressor_methods}, + {0, 0}, +}; + +static PyType_Spec ZlibDecompressor_type_spec = { + .name = 
"zlib._ZlibDecompressor", + .basicsize = sizeof(ZlibDecompressor), + // Calling PyType_GetModuleState() on a subclass is not safe. + // ZlibDecompressor_type_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE), + .slots = ZlibDecompressor_type_slots, +}; PyDoc_STRVAR(zlib_module_documentation, "The functions in this module allow compression and decompression using the\n" "zlib library, which is based on GNU zip.\n" @@ -1518,6 +1982,7 @@ zlib_clear(PyObject *mod) zlibstate *state = get_zlib_state(mod); Py_CLEAR(state->Comptype); Py_CLEAR(state->Decomptype); + Py_CLEAR(state->ZlibDecompressorType); Py_CLEAR(state->ZlibError); return 0; } @@ -1528,6 +1993,7 @@ zlib_traverse(PyObject *mod, visitproc visit, void *arg) zlibstate *state = get_zlib_state(mod); Py_VISIT(state->Comptype); Py_VISIT(state->Decomptype); + Py_VISIT(state->ZlibDecompressorType); Py_VISIT(state->ZlibError); return 0; } @@ -1555,16 +2021,26 @@ zlib_exec(PyObject *mod) return -1; } + state->ZlibDecompressorType = (PyTypeObject *)PyType_FromModuleAndSpec( + mod, &ZlibDecompressor_type_spec, NULL); + if (state->ZlibDecompressorType == NULL) { + return -1; + } + state->ZlibError = PyErr_NewException("zlib.error", NULL, NULL); if (state->ZlibError == NULL) { return -1; } - Py_INCREF(state->ZlibError); - if (PyModule_AddObject(mod, "error", state->ZlibError) < 0) { + if (PyModule_AddObject(mod, "error", Py_NewRef(state->ZlibError)) < 0) { Py_DECREF(state->ZlibError); return -1; } + if (PyModule_AddObject(mod, "_ZlibDecompressor", + Py_NewRef(state->ZlibDecompressorType)) < 0) { + Py_DECREF(state->ZlibDecompressorType); + return -1; + } #define ZLIB_ADD_INT_MACRO(c) \ do { \ diff --git a/Objects/abstract.c b/Objects/abstract.c index 5d50491b2dd347..9dc74fb9c2608c 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -45,8 +45,7 @@ PyObject_Type(PyObject *o) } v = (PyObject *)Py_TYPE(o); - Py_INCREF(v); - return v; + return Py_NewRef(v); } Py_ssize_t @@ -722,9 +721,7 @@ PyBuffer_FillInfo(Py_buffer *view, PyObject *obj, void *buf, Py_ssize_t len, return -1; } - view->obj = obj; - if (obj) - Py_INCREF(obj); + view->obj = Py_XNewRef(obj); view->buf = buf; view->len = len; view->readonly = readonly; @@ -776,8 +773,7 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) /* Fast path for common types. */ if (format_spec == NULL || PyUnicode_GET_LENGTH(format_spec) == 0) { if (PyUnicode_CheckExact(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } if (PyLong_CheckExact(obj)) { return PyObject_Str(obj); @@ -810,8 +806,7 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) PyErr_Format(PyExc_TypeError, "__format__ must return a str, not %.200s", Py_TYPE(result)->tp_name); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); goto done; } @@ -1405,8 +1400,7 @@ _PyNumber_Index(PyObject *item) } if (PyLong_Check(item)) { - Py_INCREF(item); - return item; + return Py_NewRef(item); } if (!_PyIndex_Check(item)) { PyErr_Format(PyExc_TypeError, @@ -1520,8 +1514,7 @@ PyNumber_Long(PyObject *o) } if (PyLong_CheckExact(o)) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } m = Py_TYPE(o)->tp_as_number; if (m && m->nb_int) { /* This should include subclasses of int */ @@ -2045,8 +2038,7 @@ PySequence_Tuple(PyObject *v) a tuple *subclass* instance as-is, hence the restriction to exact tuples here. 
In contrast, lists always make a copy, so there's no need for exactness below. */ - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyList_CheckExact(v)) return PyList_AsTuple(v); @@ -2144,8 +2136,7 @@ PySequence_Fast(PyObject *v, const char *m) } if (PyList_CheckExact(v) || PyTuple_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } it = PyObject_GetIter(v); @@ -2799,8 +2790,7 @@ PyObject_GetIter(PyObject *o) "iter() returned non-iterator " "of type '%.100s'", Py_TYPE(res)->tp_name); - Py_DECREF(res); - res = NULL; + Py_SETREF(res, NULL); } return res; } @@ -2820,8 +2810,7 @@ PyObject_GetAIter(PyObject *o) { PyErr_Format(PyExc_TypeError, "aiter() returned not an async iterator of type '%.100s'", Py_TYPE(it)->tp_name); - Py_DECREF(it); - it = NULL; + Py_SETREF(it, NULL); } return it; } diff --git a/Objects/boolobject.c b/Objects/boolobject.c index 8a20e368d4a42b..18666f88cbde10 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -23,8 +23,7 @@ PyObject *PyBool_FromLong(long ok) result = Py_True; else result = Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* We define bool_new to always return either Py_True or Py_False */ diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index b2962fd137d93e..0ba6fb5b76ccc7 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -313,8 +313,7 @@ bytearray_iconcat(PyByteArrayObject *self, PyObject *other) } memcpy(PyByteArray_AS_STRING(self) + size, vo.buf, vo.len); PyBuffer_Release(&vo); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -340,8 +339,7 @@ bytearray_irepeat(PyByteArrayObject *self, Py_ssize_t count) if (count < 0) count = 0; else if (count == 1) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } const Py_ssize_t mysize = Py_SIZE(self); @@ -354,8 +352,7 @@ bytearray_irepeat(PyByteArrayObject *self, Py_ssize_t count) char* buf = PyByteArray_AS_STRING(self); _PyBytes_Repeat(buf, size, buf, mysize); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static PyObject * @@ -2477,8 +2474,7 @@ bytearray_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyByteArrayObject *)seq; + it->it_seq = (PyByteArrayObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 80660881920fb7..a63f396e022f71 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -53,8 +53,7 @@ static inline PyObject* bytes_get_empty(void) // Return a strong reference to the empty bytes string singleton. 
static inline PyObject* bytes_new_empty(void) { - Py_INCREF(EMPTY); - return (PyObject *)EMPTY; + return Py_NewRef(EMPTY); } @@ -126,8 +125,7 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) } if (size == 1 && str != NULL) { op = CHARACTER(*str & 255); - Py_INCREF(op); - return (PyObject *)op; + return Py_NewRef(op); } if (size == 0) { return bytes_new_empty(); @@ -162,8 +160,7 @@ PyBytes_FromString(const char *str) } else if (size == 1) { op = CHARACTER(*str & 255); - Py_INCREF(op); - return (PyObject *)op; + return Py_NewRef(op); } /* Inline PyObject_NewVar */ @@ -527,14 +524,12 @@ format_obj(PyObject *v, const char **pbuf, Py_ssize_t *plen) if (PyBytes_Check(v)) { *pbuf = PyBytes_AS_STRING(v); *plen = PyBytes_GET_SIZE(v); - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (PyByteArray_Check(v)) { *pbuf = PyByteArray_AS_STRING(v); *plen = PyByteArray_GET_SIZE(v); - Py_INCREF(v); - return v; + return Py_NewRef(v); } /* does it support __bytes__? */ func = _PyObject_LookupSpecial(v, &_Py_ID(__bytes__)); @@ -1411,13 +1406,11 @@ bytes_concat(PyObject *a, PyObject *b) /* Optimize end cases */ if (va.len == 0 && PyBytes_CheckExact(b)) { - result = b; - Py_INCREF(result); + result = Py_NewRef(b); goto done; } if (vb.len == 0 && PyBytes_CheckExact(a)) { - result = a; - Py_INCREF(result); + result = Py_NewRef(a); goto done; } @@ -1458,8 +1451,7 @@ bytes_repeat(PyBytesObject *a, Py_ssize_t n) } size = Py_SIZE(a) * n; if (size == Py_SIZE(a) && PyBytes_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } nbytes = (size_t)size; if (nbytes + PyBytesObject_SIZE <= nbytes) { @@ -1625,8 +1617,7 @@ bytes_subscript(PyBytesObject* self, PyObject* item) else if (start == 0 && step == 1 && slicelength == PyBytes_GET_SIZE(self) && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else if (step == 1) { return PyBytes_FromStringAndSize( @@ -1696,8 +1687,7 @@ bytes___bytes___impl(PyBytesObject *self) /*[clinic end generated code: output=63a306a9bc0caac5 input=34ec5ddba98bd6bb]*/ { if (PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { return PyBytes_FromStringAndSize(self->ob_sval, Py_SIZE(self)); @@ -1922,8 +1912,7 @@ do_xstrip(PyBytesObject *self, int striptype, PyObject *sepobj) PyBuffer_Release(&vsep); if (i == 0 && j == len && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } else return PyBytes_FromStringAndSize(s+i, j-i); @@ -1952,8 +1941,7 @@ do_strip(PyBytesObject *self, int striptype) } if (i == 0 && j == len && PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject*)self; + return Py_NewRef(self); } else return PyBytes_FromStringAndSize(s+i, j-i); @@ -2121,9 +2109,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, changed = 1; } if (!changed && PyBytes_CheckExact(input_obj)) { - Py_INCREF(input_obj); - Py_DECREF(result); - result = input_obj; + Py_SETREF(result, Py_NewRef(input_obj)); } PyBuffer_Release(&del_table_view); PyBuffer_Release(&table_view); @@ -2152,8 +2138,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, } if (!changed && PyBytes_CheckExact(input_obj)) { Py_DECREF(result); - Py_INCREF(input_obj); - return input_obj; + return Py_NewRef(input_obj); } /* Fix the size of the resulting byte string */ if (inlen > 0) @@ -2245,8 +2230,7 @@ bytes_removeprefix_impl(PyBytesObject *self, Py_buffer *prefix) } if (PyBytes_CheckExact(self)) { - Py_INCREF(self); 
- return (PyObject *)self; + return Py_NewRef(self); } return PyBytes_FromStringAndSize(self_start, self_len); @@ -2284,8 +2268,7 @@ bytes_removesuffix_impl(PyBytesObject *self, Py_buffer *suffix) } if (PyBytes_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } return PyBytes_FromStringAndSize(self_start, self_len); @@ -2844,8 +2827,7 @@ PyBytes_FromObject(PyObject *x) } if (PyBytes_CheckExact(x)) { - Py_INCREF(x); - return x; + return Py_NewRef(x); } /* Use the modern buffer interface */ @@ -3269,8 +3251,7 @@ bytes_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyBytesObject *)seq; + it->it_seq = (PyBytesObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/call.c b/Objects/call.c index c2509db2a9a263..bd027e41f8a9a5 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgsTstate() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() +#include "pycore_dict.h" // _PyDict_FromItems() #include "pycore_object.h" // _PyCFunctionWithKeywords_TrampolineCall() #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -1000,8 +1001,7 @@ _PyStack_UnpackDict(PyThreadState *tstate, /* Copy positional arguments */ for (Py_ssize_t i = 0; i < nargs; i++) { - Py_INCREF(args[i]); - stack[i] = args[i]; + stack[i] = Py_NewRef(args[i]); } PyObject **kwstack = stack + nargs; @@ -1013,10 +1013,8 @@ _PyStack_UnpackDict(PyThreadState *tstate, unsigned long keys_are_strings = Py_TPFLAGS_UNICODE_SUBCLASS; while (PyDict_Next(kwargs, &pos, &key, &value)) { keys_are_strings &= Py_TYPE(key)->tp_flags; - Py_INCREF(key); - Py_INCREF(value); - PyTuple_SET_ITEM(kwnames, i, key); - kwstack[i] = value; + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(key)); + kwstack[i] = Py_NewRef(value); i++; } diff --git a/Objects/capsule.c b/Objects/capsule.c index 606e50e6961133..baaddb3f1f0849 100644 --- a/Objects/capsule.c +++ b/Objects/capsule.c @@ -220,8 +220,7 @@ PyCapsule_Import(const char *name, int no_block) } } else { PyObject *object2 = PyObject_GetAttrString(object, trace); - Py_DECREF(object); - object = object2; + Py_SETREF(object, object2); } if (!object) { goto EXIT; diff --git a/Objects/cellobject.c b/Objects/cellobject.c index 1ddf4c5d8b8bdc..f516707f6f8086 100644 --- a/Objects/cellobject.c +++ b/Objects/cellobject.c @@ -11,8 +11,7 @@ PyCell_New(PyObject *obj) op = (PyCellObject *)PyObject_GC_New(PyCellObject, &PyCell_Type); if (op == NULL) return NULL; - op->ob_ref = obj; - Py_XINCREF(obj); + op->ob_ref = Py_XNewRef(obj); _PyObject_GC_TRACK(op); return (PyObject *)op; @@ -68,8 +67,7 @@ PyCell_Set(PyObject *op, PyObject *value) return -1; } PyObject *old_value = PyCell_GET(op); - Py_XINCREF(value); - PyCell_SET(op, value); + PyCell_SET(op, Py_XNewRef(value)); Py_XDECREF(old_value); return 0; } @@ -135,15 +133,13 @@ cell_get_contents(PyCellObject *op, void *closure) PyErr_SetString(PyExc_ValueError, "Cell is empty"); return NULL; } - Py_INCREF(op->ob_ref); - return op->ob_ref; + return Py_NewRef(op->ob_ref); } static int cell_set_contents(PyCellObject *op, PyObject *obj, void *Py_UNUSED(ignored)) { - Py_XINCREF(obj); - Py_XSETREF(op->ob_ref, obj); + Py_XSETREF(op->ob_ref, Py_XNewRef(obj)); return 0; } diff --git a/Objects/classobject.c b/Objects/classobject.c index b9708ba0e41a3b..2cb192e725d40d 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ 
-113,10 +113,8 @@ PyMethod_New(PyObject *func, PyObject *self) return NULL; } im->im_weakreflist = NULL; - Py_INCREF(func); - im->im_func = func; - Py_INCREF(self); - im->im_self = self; + im->im_func = Py_NewRef(func); + im->im_self = Py_NewRef(self); im->vectorcall = method_vectorcall; _PyObject_GC_TRACK(im); return (PyObject *)im; @@ -195,8 +193,7 @@ method_getattro(PyObject *obj, PyObject *name) if (f != NULL) return f(descr, obj, (PyObject *)Py_TYPE(obj)); else { - Py_INCREF(descr); - return descr; + return Py_NewRef(descr); } } @@ -267,8 +264,7 @@ method_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -287,8 +283,7 @@ method_repr(PyMethodObject *a) } if (funcname != NULL && !PyUnicode_Check(funcname)) { - Py_DECREF(funcname); - funcname = NULL; + Py_SETREF(funcname, NULL); } /* XXX Shouldn't use repr()/%R here! */ @@ -359,8 +354,7 @@ PyInstanceMethod_New(PyObject *func) { method = PyObject_GC_New(PyInstanceMethodObject, &PyInstanceMethod_Type); if (method == NULL) return NULL; - Py_INCREF(func); - method->func = func; + method->func = Py_NewRef(func); _PyObject_GC_TRACK(method); return (PyObject *)method; } @@ -412,8 +406,7 @@ instancemethod_getattro(PyObject *self, PyObject *name) if (f != NULL) return f(descr, self, (PyObject *)Py_TYPE(self)); else { - Py_INCREF(descr); - return descr; + return Py_NewRef(descr); } } @@ -443,8 +436,7 @@ static PyObject * instancemethod_descr_get(PyObject *descr, PyObject *obj, PyObject *type) { PyObject *func = PyInstanceMethod_GET_FUNCTION(descr); if (obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } else return PyMethod_New(func, obj); @@ -472,8 +464,7 @@ instancemethod_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? 
Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static PyObject * @@ -492,8 +483,7 @@ instancemethod_repr(PyObject *self) return NULL; } if (funcname != NULL && !PyUnicode_Check(funcname)) { - Py_DECREF(funcname); - funcname = NULL; + Py_SETREF(funcname, NULL); } result = PyUnicode_FromFormat("", diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 14d1d00684aedf..fc1db72977aa01 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -150,7 +150,22 @@ validate_and_copy_tuple(PyObject *tup) return newtuple; } +static int +init_co_cached(PyCodeObject *self) { + if (self->_co_cached == NULL) { + self->_co_cached = PyMem_New(_PyCoCached, 1); + if (self->_co_cached == NULL) { + PyErr_NoMemory(); + return -1; + } + self->_co_cached->_co_code = NULL; + self->_co_cached->_co_cellvars = NULL; + self->_co_cached->_co_freevars = NULL; + self->_co_cached->_co_varnames = NULL; + } + return 0; +} /****************** * _PyCode_New() ******************/ @@ -160,8 +175,7 @@ void _Py_set_localsplus_info(int offset, PyObject *name, _PyLocals_Kind kind, PyObject *names, PyObject *kinds) { - Py_INCREF(name); - PyTuple_SET_ITEM(names, offset, name); + PyTuple_SET_ITEM(names, offset, Py_NewRef(name)); _PyLocals_SetKind(kinds, offset, kind); } @@ -220,8 +234,7 @@ get_localsplus_names(PyCodeObject *co, _PyLocals_Kind kind, int num) } assert(index < num); PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, offset); - Py_INCREF(name); - PyTuple_SET_ITEM(names, index, name); + PyTuple_SET_ITEM(names, index, Py_NewRef(name)); index += 1; } assert(index == num); @@ -286,6 +299,8 @@ _PyCode_Validate(struct _PyCodeConstructor *con) return 0; } +extern void _PyCode_Quicken(PyCodeObject *code); + static void init_code(PyCodeObject *co, struct _PyCodeConstructor *con) { @@ -294,27 +309,19 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) get_localsplus_counts(con->localsplusnames, con->localspluskinds, &nlocals, &nplaincellvars, &ncellvars, &nfreevars); - Py_INCREF(con->filename); - co->co_filename = con->filename; - Py_INCREF(con->name); - co->co_name = con->name; - Py_INCREF(con->qualname); - co->co_qualname = con->qualname; + co->co_filename = Py_NewRef(con->filename); + co->co_name = Py_NewRef(con->name); + co->co_qualname = Py_NewRef(con->qualname); co->co_flags = con->flags; co->co_firstlineno = con->firstlineno; - Py_INCREF(con->linetable); - co->co_linetable = con->linetable; + co->co_linetable = Py_NewRef(con->linetable); - Py_INCREF(con->consts); - co->co_consts = con->consts; - Py_INCREF(con->names); - co->co_names = con->names; + co->co_consts = Py_NewRef(con->consts); + co->co_names = Py_NewRef(con->names); - Py_INCREF(con->localsplusnames); - co->co_localsplusnames = con->localsplusnames; - Py_INCREF(con->localspluskinds); - co->co_localspluskinds = con->localspluskinds; + co->co_localsplusnames = Py_NewRef(con->localsplusnames); + co->co_localspluskinds = Py_NewRef(con->localspluskinds); co->co_argcount = con->argcount; co->co_posonlyargcount = con->posonlyargcount; @@ -322,8 +329,7 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) co->co_stacksize = con->stacksize; - Py_INCREF(con->exceptiontable); - co->co_exceptiontable = con->exceptiontable; + co->co_exceptiontable = Py_NewRef(con->exceptiontable); /* derived values */ co->co_nlocalsplus = nlocalsplus; @@ -336,9 +342,8 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) /* not set */ co->co_weakreflist = NULL; co->co_extra = NULL; - co->_co_code = NULL; + 
co->_co_cached = NULL; - co->co_warmup = QUICKENING_INITIAL_WARMUP_VALUE; co->_co_linearray_entry_size = 0; co->_co_linearray = NULL; memcpy(_PyCode_CODE(co), PyBytes_AS_STRING(con->code), @@ -349,6 +354,7 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) entry_point++; } co->_co_firsttraceable = entry_point; + _PyCode_Quicken(co); } static int @@ -1127,8 +1133,7 @@ lineiter_next(lineiterator *li) start = PyLong_FromLong(bounds->ar_start); end = PyLong_FromLong(bounds->ar_end); if (bounds->ar_line < 0) { - Py_INCREF(Py_None); - line = Py_None; + line = Py_NewRef(Py_None); } else { line = PyLong_FromLong(bounds->ar_line); @@ -1198,8 +1203,7 @@ new_linesiterator(PyCodeObject *code) if (li == NULL) { return NULL; } - Py_INCREF(code); - li->li_code = code; + li->li_code = (PyCodeObject*)Py_NewRef(code); _PyCode_InitAddressRange(code, &li->li_line); return li; } @@ -1298,8 +1302,7 @@ code_positionsiterator(PyCodeObject* code, PyObject* Py_UNUSED(args)) if (pi == NULL) { return NULL; } - Py_INCREF(code); - pi->pi_code = code; + pi->pi_code = (PyCodeObject*)Py_NewRef(code); _PyCode_InitAddressRange(code, &pi->pi_range); pi->pi_offset = pi->pi_range.ar_end; return (PyObject*)pi; @@ -1384,10 +1387,31 @@ _PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) * other PyCodeObject accessor functions ******************/ +static PyObject * +get_cached_locals(PyCodeObject *co, PyObject **cached_field, + _PyLocals_Kind kind, int num) +{ + assert(cached_field != NULL); + assert(co->_co_cached != NULL); + if (*cached_field != NULL) { + return Py_NewRef(*cached_field); + } + assert(*cached_field == NULL); + PyObject *varnames = get_localsplus_names(co, kind, num); + if (varnames == NULL) { + return NULL; + } + *cached_field = Py_NewRef(varnames); + return varnames; +} + PyObject * _PyCode_GetVarnames(PyCodeObject *co) { - return get_localsplus_names(co, CO_FAST_LOCAL, co->co_nlocals); + if (init_co_cached(co)) { + return NULL; + } + return get_cached_locals(co, &co->_co_cached->_co_varnames, CO_FAST_LOCAL, co->co_nlocals); } PyObject * @@ -1399,7 +1423,10 @@ PyCode_GetVarnames(PyCodeObject *code) PyObject * _PyCode_GetCellvars(PyCodeObject *co) { - return get_localsplus_names(co, CO_FAST_CELL, co->co_ncellvars); + if (init_co_cached(co)) { + return NULL; + } + return get_cached_locals(co, &co->_co_cached->_co_cellvars, CO_FAST_CELL, co->co_ncellvars); } PyObject * @@ -1411,7 +1438,10 @@ PyCode_GetCellvars(PyCodeObject *code) PyObject * _PyCode_GetFreevars(PyCodeObject *co) { - return get_localsplus_names(co, CO_FAST_FREE, co->co_nfreevars); + if (init_co_cached(co)) { + return NULL; + } + return get_cached_locals(co, &co->_co_cached->_co_freevars, CO_FAST_FREE, co->co_nfreevars); } PyObject * @@ -1437,8 +1467,11 @@ deopt_code(_Py_CODEUNIT *instructions, Py_ssize_t len) PyObject * _PyCode_GetCode(PyCodeObject *co) { - if (co->_co_code != NULL) { - return Py_NewRef(co->_co_code); + if (init_co_cached(co)) { + return NULL; + } + if (co->_co_cached->_co_code != NULL) { + return Py_NewRef(co->_co_cached->_co_code); } PyObject *code = PyBytes_FromStringAndSize((const char *)_PyCode_CODE(co), _PyCode_NBYTES(co)); @@ -1446,8 +1479,8 @@ _PyCode_GetCode(PyCodeObject *co) return NULL; } deopt_code((_Py_CODEUNIT *)PyBytes_AS_STRING(code), Py_SIZE(co)); - assert(co->_co_code == NULL); - co->_co_code = Py_NewRef(code); + assert(co->_co_cached->_co_code == NULL); + co->_co_cached->_co_code = Py_NewRef(code); return code; } @@ -1606,16 +1639,19 @@ code_dealloc(PyCodeObject *co) 
Py_XDECREF(co->co_qualname); Py_XDECREF(co->co_linetable); Py_XDECREF(co->co_exceptiontable); - Py_XDECREF(co->_co_code); + if (co->_co_cached != NULL) { + Py_XDECREF(co->_co_cached->_co_code); + Py_XDECREF(co->_co_cached->_co_cellvars); + Py_XDECREF(co->_co_cached->_co_freevars); + Py_XDECREF(co->_co_cached->_co_varnames); + PyMem_Free(co->_co_cached); + } if (co->co_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject*)co); } if (co->_co_linearray) { PyMem_Free(co->_co_linearray); } - if (co->co_warmup == 0) { - _Py_QuickenedCount--; - } PyObject_Free(co); } @@ -1727,8 +1763,7 @@ code_richcompare(PyObject *self, PyObject *other, int op) res = Py_False; done: - Py_INCREF(res); - return res; + return Py_NewRef(res); } static Py_hash_t @@ -1980,8 +2015,7 @@ code__varname_from_oparg_impl(PyCodeObject *self, int oparg) if (name == NULL) { return NULL; } - Py_INCREF(name); - return name; + return Py_NewRef(name); } /* XXX code objects need to participate in GC? */ @@ -2056,8 +2090,7 @@ _PyCode_ConstantKey(PyObject *op) { /* Objects of these types are always different from object of other * type and from tuples. */ - Py_INCREF(op); - key = op; + key = Py_NewRef(op); } else if (PyBool_Check(op) || PyBytes_CheckExact(op)) { /* Make booleans different from integers 0 and 1. @@ -2173,15 +2206,18 @@ _PyCode_ConstantKey(PyObject *op) } void -_PyStaticCode_Dealloc(PyCodeObject *co) +_PyStaticCode_Fini(PyCodeObject *co) { - if (co->co_warmup == 0) { - _Py_QuickenedCount--; - } deopt_code(_PyCode_CODE(co), Py_SIZE(co)); - co->co_warmup = QUICKENING_INITIAL_WARMUP_VALUE; PyMem_Free(co->co_extra); - Py_CLEAR(co->_co_code); + if (co->_co_cached != NULL) { + Py_CLEAR(co->_co_cached->_co_code); + Py_CLEAR(co->_co_cached->_co_cellvars); + Py_CLEAR(co->_co_cached->_co_freevars); + Py_CLEAR(co->_co_cached->_co_varnames); + PyMem_Free(co->_co_cached); + co->_co_cached = NULL; + } co->co_extra = NULL; if (co->co_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject *)co); @@ -2194,7 +2230,7 @@ _PyStaticCode_Dealloc(PyCodeObject *co) } int -_PyStaticCode_InternStrings(PyCodeObject *co) +_PyStaticCode_Init(PyCodeObject *co) { int res = intern_strings(co->co_names); if (res < 0) { @@ -2208,5 +2244,81 @@ _PyStaticCode_InternStrings(PyCodeObject *co) if (res < 0) { return -1; } + _PyCode_Quicken(co); return 0; } + +#define MAX_CODE_UNITS_PER_LOC_ENTRY 8 + +PyCodeObject * +_Py_MakeShimCode(const _PyShimCodeDef *codedef) +{ + PyObject *name = NULL; + PyObject *co_code = NULL; + PyObject *lines = NULL; + PyCodeObject *codeobj = NULL; + uint8_t *loc_table = NULL; + + name = _PyUnicode_FromASCII(codedef->cname, strlen(codedef->cname)); + if (name == NULL) { + goto cleanup; + } + co_code = PyBytes_FromStringAndSize( + (const char *)codedef->code, codedef->codelen); + if (co_code == NULL) { + goto cleanup; + } + int code_units = codedef->codelen / sizeof(_Py_CODEUNIT); + int loc_entries = (code_units + MAX_CODE_UNITS_PER_LOC_ENTRY - 1) / + MAX_CODE_UNITS_PER_LOC_ENTRY; + loc_table = PyMem_Malloc(loc_entries); + if (loc_table == NULL) { + PyErr_NoMemory(); + goto cleanup; + } + for (int i = 0; i < loc_entries-1; i++) { + loc_table[i] = 0x80 | (PY_CODE_LOCATION_INFO_NONE << 3) | 7; + code_units -= MAX_CODE_UNITS_PER_LOC_ENTRY; + } + assert(loc_entries > 0); + assert(code_units > 0 && code_units <= MAX_CODE_UNITS_PER_LOC_ENTRY); + loc_table[loc_entries-1] = 0x80 | + (PY_CODE_LOCATION_INFO_NONE << 3) | (code_units-1); + lines = PyBytes_FromStringAndSize((const char *)loc_table, loc_entries); + PyMem_Free(loc_table); + if 
(lines == NULL) { + goto cleanup; + } + _Py_DECLARE_STR(shim_name, ""); + struct _PyCodeConstructor con = { + .filename = &_Py_STR(shim_name), + .name = name, + .qualname = name, + .flags = CO_NEWLOCALS | CO_OPTIMIZED, + + .code = co_code, + .firstlineno = 1, + .linetable = lines, + + .consts = (PyObject *)&_Py_SINGLETON(tuple_empty), + .names = (PyObject *)&_Py_SINGLETON(tuple_empty), + + .localsplusnames = (PyObject *)&_Py_SINGLETON(tuple_empty), + .localspluskinds = (PyObject *)&_Py_SINGLETON(bytes_empty), + + .argcount = 0, + .posonlyargcount = 0, + .kwonlyargcount = 0, + + .stacksize = codedef->stacksize, + + .exceptiontable = (PyObject *)&_Py_SINGLETON(bytes_empty), + }; + + codeobj = _PyCode_New(&con); +cleanup: + Py_XDECREF(name); + Py_XDECREF(co_code); + Py_XDECREF(lines); + return codeobj; +} diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 9bd68d50c30ae0..aee03ddfb07aec 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -449,8 +449,7 @@ to_complex(PyObject **pobj, Py_complex *pc) pc->real = PyFloat_AsDouble(obj); return 0; } - Py_INCREF(Py_NotImplemented); - *pobj = Py_NotImplemented; + *pobj = Py_NewRef(Py_NotImplemented); return -1; } @@ -553,8 +552,7 @@ static PyObject * complex_pos(PyComplexObject *v) { if (PyComplex_CheckExact(v)) { - Py_INCREF(v); - return (PyObject *)v; + return Py_NewRef(v); } else return PyComplex_FromCComplex(v->cval); @@ -631,8 +629,7 @@ complex_richcompare(PyObject *v, PyObject *w, int op) else res = Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); Unimplemented: Py_RETURN_NOTIMPLEMENTED; @@ -705,8 +702,7 @@ complex___complex___impl(PyComplexObject *self) /*[clinic end generated code: output=e6b35ba3d275dc9c input=3589ada9d27db854]*/ { if (PyComplex_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { return PyComplex_FromCComplex(self->cval); @@ -917,8 +913,7 @@ complex_new_impl(PyTypeObject *type, PyObject *r, PyObject *i) to exact complexes here. If either the input or the output is a complex subclass, it will be handled below as a non-orthogonal vector. 
*/ - Py_INCREF(r); - return r; + return Py_NewRef(r); } if (PyUnicode_Check(r)) { if (i != NULL) { diff --git a/Objects/descrobject.c b/Objects/descrobject.c index a2974f91aaaec3..c545b90c6283e1 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -17,7 +17,7 @@ class property "propertyobject *" "&PyProperty_Type" // see pycore_object.h #if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) #include -EM_JS(PyObject*, descr_set_trampoline_call, (setter set, PyObject *obj, PyObject *value, void *closure), { +EM_JS(int, descr_set_trampoline_call, (setter set, PyObject *obj, PyObject *value, void *closure), { return wasmTable.get(set)(obj, value, closure); }); @@ -622,8 +622,7 @@ descr_get_qualname(PyDescrObject *descr, void *Py_UNUSED(ignored)) { if (descr->d_qualname == NULL) descr->d_qualname = calculate_qualname(descr); - Py_XINCREF(descr->d_qualname); - return descr->d_qualname; + return Py_XNewRef(descr->d_qualname); } static PyObject * @@ -904,12 +903,10 @@ descr_new(PyTypeObject *descrtype, PyTypeObject *type, const char *name) descr = (PyDescrObject *)PyType_GenericAlloc(descrtype, 0); if (descr != NULL) { - Py_XINCREF(type); - descr->d_type = type; + descr->d_type = (PyTypeObject*)Py_XNewRef(type); descr->d_name = PyUnicode_InternFromString(name); if (descr->d_name == NULL) { - Py_DECREF(descr); - descr = NULL; + Py_SETREF(descr, NULL); } else { descr->d_qualname = NULL; @@ -1244,8 +1241,7 @@ mappingproxy_new_impl(PyTypeObject *type, PyObject *mapping) mappingproxy = PyObject_GC_New(mappingproxyobject, &PyDictProxy_Type); if (mappingproxy == NULL) return NULL; - Py_INCREF(mapping); - mappingproxy->mapping = mapping; + mappingproxy->mapping = Py_NewRef(mapping); _PyObject_GC_TRACK(mappingproxy); return (PyObject *)mappingproxy; } @@ -1260,8 +1256,7 @@ PyDictProxy_New(PyObject *mapping) pp = PyObject_GC_New(mappingproxyobject, &PyDictProxy_Type); if (pp != NULL) { - Py_INCREF(mapping); - pp->mapping = mapping; + pp->mapping = Py_NewRef(mapping); _PyObject_GC_TRACK(pp); } return (PyObject *)pp; @@ -1361,8 +1356,7 @@ wrapper_objclass(wrapperobject *wp, void *Py_UNUSED(ignored)) { PyObject *c = (PyObject *)PyDescr_TYPE(wp->descr); - Py_INCREF(c); - return c; + return Py_NewRef(c); } static PyObject * @@ -1466,10 +1460,8 @@ PyWrapper_New(PyObject *d, PyObject *self) wp = PyObject_GC_New(wrapperobject, &_PyMethodWrapper_Type); if (wp != NULL) { - Py_INCREF(descr); - wp->descr = descr; - Py_INCREF(self); - wp->self = self; + wp->descr = (PyWrapperDescrObject*)Py_NewRef(descr); + wp->self = Py_NewRef(self); _PyObject_GC_TRACK(wp); } return (PyObject *)wp; @@ -1566,8 +1558,7 @@ property_set_name(PyObject *self, PyObject *args) { propertyobject *prop = (propertyobject *)self; PyObject *name = PyTuple_GET_ITEM(args, 1); - Py_XINCREF(name); - Py_XSETREF(prop->prop_name, name); + Py_XSETREF(prop->prop_name, Py_XNewRef(name)); Py_RETURN_NONE; } @@ -1599,8 +1590,7 @@ static PyObject * property_descr_get(PyObject *self, PyObject *obj, PyObject *type) { if (obj == NULL || obj == Py_None) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } propertyobject *gs = (propertyobject *)self; @@ -1722,8 +1712,7 @@ property_copy(PyObject *old, PyObject *get, PyObject *set, PyObject *del) if (new == NULL) return NULL; - Py_XINCREF(pold->prop_name); - Py_XSETREF(((propertyobject *) new)->prop_name, pold->prop_name); + Py_XSETREF(((propertyobject *) new)->prop_name, Py_XNewRef(pold->prop_name)); return new; } @@ -1776,13 +1765,9 @@ property_init_impl(propertyobject *self, PyObject 
*fget, PyObject *fset, if (fdel == Py_None) fdel = NULL; - Py_XINCREF(fget); - Py_XINCREF(fset); - Py_XINCREF(fdel); - - Py_XSETREF(self->prop_get, fget); - Py_XSETREF(self->prop_set, fset); - Py_XSETREF(self->prop_del, fdel); + Py_XSETREF(self->prop_get, Py_XNewRef(fget)); + Py_XSETREF(self->prop_set, Py_XNewRef(fset)); + Py_XSETREF(self->prop_del, Py_XNewRef(fdel)); Py_XSETREF(self->prop_doc, NULL); Py_XSETREF(self->prop_name, NULL); @@ -1790,8 +1775,7 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, PyObject *prop_doc = NULL; if (doc != NULL && doc != Py_None) { - prop_doc = doc; - Py_XINCREF(prop_doc); + prop_doc = Py_XNewRef(doc); } /* if no docstring given and the getter has one, use that one */ else if (fget != NULL) { @@ -1820,8 +1804,7 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, class's dict. */ if (prop_doc == NULL) { - prop_doc = Py_None; - Py_INCREF(prop_doc); + prop_doc = Py_NewRef(Py_None); } int err = PyObject_SetAttr( (PyObject *)self, &_Py_ID(__doc__), prop_doc); diff --git a/Objects/dictobject.c b/Objects/dictobject.c index fecdfa86370193..c58d07b51bd89a 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -236,11 +236,6 @@ static int dictresize(PyDictObject *mp, uint8_t log_newsize, int unicode); static PyObject* dict_iter(PyDictObject *dict); -/*Global counter used to set ma_version_tag field of dictionary. - * It is incremented each time that a dictionary is created and each - * time that a dictionary is modified. */ -uint64_t _pydict_global_version = 0; - #include "clinic/dictobject.c.h" @@ -1200,7 +1195,6 @@ insert_into_dictkeys(PyDictKeysObject *keys, PyObject *name) if (keys->dk_usable <= 0) { return DKIX_EMPTY; } - Py_INCREF(name); /* Insert into new slot. */ keys->dk_version = 0; Py_ssize_t hashpos = find_empty_slot(keys, hash); @@ -1208,7 +1202,7 @@ insert_into_dictkeys(PyDictKeysObject *keys, PyObject *name) PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(keys)[ix]; dictkeys_set_index(keys, hashpos, ix); assert(ep->me_key == NULL); - ep->me_key = name; + ep->me_key = Py_NewRef(name); keys->dk_usable--; keys->dk_nentries++; } @@ -1240,6 +1234,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) MAINTAIN_TRACKING(mp, key, value); if (ix == DKIX_EMPTY) { + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_ADDED, mp, key, value); /* Insert into new slot. 
*/ mp->ma_keys->dk_version = 0; assert(old_value == NULL); @@ -1274,7 +1269,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) ep->me_value = value; } mp->ma_used++; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; mp->ma_keys->dk_usable--; mp->ma_keys->dk_nentries++; assert(mp->ma_keys->dk_usable >= 0); @@ -1283,6 +1278,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) } if (old_value != value) { + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, mp, key, value); if (_PyDict_HasSplitTable(mp)) { mp->ma_values->values[ix] = value; if (old_value == NULL) { @@ -1299,7 +1295,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) DK_ENTRIES(mp->ma_keys)[ix].me_value = value; } } - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; } Py_XDECREF(old_value); /* which **CAN** re-enter (see issue #22653) */ ASSERT_CONSISTENT(mp); @@ -1320,6 +1316,8 @@ insert_to_emptydict(PyDictObject *mp, PyObject *key, Py_hash_t hash, { assert(mp->ma_keys == Py_EMPTY_KEYS); + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_ADDED, mp, key, value); + int unicode = PyUnicode_CheckExact(key); PyDictKeysObject *newkeys = new_keys_object(PyDict_LOG_MINSIZE, unicode); if (newkeys == NULL) { @@ -1347,7 +1345,7 @@ insert_to_emptydict(PyDictObject *mp, PyObject *key, Py_hash_t hash, ep->me_value = value; } mp->ma_used++; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; mp->ma_keys->dk_usable--; mp->ma_keys->dk_nentries++; return 0; @@ -1449,8 +1447,7 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize, int unicode) int index = get_index_from_order(mp, i); PyDictUnicodeEntry *ep = &oldentries[index]; assert(oldvalues->values[index] != NULL); - Py_INCREF(ep->me_key); - newentries[i].me_key = ep->me_key; + newentries[i].me_key = Py_NewRef(ep->me_key); newentries[i].me_hash = unicode_get_hash(ep->me_key); newentries[i].me_value = oldvalues->values[index]; } @@ -1463,8 +1460,7 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize, int unicode) int index = get_index_from_order(mp, i); PyDictUnicodeEntry *ep = &oldentries[index]; assert(oldvalues->values[index] != NULL); - Py_INCREF(ep->me_key); - newentries[i].me_key = ep->me_key; + newentries[i].me_key = Py_NewRef(ep->me_key); newentries[i].me_value = oldvalues->values[index]; } build_indices_unicode(mp->ma_keys, newentries, numentries); @@ -1863,9 +1859,8 @@ PyDict_SetItem(PyObject *op, PyObject *key, PyObject *value) } assert(key); assert(value); - Py_INCREF(key); - Py_INCREF(value); - return _PyDict_SetItem_Take2((PyDictObject *)op, key, value); + return _PyDict_SetItem_Take2((PyDictObject *)op, + Py_NewRef(key), Py_NewRef(value)); } int @@ -1883,13 +1878,11 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, assert(hash != -1); mp = (PyDictObject *)op; - Py_INCREF(key); - Py_INCREF(value); if (mp->ma_keys == Py_EMPTY_KEYS) { - return insert_to_emptydict(mp, key, hash, value); + return insert_to_emptydict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } /* insertdict() handles any resizing that might be necessary */ - return insertdict(mp, key, hash, value); + return insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } static void @@ -1910,7 +1903,7 @@ delete_index_from_values(PyDictValues *values, Py_ssize_t ix) static int delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, - PyObject *old_value) + PyObject *old_value, uint64_t 
new_version) { PyObject *old_key; @@ -1918,7 +1911,7 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, assert(hashpos >= 0); mp->ma_used--; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; if (mp->ma_values) { assert(old_value == mp->ma_values->values[ix]); mp->ma_values->values[ix] = NULL; @@ -1987,7 +1980,8 @@ _PyDict_DelItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash) return -1; } - return delitem_common(mp, hash, ix, old_value); + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, mp, key, NULL); + return delitem_common(mp, hash, ix, old_value, new_version); } /* This function promises that the predicate -> deletion sequence is atomic @@ -2028,10 +2022,12 @@ _PyDict_DelItemIf(PyObject *op, PyObject *key, hashpos = lookdict_index(mp->ma_keys, hash, ix); assert(hashpos >= 0); - if (res > 0) - return delitem_common(mp, hashpos, ix, old_value); - else + if (res > 0) { + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, mp, key, NULL); + return delitem_common(mp, hashpos, ix, old_value, new_version); + } else { return 0; + } } @@ -2052,11 +2048,12 @@ PyDict_Clear(PyObject *op) return; } /* Empty the dict... */ + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_CLEARED, mp, NULL, NULL); dictkeys_incref(Py_EMPTY_KEYS); mp->ma_keys = Py_EMPTY_KEYS; mp->ma_values = NULL; mp->ma_used = 0; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; /* ...then clear the keys and values */ if (oldvalues != NULL) { n = oldkeys->dk_nentries; @@ -2177,8 +2174,7 @@ _PyDict_Pop_KnownHash(PyObject *dict, PyObject *key, Py_hash_t hash, PyObject *d if (mp->ma_used == 0) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; @@ -2188,15 +2184,14 @@ _PyDict_Pop_KnownHash(PyObject *dict, PyObject *key, Py_hash_t hash, PyObject *d return NULL; if (ix == DKIX_EMPTY || old_value == NULL) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; } assert(old_value != NULL); - Py_INCREF(old_value); - delitem_common(mp, hash, ix, old_value); + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, mp, key, NULL); + delitem_common(mp, hash, ix, Py_NewRef(old_value), new_version); ASSERT_CONSISTENT(mp); return old_value; @@ -2209,8 +2204,7 @@ _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *deflt) if (((PyDictObject *)dict)->ma_used == 0) { if (deflt) { - Py_INCREF(deflt); - return deflt; + return Py_NewRef(deflt); } _PyErr_SetKeyError(key); return NULL; @@ -2251,9 +2245,7 @@ _PyDict_FromKeys(PyObject *cls, PyObject *iterable, PyObject *value) } while (_PyDict_Next(iterable, &pos, &key, &oldvalue, &hash)) { - Py_INCREF(key); - Py_INCREF(value); - if (insertdict(mp, key, hash, value)) { + if (insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value))) { Py_DECREF(d); return NULL; } @@ -2272,9 +2264,7 @@ _PyDict_FromKeys(PyObject *cls, PyObject *iterable, PyObject *value) } while (_PySet_NextEntry(iterable, &pos, &key, &hash)) { - Py_INCREF(key); - Py_INCREF(value); - if (insertdict(mp, key, hash, value)) { + if (insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value))) { Py_DECREF(d); return NULL; } @@ -2321,6 +2311,7 @@ _PyDict_FromKeys(PyObject *cls, PyObject *iterable, PyObject *value) static void dict_dealloc(PyDictObject *mp) { + _PyDict_NotifyEvent(PyDict_EVENT_DEALLOCATED, mp, NULL, NULL); PyDictValues *values = mp->ma_values; PyDictKeysObject *keys = mp->ma_keys; 
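/*
 * Editorial aside (not part of the patch): every mutation path in this file now
 * calls _PyDict_NotifyEvent(<event>, mp, key, value) and stores the returned
 * value into ma_version_tag, so watched dicts report ADDED, MODIFIED, DELETED,
 * CLEARED, CLONED and DEALLOCATED events.  The registration API
 * (PyDict_AddWatcher / PyDict_Watch / PyDict_Unwatch / PyDict_ClearWatcher)
 * appears later in this dictobject.c diff.  A minimal, hypothetical consumer
 * might look like the sketch below; the callback signature is inferred from
 * the _PyDict_SendEvent() call site rather than quoted from a header.
 */
static int
my_dict_watcher(PyDict_WatchEvent event, PyObject *dict,
                PyObject *key, PyObject *new_value)
{
    if (event == PyDict_EVENT_ADDED || event == PyDict_EVENT_MODIFIED) {
        /* e.g. invalidate a cache keyed on `dict`; key/new_value may be NULL
         * for events such as CLEARED or DEALLOCATED. */
    }
    return 0;  /* a negative return is reported via PyErr_WriteUnraisable() */
}

static int
install_watcher(PyObject *some_dict)
{
    int wid = PyDict_AddWatcher(my_dict_watcher);  /* -1 if all watcher IDs are taken */
    if (wid < 0) {
        return -1;
    }
    return PyDict_Watch(wid, some_dict);           /* 0 on success, -1 on error */
}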
Py_ssize_t i, n; @@ -2479,8 +2470,7 @@ dict_subscript(PyDictObject *mp, PyObject *key) _PyErr_SetKeyError(key); return NULL; } - Py_INCREF(value); - return value; + return Py_NewRef(value); } static int @@ -2522,8 +2512,7 @@ dict_keys(PyDictObject *mp) PyObject *key; while (_PyDict_Next((PyObject*)mp, &pos, &key, NULL, NULL)) { assert(j < n); - Py_INCREF(key); - PyList_SET_ITEM(v, j, key); + PyList_SET_ITEM(v, j, Py_NewRef(key)); j++; } assert(j == n); @@ -2554,8 +2543,7 @@ dict_values(PyDictObject *mp) PyObject *value; while (_PyDict_Next((PyObject*)mp, &pos, NULL, &value, NULL)) { assert(j < n); - Py_INCREF(value); - PyList_SET_ITEM(v, j, value); + PyList_SET_ITEM(v, j, Py_NewRef(value)); j++; } assert(j == n); @@ -2600,10 +2588,8 @@ dict_items(PyDictObject *mp) while (_PyDict_Next((PyObject*)mp, &pos, &key, &value, NULL)) { assert(j < n); PyObject *item = PyList_GET_ITEM(v, j); - Py_INCREF(key); - PyTuple_SET_ITEM(item, 0, key); - Py_INCREF(value); - PyTuple_SET_ITEM(item, 1, value); + PyTuple_SET_ITEM(item, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(item, 1, Py_NewRef(value)); j++; } assert(j == n); @@ -2809,6 +2795,7 @@ dict_merge(PyObject *a, PyObject *b, int override) other->ma_used == okeys->dk_nentries && (DK_LOG_SIZE(okeys) == PyDict_LOG_MINSIZE || USABLE_FRACTION(DK_SIZE(okeys)/2) < other->ma_used)) { + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_CLONED, mp, b, NULL); PyDictKeysObject *keys = clone_combined_dict_keys(other); if (keys == NULL) { return -1; @@ -2822,7 +2809,7 @@ dict_merge(PyObject *a, PyObject *b, int override) } mp->ma_used = other->ma_used; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; ASSERT_CONSISTENT(mp); if (_PyObject_GC_IS_TRACKED(other) && !_PyObject_GC_IS_TRACKED(mp)) { @@ -2854,16 +2841,12 @@ dict_merge(PyObject *a, PyObject *b, int override) Py_INCREF(key); Py_INCREF(value); if (override == 1) { - Py_INCREF(key); - Py_INCREF(value); - err = insertdict(mp, key, hash, value); + err = insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } else { err = _PyDict_Contains_KnownHash(a, key, hash); if (err == 0) { - Py_INCREF(key); - Py_INCREF(value); - err = insertdict(mp, key, hash, value); + err = insertdict(mp, Py_NewRef(key), hash, Py_NewRef(value)); } else if (err > 0) { if (override != 0) { @@ -3010,8 +2993,7 @@ PyDict_Copy(PyObject *o) dictkeys_incref(mp->ma_keys); for (i = 0, n = size; i < n; i++) { PyObject *value = mp->ma_values->values[i]; - Py_XINCREF(value); - split_copy->ma_values->values[i] = value; + split_copy->ma_values->values[i] = Py_XNewRef(value); } if (_PyObject_GC_IS_TRACKED(mp)) _PyObject_GC_TRACK(split_copy); @@ -3186,8 +3168,7 @@ dict_richcompare(PyObject *v, PyObject *w, int op) } else res = Py_NotImplemented; - Py_INCREF(res); - return res; + return Py_NewRef(res); } /*[clinic input] @@ -3252,8 +3233,7 @@ dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value) if (ix == DKIX_EMPTY || val == NULL) { val = default_value; } - Py_INCREF(val); - return val; + return Py_NewRef(val); } PyObject * @@ -3275,9 +3255,8 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) } if (mp->ma_keys == Py_EMPTY_KEYS) { - Py_INCREF(key); - Py_INCREF(defaultobj); - if (insert_to_emptydict(mp, key, hash, defaultobj) < 0) { + if (insert_to_emptydict(mp, Py_NewRef(key), hash, + Py_NewRef(defaultobj)) < 0) { return NULL; } return defaultobj; @@ -3294,6 +3273,7 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) return NULL; if (ix == DKIX_EMPTY) { + uint64_t 
new_version = _PyDict_NotifyEvent(PyDict_EVENT_ADDED, mp, key, defaultobj); mp->ma_keys->dk_version = 0; value = defaultobj; if (mp->ma_keys->dk_usable <= 0) { @@ -3306,43 +3286,41 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) if (DK_IS_UNICODE(mp->ma_keys)) { assert(PyUnicode_CheckExact(key)); PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries]; - ep->me_key = key; + ep->me_key = Py_NewRef(key); if (_PyDict_HasSplitTable(mp)) { Py_ssize_t index = (int)mp->ma_keys->dk_nentries; assert(index < SHARED_KEYS_MAX_SIZE); assert(mp->ma_values->values[index] == NULL); - mp->ma_values->values[index] = value; + mp->ma_values->values[index] = Py_NewRef(value); _PyDictValues_AddToInsertionOrder(mp->ma_values, index); } else { - ep->me_value = value; + ep->me_value = Py_NewRef(value); } } else { PyDictKeyEntry *ep = &DK_ENTRIES(mp->ma_keys)[mp->ma_keys->dk_nentries]; - ep->me_key = key; + ep->me_key = Py_NewRef(key); ep->me_hash = hash; - ep->me_value = value; + ep->me_value = Py_NewRef(value); } - Py_INCREF(key); - Py_INCREF(value); MAINTAIN_TRACKING(mp, key, value); mp->ma_used++; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; mp->ma_keys->dk_usable--; mp->ma_keys->dk_nentries++; assert(mp->ma_keys->dk_usable >= 0); } else if (value == NULL) { + uint64_t new_version = _PyDict_NotifyEvent(PyDict_EVENT_ADDED, mp, key, defaultobj); value = defaultobj; assert(_PyDict_HasSplitTable(mp)); assert(mp->ma_values->values[ix] == NULL); - Py_INCREF(value); MAINTAIN_TRACKING(mp, key, value); - mp->ma_values->values[ix] = value; + mp->ma_values->values[ix] = Py_NewRef(value); _PyDictValues_AddToInsertionOrder(mp->ma_values, ix); mp->ma_used++; - mp->ma_version_tag = DICT_NEXT_VERSION(); + mp->ma_version_tag = new_version; } ASSERT_CONSISTENT(mp); @@ -3369,8 +3347,7 @@ dict_setdefault_impl(PyDictObject *self, PyObject *key, PyObject *val; val = PyDict_SetDefault((PyObject *)self, key, default_value); - Py_XINCREF(val); - return val; + return Py_XNewRef(val); } static PyObject * @@ -3415,6 +3392,7 @@ dict_popitem_impl(PyDictObject *self) { Py_ssize_t i, j; PyObject *res; + uint64_t new_version; /* Allocate the result tuple before checking the size. 
Believe it * or not, this allocation could trigger a garbage collection which @@ -3454,6 +3432,7 @@ dict_popitem_impl(PyDictObject *self) assert(i >= 0); key = ep0[i].me_key; + new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, self, key, NULL); hash = unicode_get_hash(key); value = ep0[i].me_value; ep0[i].me_key = NULL; @@ -3468,6 +3447,7 @@ dict_popitem_impl(PyDictObject *self) assert(i >= 0); key = ep0[i].me_key; + new_version = _PyDict_NotifyEvent(PyDict_EVENT_DELETED, self, key, NULL); hash = ep0[i].me_hash; value = ep0[i].me_value; ep0[i].me_key = NULL; @@ -3485,7 +3465,7 @@ dict_popitem_impl(PyDictObject *self) /* We can't dk_usable++ since there is DKIX_DUMMY in indices */ self->ma_keys->dk_nentries = i; self->ma_used--; - self->ma_version_tag = DICT_NEXT_VERSION(); + self->ma_version_tag = new_version; ASSERT_CONSISTENT(self); return res; } @@ -3587,8 +3567,7 @@ dict_ior(PyObject *self, PyObject *other) if (dict_update_arg(self, other)) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyDoc_STRVAR(getitem__doc__, @@ -3926,8 +3905,7 @@ dictiter_new(PyDictObject *dict, PyTypeObject *itertype) if (di == NULL) { return NULL; } - Py_INCREF(dict); - di->di_dict = dict; + di->di_dict = (PyDictObject*)Py_NewRef(dict); di->di_used = dict->ma_used; di->len = dict->ma_used; if (itertype == &PyDictRevIterKey_Type || @@ -4061,8 +4039,7 @@ dictiter_iternextkey(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(key); - return key; + return Py_NewRef(key); fail: di->di_dict = NULL; @@ -4161,8 +4138,7 @@ dictiter_iternextvalue(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(value); - return value; + return Py_NewRef(value); fail: di->di_dict = NULL; @@ -4264,14 +4240,12 @@ dictiter_iternextitem(dictiterobject *di) } di->di_pos = i+1; di->len--; - Py_INCREF(key); - Py_INCREF(value); result = di->di_result; if (Py_REFCNT(result) == 1) { PyObject *oldkey = PyTuple_GET_ITEM(result, 0); PyObject *oldvalue = PyTuple_GET_ITEM(result, 1); - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); Py_INCREF(result); Py_DECREF(oldkey); Py_DECREF(oldvalue); @@ -4285,8 +4259,8 @@ dictiter_iternextitem(dictiterobject *di) result = PyTuple_New(2); if (result == NULL) return NULL; - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); } return result; @@ -4390,22 +4364,18 @@ dictreviter_iternext(dictiterobject *di) di->len--; if (Py_IS_TYPE(di, &PyDictRevIterKey_Type)) { - Py_INCREF(key); - return key; + return Py_NewRef(key); } else if (Py_IS_TYPE(di, &PyDictRevIterValue_Type)) { - Py_INCREF(value); - return value; + return Py_NewRef(value); } else if (Py_IS_TYPE(di, &PyDictRevIterItem_Type)) { - Py_INCREF(key); - Py_INCREF(value); result = di->di_result; if (Py_REFCNT(result) == 1) { PyObject *oldkey = PyTuple_GET_ITEM(result, 0); PyObject *oldvalue = PyTuple_GET_ITEM(result, 1); - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); Py_INCREF(result); Py_DECREF(oldkey); Py_DECREF(oldvalue); @@ -4420,8 +4390,8 @@ dictreviter_iternext(dictiterobject *di) if (result == NULL) { 
return NULL; } - PyTuple_SET_ITEM(result, 0, key); /* steals reference */ - PyTuple_SET_ITEM(result, 1, value); /* steals reference */ + PyTuple_SET_ITEM(result, 0, Py_NewRef(key)); + PyTuple_SET_ITEM(result, 1, Py_NewRef(value)); } return result; } @@ -4468,7 +4438,6 @@ dictiter_reduce(dictiterobject *di, PyObject *Py_UNUSED(ignored)) /* copy the iterator state */ dictiterobject tmp = *di; Py_XINCREF(tmp.di_dict); - PyObject *list = PySequence_List((PyObject*)&tmp); Py_XDECREF(tmp.di_dict); if (list == NULL) { @@ -4550,8 +4519,7 @@ _PyDictView_New(PyObject *dict, PyTypeObject *type) dv = PyObject_GC_New(_PyDictViewObject, type); if (dv == NULL) return NULL; - Py_INCREF(dict); - dv->dv_dict = (PyDictObject *)dict; + dv->dv_dict = (PyDictObject *)Py_NewRef(dict); _PyObject_GC_TRACK(dv); return (PyObject *)dv; } @@ -4662,8 +4630,7 @@ dictview_richcompare(PyObject *self, PyObject *other, int op) if (ok < 0) return NULL; result = ok ? Py_True : Py_False; - Py_INCREF(result); - return result; + return Py_NewRef(result); } static PyObject * @@ -5429,8 +5396,7 @@ _PyObject_StoreInstanceAttribute(PyObject *obj, PyDictValues *values, } } PyObject *old_value = values->values[ix]; - Py_XINCREF(value); - values->values[ix] = value; + values->values[ix] = Py_XNewRef(value); if (old_value == NULL) { if (value == NULL) { PyErr_Format(PyExc_AttributeError, @@ -5492,8 +5458,7 @@ _PyObject_GetInstanceAttribute(PyObject *obj, PyDictValues *values, return NULL; } PyObject *value = values->values[ix]; - Py_XINCREF(value); - return value; + return Py_XNewRef(value); } int @@ -5636,8 +5601,7 @@ PyObject_GenericGetDict(PyObject *obj, void *context) } } } - Py_XINCREF(dict); - return dict; + return Py_XNewRef(dict); } int @@ -5689,17 +5653,107 @@ _PyDictKeys_DecRef(PyDictKeysObject *keys) dictkeys_decref(keys); } -static uint32_t next_dict_keys_version = 2; - uint32_t _PyDictKeys_GetVersionForCurrentState(PyDictKeysObject *dictkeys) { if (dictkeys->dk_version != 0) { return dictkeys->dk_version; } - if (next_dict_keys_version == 0) { + if (_PyRuntime.dict_state.next_keys_version == 0) { return 0; } - uint32_t v = next_dict_keys_version++; + uint32_t v = _PyRuntime.dict_state.next_keys_version++; dictkeys->dk_version = v; return v; } + +static inline int +validate_watcher_id(PyInterpreterState *interp, int watcher_id) +{ + if (watcher_id < 0 || watcher_id >= DICT_MAX_WATCHERS) { + PyErr_Format(PyExc_ValueError, "Invalid dict watcher ID %d", watcher_id); + return -1; + } + if (!interp->dict_state.watchers[watcher_id]) { + PyErr_Format(PyExc_ValueError, "No dict watcher set for ID %d", watcher_id); + return -1; + } + return 0; +} + +int +PyDict_Watch(int watcher_id, PyObject* dict) +{ + if (!PyDict_Check(dict)) { + PyErr_SetString(PyExc_ValueError, "Cannot watch non-dictionary"); + return -1; + } + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id)) { + return -1; + } + ((PyDictObject*)dict)->ma_version_tag |= (1LL << watcher_id); + return 0; +} + +int +PyDict_Unwatch(int watcher_id, PyObject* dict) +{ + if (!PyDict_Check(dict)) { + PyErr_SetString(PyExc_ValueError, "Cannot watch non-dictionary"); + return -1; + } + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id)) { + return -1; + } + ((PyDictObject*)dict)->ma_version_tag &= ~(1LL << watcher_id); + return 0; +} + +int +PyDict_AddWatcher(PyDict_WatchCallback callback) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + + for (int i = 0; i < 
DICT_MAX_WATCHERS; i++) { + if (!interp->dict_state.watchers[i]) { + interp->dict_state.watchers[i] = callback; + return i; + } + } + + PyErr_SetString(PyExc_RuntimeError, "no more dict watcher IDs available"); + return -1; +} + +int +PyDict_ClearWatcher(int watcher_id) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id)) { + return -1; + } + interp->dict_state.watchers[watcher_id] = NULL; + return 0; +} + +void +_PyDict_SendEvent(int watcher_bits, + PyDict_WatchEvent event, + PyDictObject *mp, + PyObject *key, + PyObject *value) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + for (int i = 0; i < DICT_MAX_WATCHERS; i++) { + if (watcher_bits & 1) { + PyDict_WatchCallback cb = interp->dict_state.watchers[i]; + if (cb && (cb(event, (PyObject*)mp, key, value) < 0)) { + // some dict modification paths (e.g. PyDict_Clear) can't raise, so we + // can't propagate exceptions from dict watchers. + PyErr_WriteUnraisable((PyObject *)mp); + } + } + watcher_bits >>= 1; + } +} diff --git a/Objects/enumobject.c b/Objects/enumobject.c index d84bac6f4c9af2..c9d90584c26b7d 100644 --- a/Objects/enumobject.c +++ b/Objects/enumobject.c @@ -389,8 +389,7 @@ reversed_new_impl(PyTypeObject *type, PyObject *seq) return NULL; ro->index = n-1; - Py_INCREF(seq); - ro->seq = seq; + ro->seq = Py_NewRef(seq); return (PyObject *)ro; } diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 80e98bb4ffa43a..db6f7d52804d6a 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -53,8 +53,7 @@ BaseException_new(PyTypeObject *type, PyObject *args, PyObject *kwds) self->suppress_context = 0; if (args) { - self->args = args; - Py_INCREF(args); + self->args = Py_NewRef(args); return (PyObject *)self; } @@ -73,9 +72,7 @@ BaseException_init(PyBaseExceptionObject *self, PyObject *args, PyObject *kwds) if (!_PyArg_NoKeywords(Py_TYPE(self)->tp_name, kwds)) return -1; - Py_INCREF(args); - Py_XSETREF(self->args, args); - + Py_XSETREF(self->args, Py_NewRef(args)); return 0; } @@ -185,8 +182,7 @@ BaseException_with_traceback(PyObject *self, PyObject *tb) { if (PyException_SetTraceback(self, tb)) return NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyDoc_STRVAR(with_traceback_doc, @@ -258,8 +254,7 @@ BaseException_get_args(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) if (self->args == NULL) { Py_RETURN_NONE; } - Py_INCREF(self->args); - return self->args; + return Py_NewRef(self->args); } static int @@ -283,8 +278,7 @@ BaseException_get_tb(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) if (self->traceback == NULL) { Py_RETURN_NONE; } - Py_INCREF(self->traceback); - return self->traceback; + return Py_NewRef(self->traceback); } static int @@ -300,8 +294,7 @@ BaseException_set_tb(PyBaseExceptionObject *self, PyObject *tb, void *Py_UNUSED( return -1; } - Py_INCREF(tb); - Py_XSETREF(self->traceback, tb); + Py_XSETREF(self->traceback, Py_NewRef(tb)); return 0; } @@ -380,8 +373,7 @@ PyObject * PyException_GetTraceback(PyObject *self) { PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); - Py_XINCREF(base_self->traceback); - return base_self->traceback; + return Py_XNewRef(base_self->traceback); } @@ -395,8 +387,7 @@ PyObject * PyException_GetCause(PyObject *self) { PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; - Py_XINCREF(cause); - return cause; + return Py_XNewRef(cause); } /* Steals a reference to cause */ @@ -412,8 +403,7 @@ PyObject * PyException_GetContext(PyObject *self) { PyObject 
*context = _PyBaseExceptionObject_cast(self)->context; - Py_XINCREF(context); - return context; + return Py_XNewRef(context); } /* Steals a reference to context */ @@ -579,8 +569,7 @@ StopIteration_init(PyStopIterationObject *self, PyObject *args, PyObject *kwds) value = PyTuple_GET_ITEM(args, 0); else value = Py_None; - Py_INCREF(value); - self->value = value; + self->value = Py_NewRef(value); return 0; } @@ -633,12 +622,10 @@ SystemExit_init(PySystemExitObject *self, PyObject *args, PyObject *kwds) if (size == 0) return 0; if (size == 1) { - Py_INCREF(PyTuple_GET_ITEM(args, 0)); - Py_XSETREF(self->code, PyTuple_GET_ITEM(args, 0)); + Py_XSETREF(self->code, Py_NewRef(PyTuple_GET_ITEM(args, 0))); } else { /* size > 1 */ - Py_INCREF(args); - Py_XSETREF(self->code, args); + Py_XSETREF(self->code, Py_NewRef(args)); } return 0; } @@ -766,7 +753,19 @@ BaseExceptionGroup_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } } else { - /* Do nothing - we don't interfere with subclasses */ + /* user-defined subclass */ + if (nested_base_exceptions) { + int nonbase = PyObject_IsSubclass((PyObject*)cls, PyExc_Exception); + if (nonbase == -1) { + goto error; + } + else if (nonbase == 1) { + PyErr_Format(PyExc_TypeError, + "Cannot nest BaseExceptions in '%.200s'", + cls->tp_name); + goto error; + } + } } if (!cls) { @@ -962,11 +961,11 @@ typedef enum { EXCEPTION_GROUP_MATCH_BY_TYPE = 0, /* A PyFunction returning True for matching exceptions */ EXCEPTION_GROUP_MATCH_BY_PREDICATE = 1, - /* A set of leaf exceptions to include in the result. + /* A set of the IDs of leaf exceptions to include in the result. * This matcher type is used internally by the interpreter * to construct reraised exceptions. */ - EXCEPTION_GROUP_MATCH_INSTANCES = 2 + EXCEPTION_GROUP_MATCH_INSTANCE_IDS = 2 } _exceptiongroup_split_matcher_type; static int @@ -1024,10 +1023,16 @@ exceptiongroup_split_check_match(PyObject *exc, Py_DECREF(exc_matches); return is_true; } - case EXCEPTION_GROUP_MATCH_INSTANCES: { + case EXCEPTION_GROUP_MATCH_INSTANCE_IDS: { assert(PySet_Check(matcher_value)); if (!_PyBaseExceptionGroup_Check(exc)) { - return PySet_Contains(matcher_value, exc); + PyObject *exc_id = PyLong_FromVoidPtr(exc); + if (exc_id == NULL) { + return -1; + } + int res = PySet_Contains(matcher_value, exc_id); + Py_DECREF(exc_id); + return res; } return 0; } @@ -1212,32 +1217,35 @@ BaseExceptionGroup_subgroup(PyObject *self, PyObject *args) } static int -collect_exception_group_leaves(PyObject *exc, PyObject *leaves) +collect_exception_group_leaf_ids(PyObject *exc, PyObject *leaf_ids) { if (Py_IsNone(exc)) { return 0; } assert(PyExceptionInstance_Check(exc)); - assert(PySet_Check(leaves)); + assert(PySet_Check(leaf_ids)); - /* Add all leaf exceptions in exc to the leaves set */ + /* Add IDs of all leaf exceptions in exc to the leaf_ids set */ if (!_PyBaseExceptionGroup_Check(exc)) { - if (PySet_Add(leaves, exc) < 0) { + PyObject *exc_id = PyLong_FromVoidPtr(exc); + if (exc_id == NULL) { return -1; } - return 0; + int res = PySet_Add(leaf_ids, exc_id); + Py_DECREF(exc_id); + return res; } PyBaseExceptionGroupObject *eg = _PyBaseExceptionGroupObject_cast(exc); Py_ssize_t num_excs = PyTuple_GET_SIZE(eg->excs); /* recursive calls */ for (Py_ssize_t i = 0; i < num_excs; i++) { PyObject *e = PyTuple_GET_ITEM(eg->excs, i); - if (_Py_EnterRecursiveCall(" in collect_exception_group_leaves")) { + if (_Py_EnterRecursiveCall(" in collect_exception_group_leaf_ids")) { return -1; } - int res = collect_exception_group_leaves(e, leaves); + int res 
= collect_exception_group_leaf_ids(e, leaf_ids); _Py_LeaveRecursiveCall(); if (res < 0) { return -1; @@ -1258,8 +1266,8 @@ exception_group_projection(PyObject *eg, PyObject *keep) assert(_PyBaseExceptionGroup_Check(eg)); assert(PyList_CheckExact(keep)); - PyObject *leaves = PySet_New(NULL); - if (!leaves) { + PyObject *leaf_ids = PySet_New(NULL); + if (!leaf_ids) { return NULL; } @@ -1268,8 +1276,8 @@ exception_group_projection(PyObject *eg, PyObject *keep) PyObject *e = PyList_GET_ITEM(keep, i); assert(e != NULL); assert(_PyBaseExceptionGroup_Check(e)); - if (collect_exception_group_leaves(e, leaves) < 0) { - Py_DECREF(leaves); + if (collect_exception_group_leaf_ids(e, leaf_ids) < 0) { + Py_DECREF(leaf_ids); return NULL; } } @@ -1277,9 +1285,9 @@ exception_group_projection(PyObject *eg, PyObject *keep) _exceptiongroup_split_result split_result; bool construct_rest = false; int err = exceptiongroup_split_recursive( - eg, EXCEPTION_GROUP_MATCH_INSTANCES, leaves, + eg, EXCEPTION_GROUP_MATCH_INSTANCE_IDS, leaf_ids, construct_rest, &split_result); - Py_DECREF(leaves); + Py_DECREF(leaf_ids); if (err < 0) { return NULL; } @@ -1464,11 +1472,12 @@ SimpleExtendsException(PyExc_BaseException, KeyboardInterrupt, static int ImportError_init(PyImportErrorObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"name", "path", 0}; + static char *kwlist[] = {"name", "path", "name_from", 0}; PyObject *empty_tuple; PyObject *msg = NULL; PyObject *name = NULL; PyObject *path = NULL; + PyObject *name_from = NULL; if (BaseException_init((PyBaseExceptionObject *)self, args, NULL) == -1) return -1; @@ -1476,22 +1485,19 @@ ImportError_init(PyImportErrorObject *self, PyObject *args, PyObject *kwds) empty_tuple = PyTuple_New(0); if (!empty_tuple) return -1; - if (!PyArg_ParseTupleAndKeywords(empty_tuple, kwds, "|$OO:ImportError", kwlist, - &name, &path)) { + if (!PyArg_ParseTupleAndKeywords(empty_tuple, kwds, "|$OOO:ImportError", kwlist, + &name, &path, &name_from)) { Py_DECREF(empty_tuple); return -1; } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); - - Py_XINCREF(path); - Py_XSETREF(self->path, path); + Py_XSETREF(self->name, Py_XNewRef(name)); + Py_XSETREF(self->path, Py_XNewRef(path)); + Py_XSETREF(self->name_from, Py_XNewRef(name_from)); if (PyTuple_GET_SIZE(args) == 1) { - msg = PyTuple_GET_ITEM(args, 0); - Py_INCREF(msg); + msg = Py_NewRef(PyTuple_GET_ITEM(args, 0)); } Py_XSETREF(self->msg, msg); @@ -1504,6 +1510,7 @@ ImportError_clear(PyImportErrorObject *self) Py_CLEAR(self->msg); Py_CLEAR(self->name); Py_CLEAR(self->path); + Py_CLEAR(self->name_from); return BaseException_clear((PyBaseExceptionObject *)self); } @@ -1521,6 +1528,7 @@ ImportError_traverse(PyImportErrorObject *self, visitproc visit, void *arg) Py_VISIT(self->msg); Py_VISIT(self->name); Py_VISIT(self->path); + Py_VISIT(self->name_from); return BaseException_traverse((PyBaseExceptionObject *)self, visit, arg); } @@ -1528,8 +1536,7 @@ static PyObject * ImportError_str(PyImportErrorObject *self) { if (self->msg && PyUnicode_CheckExact(self->msg)) { - Py_INCREF(self->msg); - return self->msg; + return Py_NewRef(self->msg); } else { return BaseException_str((PyBaseExceptionObject *)self); @@ -1540,7 +1547,7 @@ static PyObject * ImportError_getstate(PyImportErrorObject *self) { PyObject *dict = ((PyBaseExceptionObject *)self)->dict; - if (self->name || self->path) { + if (self->name || self->path || self->name_from) { dict = dict ? 
PyDict_Copy(dict) : PyDict_New(); if (dict == NULL) return NULL; @@ -1552,11 +1559,14 @@ ImportError_getstate(PyImportErrorObject *self) Py_DECREF(dict); return NULL; } + if (self->name_from && PyDict_SetItem(dict, &_Py_ID(name_from), self->name_from) < 0) { + Py_DECREF(dict); + return NULL; + } return dict; } else if (dict) { - Py_INCREF(dict); - return dict; + return Py_NewRef(dict); } else { Py_RETURN_NONE; @@ -1588,6 +1598,8 @@ static PyMemberDef ImportError_members[] = { PyDoc_STR("module name")}, {"path", T_OBJECT, offsetof(PyImportErrorObject, path), 0, PyDoc_STR("module path")}, + {"name_from", T_OBJECT, offsetof(PyImportErrorObject, name_from), 0, + PyDoc_STR("name imported from module")}, {NULL} /* Sentinel */ }; @@ -1681,8 +1693,7 @@ oserror_parse_args(PyObject **p_args, PyTuple_SET_ITEM(newargs, 0, *myerrno); for (i = 1; i < nargs; i++) { PyObject *val = PyTuple_GET_ITEM(args, i); - Py_INCREF(val); - PyTuple_SET_ITEM(newargs, i, val); + PyTuple_SET_ITEM(newargs, i, Py_NewRef(val)); } Py_DECREF(args); args = *p_args = newargs; @@ -1717,12 +1728,10 @@ oserror_init(PyOSErrorObject *self, PyObject **p_args, return -1; } else { - Py_INCREF(filename); - self->filename = filename; + self->filename = Py_NewRef(filename); if (filename2 && filename2 != Py_None) { - Py_INCREF(filename2); - self->filename2 = filename2; + self->filename2 = Py_NewRef(filename2); } if (nargs >= 2 && nargs <= 5) { @@ -1737,15 +1746,10 @@ oserror_init(PyOSErrorObject *self, PyObject **p_args, } } } - Py_XINCREF(myerrno); - self->myerrno = myerrno; - - Py_XINCREF(strerror); - self->strerror = strerror; - + self->myerrno = Py_XNewRef(myerrno); + self->strerror = Py_XNewRef(strerror); #ifdef MS_WINDOWS - Py_XINCREF(winerror); - self->winerror = winerror; + self->winerror = Py_XNewRef(winerror); #endif /* Steals the reference to args */ @@ -1971,7 +1975,7 @@ static PyObject * OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *args = self->args; - PyObject *res = NULL, *tmp; + PyObject *res = NULL; /* self->args is only the first two real arguments if there was a * file name given to OSError. */ @@ -1981,16 +1985,9 @@ OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) if (!args) return NULL; - tmp = PyTuple_GET_ITEM(self->args, 0); - Py_INCREF(tmp); - PyTuple_SET_ITEM(args, 0, tmp); - - tmp = PyTuple_GET_ITEM(self->args, 1); - Py_INCREF(tmp); - PyTuple_SET_ITEM(args, 1, tmp); - - Py_INCREF(self->filename); - PyTuple_SET_ITEM(args, 2, self->filename); + PyTuple_SET_ITEM(args, 0, Py_NewRef(PyTuple_GET_ITEM(self->args, 0))); + PyTuple_SET_ITEM(args, 1, Py_NewRef(PyTuple_GET_ITEM(self->args, 1))); + PyTuple_SET_ITEM(args, 2, Py_NewRef(self->filename)); if (self->filename2) { /* @@ -1998,12 +1995,10 @@ OSError_reduce(PyOSErrorObject *self, PyObject *Py_UNUSED(ignored)) * So, to recreate filename2, we need to pass in * winerror as well. 
*/ - Py_INCREF(Py_None); - PyTuple_SET_ITEM(args, 3, Py_None); + PyTuple_SET_ITEM(args, 3, Py_NewRef(Py_None)); /* filename2 */ - Py_INCREF(self->filename2); - PyTuple_SET_ITEM(args, 4, self->filename2); + PyTuple_SET_ITEM(args, 4, Py_NewRef(self->filename2)); } } else Py_INCREF(args); @@ -2164,8 +2159,7 @@ NameError_init(PyNameErrorObject *self, PyObject *args, PyObject *kwds) } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); + Py_XSETREF(self->name, Py_XNewRef(name)); return 0; } @@ -2239,11 +2233,8 @@ AttributeError_init(PyAttributeErrorObject *self, PyObject *args, PyObject *kwds } Py_DECREF(empty_tuple); - Py_XINCREF(name); - Py_XSETREF(self->name, name); - - Py_XINCREF(obj); - Py_XSETREF(self->obj, obj); + Py_XSETREF(self->name, Py_XNewRef(name)); + Py_XSETREF(self->obj, Py_XNewRef(obj)); return 0; } @@ -2301,8 +2292,7 @@ SyntaxError_init(PySyntaxErrorObject *self, PyObject *args, PyObject *kwds) return -1; if (lenargs >= 1) { - Py_INCREF(PyTuple_GET_ITEM(args, 0)); - Py_XSETREF(self->msg, PyTuple_GET_ITEM(args, 0)); + Py_XSETREF(self->msg, Py_NewRef(PyTuple_GET_ITEM(args, 0))); } if (lenargs == 2) { info = PyTuple_GET_ITEM(args, 1); @@ -2398,8 +2388,7 @@ my_basename(PyObject *name) return PyUnicode_Substring(name, offset, size); } else { - Py_INCREF(name); - return name; + return Py_NewRef(name); } } @@ -2551,8 +2540,7 @@ get_string(PyObject *attr, const char *name) PyErr_Format(PyExc_TypeError, "%.200s attribute must be bytes", name); return NULL; } - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } static PyObject * @@ -2568,8 +2556,7 @@ get_unicode(PyObject *attr, const char *name) "%.200s attribute must be unicode", name); return NULL; } - Py_INCREF(attr); - return attr; + return Py_NewRef(attr); } static int diff --git a/Objects/fileobject.c b/Objects/fileobject.c index ab67cd23cef3b3..e99e155f2b8c98 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -67,8 +67,7 @@ PyFile_GetLine(PyObject *f, int n) } if (result != NULL && !PyBytes_Check(result) && !PyUnicode_Check(result)) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_TypeError, "object.readline() returned non-string"); } @@ -77,8 +76,7 @@ PyFile_GetLine(PyObject *f, int n) const char *s = PyBytes_AS_STRING(result); Py_ssize_t len = PyBytes_GET_SIZE(result); if (len == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_EOFError, "EOF when reading a line"); } @@ -88,24 +86,21 @@ PyFile_GetLine(PyObject *f, int n) else { PyObject *v; v = PyBytes_FromStringAndSize(s, len-1); - Py_DECREF(result); - result = v; + Py_SETREF(result, v); } } } if (n < 0 && result != NULL && PyUnicode_Check(result)) { Py_ssize_t len = PyUnicode_GET_LENGTH(result); if (len == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); PyErr_SetString(PyExc_EOFError, "EOF when reading a line"); } else if (PyUnicode_READ_CHAR(result, len-1) == '\n') { PyObject *v; v = PyUnicode_Substring(result, 0, len-1); - Py_DECREF(result); - result = v; + Py_SETREF(result, v); } } return result; diff --git a/Objects/floatobject.c b/Objects/floatobject.c index c4353572d32d8e..912b742f797d24 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -371,8 +371,7 @@ convert_to_double(PyObject **v, double *dbl) } } else { - Py_INCREF(Py_NotImplemented); - *v = Py_NotImplemented; + *v = Py_NewRef(Py_NotImplemented); return -1; } return 0; @@ -532,20 +531,17 @@ float_richcompare(PyObject *v, PyObject *w, int op) temp = 
_PyLong_Lshift(ww, 1); if (temp == NULL) goto Error; - Py_DECREF(ww); - ww = temp; + Py_SETREF(ww, temp); temp = _PyLong_Lshift(vv, 1); if (temp == NULL) goto Error; - Py_DECREF(vv); - vv = temp; + Py_SETREF(vv, temp); temp = PyNumber_Or(vv, _PyLong_GetOne()); if (temp == NULL) goto Error; - Py_DECREF(vv); - vv = temp; + Py_SETREF(vv, temp); } r = PyObject_RichCompareBool(vv, ww, op); @@ -904,8 +900,7 @@ float_is_integer_impl(PyObject *self) PyExc_ValueError); return NULL; } - Py_INCREF(o); - return o; + return Py_NewRef(o); } /*[clinic input] @@ -1124,11 +1119,12 @@ float___round___impl(PyObject *self, PyObject *o_ndigits) static PyObject * float_float(PyObject *v) { - if (PyFloat_CheckExact(v)) - Py_INCREF(v); - else - v = PyFloat_FromDouble(((PyFloatObject *)v)->ob_fval); - return v; + if (PyFloat_CheckExact(v)) { + return Py_NewRef(v); + } + else { + return PyFloat_FromDouble(((PyFloatObject *)v)->ob_fval); + } } /*[clinic input] @@ -1724,12 +1720,14 @@ float___getnewargs___impl(PyObject *self) } /* this is for the benefit of the pack/unpack routines below */ +typedef enum _py_float_format_type float_format_type; +#define unknown_format _py_float_format_unknown +#define ieee_big_endian_format _py_float_format_ieee_big_endian +#define ieee_little_endian_format _py_float_format_ieee_little_endian -typedef enum { - unknown_format, ieee_big_endian_format, ieee_little_endian_format -} float_format_type; +#define float_format (_PyRuntime.float_state.float_format) +#define double_format (_PyRuntime.float_state.double_format) -static float_format_type double_format, float_format; /*[clinic input] @classmethod @@ -1930,13 +1928,9 @@ PyTypeObject PyFloat_Type = { .tp_vectorcall = (vectorcallfunc)float_vectorcall, }; -void -_PyFloat_InitState(PyInterpreterState *interp) +static void +_init_global_state(void) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - float_format_type detected_double_format, detected_float_format; /* We attempt to determine if this machine is using IEEE @@ -1986,6 +1980,15 @@ _PyFloat_InitState(PyInterpreterState *interp) float_format = detected_float_format; } +void +_PyFloat_InitState(PyInterpreterState *interp) +{ + if (!_Py_IsMainInterpreter(interp)) { + return; + } + _init_global_state(); +} + PyStatus _PyFloat_InitTypes(PyInterpreterState *interp) { diff --git a/Objects/frame_layout.md b/Objects/frame_layout.md index 11688f68e42e3d..2f95214db56498 100644 --- a/Objects/frame_layout.md +++ b/Objects/frame_layout.md @@ -9,10 +9,10 @@ results in poor locality of reference. In 3.11, rather than have these frames scattered about memory, as happens for heap-allocated objects, frames are allocated -contiguously in a per-thread stack. +contiguously in a per-thread stack. This improves performance significantly for two reasons: * It reduces allocation overhead to a pointer comparison and increment. -* Stack allocated data has the best possible locality and will always be in +* Stack allocated data has the best possible locality and will always be in CPU cache. Generator and coroutines still need heap allocated activation records, but @@ -63,7 +63,7 @@ We may implement this in the future. > In a contiguous stack, we would need to save one fewer registers, as the > top of the caller's activation record would be the same at the base of the -> callee's. However, since some activation records are kept on the heap we +> callee's. However, since some activation records are kept on the heap we > cannot do this. 
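The per-thread stack described above is what makes frame pushes cheap. As a rough illustration only (an editorial sketch with hypothetical names, not the actual CPython allocator), a bump-pointer scheme reduces each push to a bounds check plus a pointer increment:

```c
#include <stddef.h>
#include <stdint.h>

/* Sketch of a bump-pointer frame stack; the real interpreter chains chunks
 * and stores PyObject pointers, but the push/pop cost is the same idea. */
typedef struct {
    uintptr_t *top;    /* next free slot in the current chunk */
    uintptr_t *limit;  /* one past the end of the current chunk */
} frame_stack;

static uintptr_t *
frame_stack_push(frame_stack *s, size_t nslots)
{
    if ((size_t)(s->limit - s->top) < nslots) {
        return NULL;   /* a real implementation would allocate a new chunk */
    }
    uintptr_t *base = s->top;
    s->top += nslots;  /* allocation is just a pointer increment */
    return base;
}

static void
frame_stack_pop(frame_stack *s, uintptr_t *base)
{
    s->top = base;     /* frames are released in LIFO order */
}
```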
### Generators and Coroutines @@ -85,7 +85,7 @@ and builtins, than strong references to both globals and builtins. ### Frame objects When creating a backtrace or when calling `sys._getframe()` the frame becomes -visible to Python code. When this happens a new `PyFrameObject` is created +visible to Python code. When this happens a new `PyFrameObject` is created and a strong reference to it placed in the `frame_obj` field of the specials section. The `frame_obj` field is initially `NULL`. @@ -104,7 +104,7 @@ Generator objects have a `_PyInterpreterFrame` embedded in them. This means that creating a generator requires only a single allocation, reducing allocation overhead and improving locality of reference. The embedded frame is linked into the per-thread frame when iterated or -awaited. +awaited. If a frame object associated with a generator outlives the generator, then the embedded `_PyInterpreterFrame` is copied into the frame object. @@ -119,4 +119,14 @@ Thus, some of the field names may be a bit misleading. For example the `f_globals` field has a `f_` prefix implying it belongs to the `PyFrameObject` struct, although it belongs to the `_PyInterpreterFrame` struct. -We may rationalize this naming scheme for 3.12. \ No newline at end of file +We may rationalize this naming scheme for 3.12. + + +### Shim frames + +On entry to `_PyEval_EvalFrameDefault()` a shim `_PyInterpreterFrame` is pushed. +This frame is stored on the C stack, and popped when `_PyEval_EvalFrameDefault()` +returns. This extra frame is inserted so that `RETURN_VALUE`, `YIELD_VALUE`, and +`RETURN_GENERATOR` do not need to check whether the current frame is the entry frame. +The shim frame points to a special code object containing the `INTERPRETER_EXIT` +instruction which cleans up the shim frame and returns. 
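The shim's code object can be created with the `_Py_MakeShimCode()` helper added in the `Objects/codeobject.c` part of this diff. The following is a hedged sketch of how a caller might use it; the `_PyShimCodeDef` field names are taken from the helper's body, while the `INTERPRETER_EXIT` opcode constant, the shim's name, and the stack size are assumptions for illustration only:

```c
/* Sketch only: build a one-instruction shim code object.  codelen must be a
 * whole number of _Py_CODEUNITs (2 bytes), so one opcode byte plus its oparg
 * byte is exactly one code unit. */
static PyCodeObject *
make_interpreter_exit_shim(void)
{
    static const uint8_t code[] = { INTERPRETER_EXIT, 0 };
    static const _PyShimCodeDef def = {
        .cname = "<interpreter exit shim>",
        .code = code,
        .codelen = sizeof(code),
        .stacksize = 1,
    };
    /* Returns NULL with an exception set on failure. */
    return _Py_MakeShimCode(&def);
}
```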
diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 6a51a946ef35f0..74c26d8d4d96a5 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -28,8 +28,7 @@ frame_getlocals(PyFrameObject *f, void *closure) if (PyFrame_FastToLocalsWithError(f) < 0) return NULL; PyObject *locals = f->f_frame->f_locals; - Py_INCREF(locals); - return locals; + return Py_NewRef(locals); } int @@ -73,8 +72,7 @@ frame_getglobals(PyFrameObject *f, void *closure) if (globals == NULL) { globals = Py_None; } - Py_INCREF(globals); - return globals; + return Py_NewRef(globals); } static PyObject * @@ -84,8 +82,7 @@ frame_getbuiltins(PyFrameObject *f, void *closure) if (builtins == NULL) { builtins = Py_None; } - Py_INCREF(builtins); - return builtins; + return Py_NewRef(builtins); } static PyObject * @@ -367,8 +364,8 @@ mark_stacks(PyCodeObject *code_obj, int len) break; case FOR_ITER: { - int64_t target_stack = pop_value(next_stack); - stacks[i+1] = push_value(next_stack, Object); + int64_t target_stack = push_value(next_stack, Object); + stacks[i+1] = target_stack; j = get_arg(code, i) + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i; assert(j < len); assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack); @@ -603,7 +600,7 @@ _PyFrame_GetState(PyFrameObject *frame) if (_PyInterpreterFrame_LASTI(frame->f_frame) < 0) { return FRAME_CREATED; } - switch (_PyOpcode_Deopt[_Py_OPCODE(*frame->f_frame->prev_instr)]) + switch (_Py_OPCODE(*frame->f_frame->prev_instr)) { case COPY_FREE_VARS: case MAKE_CELL: @@ -620,37 +617,6 @@ _PyFrame_GetState(PyFrameObject *frame) Py_UNREACHABLE(); } -static void -add_load_fast_null_checks(PyCodeObject *co) -{ - int changed = 0; - _Py_CODEUNIT *instructions = _PyCode_CODE(co); - for (Py_ssize_t i = 0; i < Py_SIZE(co); i++) { - int opcode = _Py_OPCODE(instructions[i]); - switch (opcode) { - case LOAD_FAST: - case LOAD_FAST__LOAD_FAST: - case LOAD_FAST__LOAD_CONST: - changed = 1; - _Py_SET_OPCODE(instructions[i], LOAD_FAST_CHECK); - break; - case LOAD_CONST__LOAD_FAST: - changed = 1; - _Py_SET_OPCODE(instructions[i], LOAD_CONST); - break; - case STORE_FAST__LOAD_FAST: - changed = 1; - _Py_SET_OPCODE(instructions[i], STORE_FAST); - break; - } - i += _PyOpcode_Caches[_PyOpcode_Deopt[opcode]]; - } - if (changed) { - // invalidate cached co_code object - Py_CLEAR(co->_co_code); - } -} - /* Setter for f_lineno - you can set f_lineno from within a trace function in * order to jump to a given line of code, subject to some restrictions. Most * lines are OK to jump to because they don't make any assumptions about the @@ -740,8 +706,6 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore return -1; } - add_load_fast_null_checks(f->f_frame->f_code); - /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this * should never overflow. */ int len = (int)Py_SIZE(f->f_frame->f_code); @@ -800,6 +764,31 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore PyErr_SetString(PyExc_ValueError, msg); return -1; } + // Populate any NULL locals that the compiler might have "proven" to exist + // in the new location. 
Rather than crashing or changing co_code, just bind + // None instead: + int unbound = 0; + for (int i = 0; i < f->f_frame->f_code->co_nlocalsplus; i++) { + // Counting every unbound local is overly-cautious, but a full flow + // analysis (like we do in the compiler) is probably too expensive: + unbound += f->f_frame->localsplus[i] == NULL; + } + if (unbound) { + const char *e = "assigning None to %d unbound local%s"; + const char *s = (unbound == 1) ? "" : "s"; + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 0, e, unbound, s)) { + return -1; + } + // Do this in a second pass to avoid writing a bunch of Nones when + // warnings are being treated as errors and the previous bit raises: + for (int i = 0; i < f->f_frame->f_code->co_nlocalsplus; i++) { + if (f->f_frame->localsplus[i] == NULL) { + f->f_frame->localsplus[i] = Py_NewRef(Py_None); + unbound--; + } + } + assert(unbound == 0); + } if (state == FRAME_SUSPENDED) { /* Account for value popped by yield */ start_stack = pop_value(start_stack); @@ -831,13 +820,9 @@ static PyObject * frame_gettrace(PyFrameObject *f, void *closure) { PyObject* trace = f->f_trace; - if (trace == NULL) trace = Py_None; - - Py_INCREF(trace); - - return trace; + return Py_NewRef(trace); } static int @@ -846,9 +831,7 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure) if (v == Py_None) { v = NULL; } - Py_XINCREF(v); - Py_XSETREF(f->f_trace, v); - + Py_XSETREF(f->f_trace, Py_XNewRef(v)); return 0; } @@ -866,15 +849,6 @@ static PyGetSetDef frame_getsetlist[] = { {0} }; -/* Stack frames are allocated and deallocated at a considerable rate. - In an attempt to improve the speed of function calls, we maintain - a separate free list of stack frames (just like floats are - allocated in a special way -- see floatobject.c). 
When a stack - frame is on the free list, only the following members have a meaning: - ob_type == &Frametype - f_back next item on free list, or NULL -*/ - static void frame_dealloc(PyFrameObject *f) { @@ -1036,11 +1010,9 @@ PyTypeObject PyFrame_Type = { static void init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals) { - /* _PyFrame_InitializeSpecials consumes reference to func */ - Py_INCREF(func); - Py_XINCREF(locals); PyCodeObject *code = (PyCodeObject *)func->func_code; - _PyFrame_InitializeSpecials(frame, func, locals, code); + _PyFrame_InitializeSpecials(frame, (PyFunctionObject*)Py_NewRef(func), + Py_XNewRef(locals), code); for (Py_ssize_t i = 0; i < code->co_nlocalsplus; i++) { frame->localsplus[i] = NULL; } @@ -1128,82 +1100,106 @@ _PyFrame_OpAlreadyRan(_PyInterpreterFrame *frame, int opcode, int oparg) return 0; } + +// Initialize frame free variables if needed +static void +frame_init_get_vars(_PyInterpreterFrame *frame) +{ + // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt + // here: + PyCodeObject *co = frame->f_code; + int lasti = _PyInterpreterFrame_LASTI(frame); + if (!(lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS + && PyFunction_Check(frame->f_funcobj))) + { + /* Free vars are initialized */ + return; + } + + /* Free vars have not been initialized -- Do that */ + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + for (int i = 0; i < co->co_nfreevars; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + // COPY_FREE_VARS doesn't have inline CACHEs, either: + frame->prev_instr = _PyCode_CODE(frame->f_code); +} + + +static int +frame_get_var(_PyInterpreterFrame *frame, PyCodeObject *co, int i, + PyObject **pvalue) +{ + _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); + + /* If the namespace is unoptimized, then one of the + following cases applies: + 1. It does not contain free variables, because it + uses import * or is a top-level namespace. + 2. It is a class namespace. + We don't want to accidentally copy free variables + into the locals dict used by the class. + */ + if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) { + return 0; + } + + PyObject *value = frame->localsplus[i]; + if (frame->stacktop) { + if (kind & CO_FAST_FREE) { + // The cell was set by COPY_FREE_VARS. + assert(value != NULL && PyCell_Check(value)); + value = PyCell_GET(value); + } + else if (kind & CO_FAST_CELL) { + // Note that no *_DEREF ops can happen before MAKE_CELL + // executes. So there's no need to duplicate the work + // that MAKE_CELL would otherwise do later, if it hasn't + // run yet. + if (value != NULL) { + if (PyCell_Check(value) && + _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) { + // (likely) MAKE_CELL must have executed already. + value = PyCell_GET(value); + } + // (likely) Otherwise it it is an arg (kind & CO_FAST_LOCAL), + // with the initial value set when the frame was created... + // (unlikely) ...or it was set to some initial value by + // an earlier call to PyFrame_LocalsToFast(). 
+ } + } + } + else { + assert(value == NULL); + } + *pvalue = value; + return 1; +} + int -_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) { +_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) +{ /* Merge fast locals into f->f_locals */ - PyObject *locals; - PyObject **fast; - PyCodeObject *co; - locals = frame->f_locals; + PyObject *locals = frame->f_locals; if (locals == NULL) { locals = frame->f_locals = PyDict_New(); - if (locals == NULL) + if (locals == NULL) { return -1; - } - co = frame->f_code; - fast = _PyFrame_GetLocalsArray(frame); - // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt - // here: - int lasti = _PyInterpreterFrame_LASTI(frame); - if (lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS - && PyFunction_Check(frame->f_funcobj)) - { - /* Free vars have not been initialized -- Do that */ - PyCodeObject *co = frame->f_code; - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; - int offset = co->co_nlocals + co->co_nplaincellvars; - for (int i = 0; i < co->co_nfreevars; ++i) { - PyObject *o = PyTuple_GET_ITEM(closure, i); - Py_INCREF(o); - frame->localsplus[offset + i] = o; } - // COPY_FREE_VARS doesn't have inline CACHEs, either: - frame->prev_instr = _PyCode_CODE(frame->f_code); } - for (int i = 0; i < co->co_nlocalsplus; i++) { - _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); - /* If the namespace is unoptimized, then one of the - following cases applies: - 1. It does not contain free variables, because it - uses import * or is a top-level namespace. - 2. It is a class namespace. - We don't want to accidentally copy free variables - into the locals dict used by the class. - */ - if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) { + frame_init_get_vars(frame); + + PyCodeObject *co = frame->f_code; + for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *value; // borrowed reference + if (!frame_get_var(frame, co, i, &value)) { continue; } PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); - PyObject *value = fast[i]; - if (frame->stacktop) { - if (kind & CO_FAST_FREE) { - // The cell was set by COPY_FREE_VARS. - assert(value != NULL && PyCell_Check(value)); - value = PyCell_GET(value); - } - else if (kind & CO_FAST_CELL) { - // Note that no *_DEREF ops can happen before MAKE_CELL - // executes. So there's no need to duplicate the work - // that MAKE_CELL would otherwise do later, if it hasn't - // run yet. - if (value != NULL) { - if (PyCell_Check(value) && - _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) { - // (likely) MAKE_CELL must have executed already. - value = PyCell_GET(value); - } - // (likely) Otherwise it it is an arg (kind & CO_FAST_LOCAL), - // with the initial value set when the frame was created... - // (unlikely) ...or it was set to some initial value by - // an earlier call to PyFrame_LocalsToFast(). 
- } - } - } - else { - assert(value == NULL); - } if (value == NULL) { if (PyObject_DelItem(locals, name) != 0) { if (PyErr_ExceptionMatches(PyExc_KeyError)) { @@ -1223,6 +1219,54 @@ _PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) { return 0; } + +PyObject * +PyFrame_GetVar(PyFrameObject *frame_obj, PyObject *name) +{ + if (!PyUnicode_Check(name)) { + PyErr_Format(PyExc_TypeError, "name must be str, not %s", + Py_TYPE(name)->tp_name); + return NULL; + } + + _PyInterpreterFrame *frame = frame_obj->f_frame; + frame_init_get_vars(frame); + + PyCodeObject *co = frame->f_code; + for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *var_name = PyTuple_GET_ITEM(co->co_localsplusnames, i); + if (!_PyUnicode_Equal(var_name, name)) { + continue; + } + + PyObject *value; // borrowed reference + if (!frame_get_var(frame, co, i, &value)) { + break; + } + if (value == NULL) { + break; + } + return Py_NewRef(value); + } + + PyErr_Format(PyExc_NameError, "variable %R does not exist", name); + return NULL; +} + + +PyObject * +PyFrame_GetVarString(PyFrameObject *frame, const char *name) +{ + PyObject *name_obj = PyUnicode_FromString(name); + if (name_obj == NULL) { + return NULL; + } + PyObject *value = PyFrame_GetVar(frame, name_obj); + Py_DECREF(name_obj); + return value; +} + + int PyFrame_FastToLocalsWithError(PyFrameObject *f) { @@ -1264,7 +1308,6 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) } fast = _PyFrame_GetLocalsArray(frame); co = frame->f_code; - bool added_null_checks = false; PyErr_Fetch(&error_type, &error_value, &error_traceback); for (int i = 0; i < co->co_nlocalsplus; i++) { @@ -1284,10 +1327,6 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) } } PyObject *oldvalue = fast[i]; - if (!added_null_checks && oldvalue != NULL && value == NULL) { - add_load_fast_null_checks(co); - added_null_checks = true; - } PyObject *cell = NULL; if (kind == CO_FAST_FREE) { // The cell was set when the frame was created from @@ -1308,14 +1347,22 @@ _PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear) if (cell != NULL) { oldvalue = PyCell_GET(cell); if (value != oldvalue) { - Py_XINCREF(value); - PyCell_SET(cell, value); + PyCell_SET(cell, Py_XNewRef(value)); Py_XDECREF(oldvalue); } } else if (value != oldvalue) { - Py_XINCREF(value); - Py_XSETREF(fast[i], value); + if (value == NULL) { + // Probably can't delete this, since the compiler's flow + // analysis may have already "proven" that it exists here: + const char *e = "assigning None to unbound local %R"; + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 0, e, name)) { + // It's okay if frame_obj is NULL, just try anyways: + PyErr_WriteUnraisable((PyObject *)frame->frame_obj); + } + value = Py_NewRef(Py_None); + } + Py_XSETREF(fast[i], Py_NewRef(value)); } Py_XDECREF(value); } @@ -1332,15 +1379,15 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) } } - -int _PyFrame_IsEntryFrame(PyFrameObject *frame) +int +_PyFrame_IsEntryFrame(PyFrameObject *frame) { assert(frame != NULL); - assert(!_PyFrame_IsIncomplete(frame->f_frame)); - return frame->f_frame->is_entry; + _PyInterpreterFrame *f = frame->f_frame; + assert(!_PyFrame_IsIncomplete(f)); + return f->previous && f->previous->owner == FRAME_OWNED_BY_CSTACK; } - PyCodeObject * PyFrame_GetCode(PyFrameObject *frame) { @@ -1348,8 +1395,7 @@ PyFrame_GetCode(PyFrameObject *frame) assert(!_PyFrame_IsIncomplete(frame->f_frame)); PyCodeObject *code = frame->f_frame->f_code; assert(code != NULL); - Py_INCREF(code); - return code; + return 
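/* [Editor's usage sketch, not part of this patch] One way an extension could
 * call the PyFrame_GetVar()/PyFrame_GetVarString() API added above.  The
 * helper name and the variable name "answer" are illustrative only;
 * PyEval_GetFrame() returns a borrowed reference to the executing frame. */
static PyObject *
sketch_peek_local(void)
{
    PyFrameObject *frame = PyEval_GetFrame();
    if (frame == NULL) {
        PyErr_SetString(PyExc_RuntimeError, "no Python frame is executing");
        return NULL;
    }
    /* New reference on success; NULL with NameError set if "answer" is not
     * bound in the frame. */
    return PyFrame_GetVarString(frame, "answer");
}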
(PyCodeObject*)Py_NewRef(code); } @@ -1368,8 +1414,7 @@ PyFrame_GetBack(PyFrameObject *frame) back = _PyFrame_GetFrameObject(prev); } } - Py_XINCREF(back); - return back; + return (PyFrameObject*)Py_XNewRef(back); } PyObject* @@ -1432,5 +1477,3 @@ _PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals) return _PyEval_GetBuiltins(tstate); } - - diff --git a/Objects/funcobject.c b/Objects/funcobject.c index 7f257a9986987b..bf97edc53ad7d9 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -3,11 +3,67 @@ #include "Python.h" #include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals() +#include "pycore_function.h" // FUNC_MAX_WATCHERS #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "structmember.h" // PyMemberDef -static uint32_t next_func_version = 1; +static void +notify_func_watchers(PyInterpreterState *interp, PyFunction_WatchEvent event, + PyFunctionObject *func, PyObject *new_value) +{ + for (int i = 0; i < FUNC_MAX_WATCHERS; i++) { + PyFunction_WatchCallback cb = interp->func_watchers[i]; + if ((cb != NULL) && (cb(event, func, new_value) < 0)) { + PyErr_WriteUnraisable((PyObject *) func); + } + } +} + +static inline void +handle_func_event(PyFunction_WatchEvent event, PyFunctionObject *func, + PyObject *new_value) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (interp->active_func_watchers) { + notify_func_watchers(interp, event, func, new_value); + } +} + +int +PyFunction_AddWatcher(PyFunction_WatchCallback callback) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->_initialized); + for (int i = 0; i < FUNC_MAX_WATCHERS; i++) { + if (interp->func_watchers[i] == NULL) { + interp->func_watchers[i] = callback; + interp->active_func_watchers |= (1 << i); + return i; + } + } + PyErr_SetString(PyExc_RuntimeError, "no more func watcher IDs available"); + return -1; +} + +int +PyFunction_ClearWatcher(int watcher_id) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (watcher_id < 0 || watcher_id >= FUNC_MAX_WATCHERS) { + PyErr_Format(PyExc_ValueError, "invalid func watcher ID %d", + watcher_id); + return -1; + } + if (!interp->func_watchers[watcher_id]) { + PyErr_Format(PyExc_ValueError, "no func watcher set for ID %d", + watcher_id); + return -1; + } + interp->func_watchers[watcher_id] = NULL; + interp->active_func_watchers &= ~(1 << watcher_id); + return 0; +} PyFunctionObject * _PyFunction_FromConstructor(PyFrameConstructor *constr) @@ -17,22 +73,15 @@ _PyFunction_FromConstructor(PyFrameConstructor *constr) if (op == NULL) { return NULL; } - Py_INCREF(constr->fc_globals); - op->func_globals = constr->fc_globals; - Py_INCREF(constr->fc_builtins); - op->func_builtins = constr->fc_builtins; - Py_INCREF(constr->fc_name); - op->func_name = constr->fc_name; - Py_INCREF(constr->fc_qualname); - op->func_qualname = constr->fc_qualname; - Py_INCREF(constr->fc_code); - op->func_code = constr->fc_code; + op->func_globals = Py_NewRef(constr->fc_globals); + op->func_builtins = Py_NewRef(constr->fc_builtins); + op->func_name = Py_NewRef(constr->fc_name); + op->func_qualname = Py_NewRef(constr->fc_qualname); + op->func_code = Py_NewRef(constr->fc_code); op->func_defaults = NULL; op->func_kwdefaults = NULL; - Py_XINCREF(constr->fc_closure); - op->func_closure = constr->fc_closure; - Py_INCREF(Py_None); - op->func_doc = Py_None; + op->func_closure = Py_XNewRef(constr->fc_closure); + op->func_doc = Py_NewRef(Py_None); op->func_dict = NULL; op->func_weakreflist = 
NULL; op->func_module = NULL; @@ -40,6 +89,7 @@ _PyFunction_FromConstructor(PyFrameConstructor *constr) op->vectorcall = _PyFunction_Vectorcall; op->func_version = 0; _PyObject_GC_TRACK(op); + handle_func_event(PyFunction_EVENT_CREATE, op, NULL); return op; } @@ -52,12 +102,10 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname PyThreadState *tstate = _PyThreadState_GET(); - PyCodeObject *code_obj = (PyCodeObject *)code; - Py_INCREF(code_obj); + PyCodeObject *code_obj = (PyCodeObject *)Py_NewRef(code); - PyObject *name = code_obj->co_name; - assert(name != NULL); - Py_INCREF(name); + assert(code_obj->co_name != NULL); + PyObject *name = Py_NewRef(code_obj->co_name); if (!qualname) { qualname = code_obj->co_qualname; @@ -116,6 +164,7 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname op->vectorcall = _PyFunction_Vectorcall; op->func_version = 0; _PyObject_GC_TRACK(op); + handle_func_event(PyFunction_EVENT_CREATE, op, NULL); return (PyObject *)op; error: @@ -137,10 +186,10 @@ uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func) if (func->vectorcall != _PyFunction_Vectorcall) { return 0; } - if (next_func_version == 0) { + if (_PyRuntime.func_state.next_version == 0) { return 0; } - uint32_t v = next_func_version++; + uint32_t v = _PyRuntime.func_state.next_version++; func->func_version = v; return v; } @@ -207,6 +256,8 @@ PyFunction_SetDefaults(PyObject *op, PyObject *defaults) PyErr_SetString(PyExc_SystemError, "non-tuple default args"); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_DEFAULTS, + (PyFunctionObject *) op, defaults); ((PyFunctionObject *)op)->func_version = 0; Py_XSETREF(((PyFunctionObject *)op)->func_defaults, defaults); return 0; @@ -247,6 +298,8 @@ PyFunction_SetKwDefaults(PyObject *op, PyObject *defaults) "non-dict keyword only default args"); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_KWDEFAULTS, + (PyFunctionObject *) op, defaults); ((PyFunctionObject *)op)->func_version = 0; Py_XSETREF(((PyFunctionObject *)op)->func_kwdefaults, defaults); return 0; @@ -311,7 +364,6 @@ func_get_annotation_dict(PyFunctionObject *op) } Py_SETREF(op->func_annotations, ann_dict); } - Py_INCREF(op->func_annotations); assert(PyDict_Check(op->func_annotations)); return op->func_annotations; } @@ -368,8 +420,7 @@ func_get_code(PyFunctionObject *op, void *Py_UNUSED(ignored)) return NULL; } - Py_INCREF(op->func_code); - return op->func_code; + return Py_NewRef(op->func_code); } static int @@ -402,17 +453,16 @@ func_set_code(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored)) nclosure, nfree); return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_CODE, op, value); op->func_version = 0; - Py_INCREF(value); - Py_XSETREF(op->func_code, value); + Py_XSETREF(op->func_code, Py_NewRef(value)); return 0; } static PyObject * func_get_name(PyFunctionObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->func_name); - return op->func_name; + return Py_NewRef(op->func_name); } static int @@ -425,16 +475,14 @@ func_set_name(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__name__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->func_name, value); + Py_XSETREF(op->func_name, Py_NewRef(value)); return 0; } static PyObject * func_get_qualname(PyFunctionObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->func_qualname); - return op->func_qualname; + return Py_NewRef(op->func_qualname); } static int @@ -447,8 +495,7 @@ 
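/* [Editor's sketch, not part of this patch] A minimal callback for the
 * function-watcher API introduced in this file above (PyFunction_AddWatcher /
 * PyFunction_ClearWatcher); the counter and all names are illustrative only. */
static Py_ssize_t sketch_func_creations = 0;

static int
sketch_func_watcher(PyFunction_WatchEvent event, PyFunctionObject *func,
                    PyObject *new_value)
{
    if (event == PyFunction_EVENT_CREATE) {
        sketch_func_creations++;
    }
    /* Returning < 0 with an exception set makes notify_func_watchers()
     * report it as unraisable, so return 0 on the normal path. */
    return 0;
}

/* Typical registration:
 *     int wid = PyFunction_AddWatcher(sketch_func_watcher);
 *     ...
 *     PyFunction_ClearWatcher(wid);
 */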
func_set_qualname(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored "__qualname__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->func_qualname, value); + Py_XSETREF(op->func_qualname, Py_NewRef(value)); return 0; } @@ -461,8 +508,7 @@ func_get_defaults(PyFunctionObject *op, void *Py_UNUSED(ignored)) if (op->func_defaults == NULL) { Py_RETURN_NONE; } - Py_INCREF(op->func_defaults); - return op->func_defaults; + return Py_NewRef(op->func_defaults); } static int @@ -487,9 +533,9 @@ func_set_defaults(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignored return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_DEFAULTS, op, value); op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_defaults, value); + Py_XSETREF(op->func_defaults, Py_XNewRef(value)); return 0; } @@ -503,8 +549,7 @@ func_get_kwdefaults(PyFunctionObject *op, void *Py_UNUSED(ignored)) if (op->func_kwdefaults == NULL) { Py_RETURN_NONE; } - Py_INCREF(op->func_kwdefaults); - return op->func_kwdefaults; + return Py_NewRef(op->func_kwdefaults); } static int @@ -529,9 +574,9 @@ func_set_kwdefaults(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(ignor return -1; } + handle_func_event(PyFunction_EVENT_MODIFY_KWDEFAULTS, op, value); op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_kwdefaults, value); + Py_XSETREF(op->func_kwdefaults, Py_XNewRef(value)); return 0; } @@ -543,7 +588,8 @@ func_get_annotations(PyFunctionObject *op, void *Py_UNUSED(ignored)) if (op->func_annotations == NULL) return NULL; } - return func_get_annotation_dict(op); + PyObject *d = func_get_annotation_dict(op); + return Py_XNewRef(d); } static int @@ -560,8 +606,7 @@ func_set_annotations(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(igno return -1; } op->func_version = 0; - Py_XINCREF(value); - Py_XSETREF(op->func_annotations, value); + Py_XSETREF(op->func_annotations, Py_XNewRef(value)); return 0; } @@ -671,16 +716,13 @@ func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, return NULL; } if (name != Py_None) { - Py_INCREF(name); - Py_SETREF(newfunc->func_name, name); + Py_SETREF(newfunc->func_name, Py_NewRef(name)); } if (defaults != Py_None) { - Py_INCREF(defaults); - newfunc->func_defaults = defaults; + newfunc->func_defaults = Py_NewRef(defaults); } if (closure != Py_None) { - Py_INCREF(closure); - newfunc->func_closure = closure; + newfunc->func_closure = Py_NewRef(closure); } return (PyObject *)newfunc; @@ -712,6 +754,7 @@ func_clear(PyFunctionObject *op) static void func_dealloc(PyFunctionObject *op) { + handle_func_event(PyFunction_EVENT_DESTROY, op, NULL); _PyObject_GC_UNTRACK(op); if (op->func_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject *) op); @@ -754,8 +797,7 @@ static PyObject * func_descr_get(PyObject *func, PyObject *obj, PyObject *type) { if (obj == Py_None || obj == NULL) { - Py_INCREF(func); - return func; + return Py_NewRef(func); } return PyMethod_New(func, obj); } @@ -924,8 +966,7 @@ cm_init(PyObject *self, PyObject *args, PyObject *kwds) return -1; if (!PyArg_UnpackTuple(args, "classmethod", 1, 1, &callable)) return -1; - Py_INCREF(callable); - Py_XSETREF(cm->cm_callable, callable); + Py_XSETREF(cm->cm_callable, Py_NewRef(callable)); if (functools_wraps((PyObject *)cm, cm->cm_callable) < 0) { return -1; @@ -1035,8 +1076,7 @@ PyClassMethod_New(PyObject *callable) classmethod *cm = (classmethod *) PyType_GenericAlloc(&PyClassMethod_Type, 0); if (cm != NULL) { - Py_INCREF(callable); - cm->cm_callable = callable; + 
cm->cm_callable = Py_NewRef(callable); } return (PyObject *)cm; } @@ -1101,8 +1141,7 @@ sm_descr_get(PyObject *self, PyObject *obj, PyObject *type) "uninitialized staticmethod object"); return NULL; } - Py_INCREF(sm->sm_callable); - return sm->sm_callable; + return Py_NewRef(sm->sm_callable); } static int @@ -1115,8 +1154,7 @@ sm_init(PyObject *self, PyObject *args, PyObject *kwds) return -1; if (!PyArg_UnpackTuple(args, "staticmethod", 1, 1, &callable)) return -1; - Py_INCREF(callable); - Py_XSETREF(sm->sm_callable, callable); + Py_XSETREF(sm->sm_callable, Py_NewRef(callable)); if (functools_wraps((PyObject *)sm, sm->sm_callable) < 0) { return -1; @@ -1231,8 +1269,7 @@ PyStaticMethod_New(PyObject *callable) staticmethod *sm = (staticmethod *) PyType_GenericAlloc(&PyStaticMethod_Type, 0); if (sm != NULL) { - Py_INCREF(callable); - sm->sm_callable = callable; + sm->sm_callable = Py_NewRef(callable); } return (PyObject *)sm; } diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index 19f011fd3a7437..675fd496eefdaf 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -183,8 +183,7 @@ static int tuple_add(PyObject *self, Py_ssize_t len, PyObject *item) { if (tuple_index(self, len, item) < 0) { - Py_INCREF(item); - PyTuple_SET_ITEM(self, len, item); + PyTuple_SET_ITEM(self, len, Py_NewRef(item)); return 1; } return 0; @@ -201,8 +200,7 @@ tuple_extend(PyObject **dst, Py_ssize_t dstindex, assert(dstindex + count <= PyTuple_GET_SIZE(*dst)); for (Py_ssize_t i = 0; i < count; ++i) { PyObject *item = src[i]; - Py_INCREF(item); - PyTuple_SET_ITEM(*dst, dstindex + i, item); + PyTuple_SET_ITEM(*dst, dstindex + i, Py_NewRef(item)); } return dstindex + count; } @@ -304,8 +302,7 @@ subs_tvars(PyObject *obj, PyObject *params, continue; } } - Py_INCREF(arg); - PyTuple_SET_ITEM(subargs, j, arg); + PyTuple_SET_ITEM(subargs, j, Py_NewRef(arg)); j++; } assert(j == PyTuple_GET_SIZE(subargs)); @@ -347,8 +344,7 @@ _unpacked_tuple_args(PyObject *arg) ((gaobject *)arg)->origin == (PyObject *)&PyTuple_Type) { result = ((gaobject *)arg)->args; - Py_INCREF(result); - return result; + return Py_NewRef(result); } if (_PyObject_LookupAttr(arg, &_Py_ID(__typing_unpacked_tuple_args__), &result) > 0) { @@ -458,6 +454,12 @@ _Py_subs_parameters(PyObject *self, PyObject *args, PyObject *parameters, PyObje } for (Py_ssize_t iarg = 0, jarg = 0; iarg < nargs; iarg++) { PyObject *arg = PyTuple_GET_ITEM(args, iarg); + if (PyType_Check(arg)) { + PyTuple_SET_ITEM(newargs, jarg, Py_NewRef(arg)); + jarg++; + continue; + } + int unpack = _is_unpacked_typevartuple(arg); if (unpack < 0) { Py_DECREF(newargs); @@ -760,8 +762,7 @@ ga_parameters(PyObject *self, void *unused) return NULL; } } - Py_INCREF(alias->parameters); - return alias->parameters; + return Py_NewRef(alias->parameters); } static PyObject * @@ -769,8 +770,7 @@ ga_unpacked_tuple_args(PyObject *self, void *unused) { gaobject *alias = (gaobject *)self; if (alias->starred && alias->origin == (PyObject *)&PyTuple_Type) { - Py_INCREF(alias->args); - return alias->args; + return Py_NewRef(alias->args); } Py_RETURN_NONE; } @@ -796,8 +796,7 @@ setup_ga(gaobject *alias, PyObject *origin, PyObject *args) { Py_INCREF(args); } - Py_INCREF(origin); - alias->origin = origin; + alias->origin = Py_NewRef(origin); alias->args = args; alias->parameters = NULL; alias->weakreflist = NULL; diff --git a/Objects/genobject.c b/Objects/genobject.c index ad4fbed6d8d579..c006f1af2177f9 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -8,7 +8,6 
@@ #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_object.h" // _PyObject_GC_UNTRACK() -#include "pycore_opcode.h" // _PyOpcode_Deopt #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pystate.h" // _PyThreadState_GET() #include "structmember.h" // PyMemberDef @@ -195,8 +194,7 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, else if (arg && !exc) { /* `gen` is an exhausted generator: only return value if called from send(). */ - *presult = Py_None; - Py_INCREF(*presult); + *presult = Py_NewRef(Py_None); return PYGEN_RETURN; } return PYGEN_ERROR; @@ -205,12 +203,10 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, assert(gen->gi_frame_state < FRAME_EXECUTING); /* Push arg onto the frame's value stack */ result = arg ? arg : Py_None; - Py_INCREF(result); - _PyFrame_StackPush(frame, result); + _PyFrame_StackPush(frame, Py_NewRef(result)); - frame->previous = tstate->cframe->current_frame; - - gen->gi_exc_state.previous_item = tstate->exc_info; + _PyErr_StackItem *prev_exc_info = tstate->exc_info; + gen->gi_exc_state.previous_item = prev_exc_info; tstate->exc_info = &gen->gi_exc_state; if (exc) { @@ -221,17 +217,10 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, gen->gi_frame_state = FRAME_EXECUTING; EVAL_CALL_STAT_INC(EVAL_CALL_GENERATOR); result = _PyEval_EvalFrame(tstate, frame, exc); - if (gen->gi_frame_state == FRAME_EXECUTING) { - gen->gi_frame_state = FRAME_COMPLETED; - } - tstate->exc_info = gen->gi_exc_state.previous_item; - gen->gi_exc_state.previous_item = NULL; - - assert(tstate->cframe->current_frame == frame->previous); - /* Don't keep the reference to previous any longer than necessary. It - * may keep a chain of frames alive or it could create a reference - * cycle. */ - frame->previous = NULL; + assert(tstate->exc_info == prev_exc_info); + assert(gen->gi_exc_state.previous_item == NULL); + assert(gen->gi_frame_state != FRAME_EXECUTING); + assert(frame->previous == NULL); /* If the generator just returned (as opposed to yielding), signal * that the generator is exhausted. */ @@ -247,33 +236,16 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, } } else { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - const char *msg = "generator raised StopIteration"; - if (PyCoro_CheckExact(gen)) { - msg = "coroutine raised StopIteration"; - } - else if (PyAsyncGen_CheckExact(gen)) { - msg = "async generator raised StopIteration"; - } - _PyErr_FormatFromCause(PyExc_RuntimeError, "%s", msg); - } - else if (PyAsyncGen_CheckExact(gen) && - PyErr_ExceptionMatches(PyExc_StopAsyncIteration)) - { - /* code in `gen` raised a StopAsyncIteration error: - raise a RuntimeError. - */ - const char *msg = "async generator raised StopAsyncIteration"; - _PyErr_FormatFromCause(PyExc_RuntimeError, "%s", msg); - } + assert(!PyErr_ExceptionMatches(PyExc_StopIteration)); + assert(!PyAsyncGen_CheckExact(gen) || + !PyErr_ExceptionMatches(PyExc_StopAsyncIteration)); } /* generator can't be rerun, so release the frame */ /* first clean reference cycle through stored exception traceback */ _PyErr_ClearExcState(&gen->gi_exc_state); - gen->gi_frame_state = FRAME_CLEARED; - _PyFrame_Clear(frame); + assert(gen->gi_frame_state == FRAME_CLEARED); *presult = result; return result ? 
PYGEN_RETURN : PYGEN_ERROR; } @@ -364,13 +336,12 @@ _PyGen_yf(PyGenObject *gen) return NULL; } _Py_CODEUNIT next = frame->prev_instr[1]; - if (_PyOpcode_Deopt[_Py_OPCODE(next)] != RESUME || _Py_OPARG(next) < 2) + if (_Py_OPCODE(next) != RESUME || _Py_OPARG(next) < 2) { /* Not in a yield from */ return NULL; } - yf = _PyFrame_StackPeek(frame); - Py_INCREF(yf); + yf = Py_NewRef(_PyFrame_StackPeek(frame)); } return yf; @@ -520,10 +491,8 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } else { /* Normalize to raise , */ - Py_XDECREF(val); - val = typ; - typ = PyExceptionInstance_Class(typ); - Py_INCREF(typ); + Py_XSETREF(val, typ); + typ = Py_NewRef(PyExceptionInstance_Class(typ)); if (tb == NULL) /* Returns NULL if there's no traceback */ @@ -651,8 +620,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) if (ev) { /* exception will usually be normalised already */ if (PyObject_TypeCheck(ev, (PyTypeObject *) et)) { - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); + value = Py_NewRef(((PyStopIterationObject *)ev)->value); Py_DECREF(ev); } else if (et == PyExc_StopIteration && !PyTuple_Check(ev)) { /* Avoid normalisation and take ev as value. @@ -671,8 +639,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) PyErr_Restore(et, ev, tb); return -1; } - value = ((PyStopIterationObject *)ev)->value; - Py_INCREF(value); + value = Py_NewRef(((PyStopIterationObject *)ev)->value); Py_DECREF(ev); } } @@ -682,8 +649,7 @@ _PyGen_FetchStopIterationValue(PyObject **pvalue) return -1; } if (value == NULL) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } *pvalue = value; return 0; @@ -699,8 +665,7 @@ gen_repr(PyGenObject *gen) static PyObject * gen_get_name(PyGenObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->gi_name); - return op->gi_name; + return Py_NewRef(op->gi_name); } static int @@ -713,16 +678,14 @@ gen_set_name(PyGenObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__name__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->gi_name, value); + Py_XSETREF(op->gi_name, Py_NewRef(value)); return 0; } static PyObject * gen_get_qualname(PyGenObject *op, void *Py_UNUSED(ignored)) { - Py_INCREF(op->gi_qualname); - return op->gi_qualname; + return Py_NewRef(op->gi_qualname); } static int @@ -735,8 +698,7 @@ gen_set_qualname(PyGenObject *op, PyObject *value, void *Py_UNUSED(ignored)) "__qualname__ must be set to a string object"); return -1; } - Py_INCREF(value); - Py_XSETREF(op->gi_qualname, value); + Py_XSETREF(op->gi_qualname, Py_NewRef(value)); return 0; } @@ -894,8 +856,7 @@ make_gen(PyTypeObject *type, PyFunctionObject *func) return NULL; } gen->gi_frame_state = FRAME_CLEARED; - gen->gi_code = (PyCodeObject *)func->func_code; - Py_INCREF(gen->gi_code); + gen->gi_code = (PyCodeObject *)Py_NewRef(func->func_code); gen->gi_weakreflist = NULL; gen->gi_exc_state.exc_value = NULL; gen->gi_exc_state.previous_item = NULL; @@ -981,15 +942,13 @@ gen_new_with_qualname(PyTypeObject *type, PyFrameObject *f, gen->gi_exc_state.exc_value = NULL; gen->gi_exc_state.previous_item = NULL; if (name != NULL) - gen->gi_name = name; + gen->gi_name = Py_NewRef(name); else - gen->gi_name = gen->gi_code->co_name; - Py_INCREF(gen->gi_name); + gen->gi_name = Py_NewRef(gen->gi_code->co_name); if (qualname != NULL) - gen->gi_qualname = qualname; + gen->gi_qualname = Py_NewRef(qualname); else - gen->gi_qualname = gen->gi_code->co_qualname; - Py_INCREF(gen->gi_qualname); + gen->gi_qualname = Py_NewRef(gen->gi_code->co_qualname); 
_PyObject_GC_TRACK(gen); return (PyObject *)gen; } @@ -1041,8 +1000,7 @@ _PyCoro_GetAwaitableIter(PyObject *o) if (PyCoro_CheckExact(o) || gen_is_coroutine(o)) { /* 'o' is a coroutine. */ - Py_INCREF(o); - return o; + return Py_NewRef(o); } ot = Py_TYPE(o); @@ -1089,8 +1047,7 @@ coro_await(PyCoroObject *coro) if (cw == NULL) { return NULL; } - Py_INCREF(coro); - cw->cw_coroutine = coro; + cw->cw_coroutine = (PyCoroObject*)Py_NewRef(coro); _PyObject_GC_TRACK(cw); return (PyObject *)cw; } @@ -1460,8 +1417,7 @@ async_gen_init_hooks(PyAsyncGenObject *o) finalizer = tstate->async_gen_finalizer; if (finalizer) { - Py_INCREF(finalizer); - o->ag_origin_or_finalizer = finalizer; + o->ag_origin_or_finalizer = Py_NewRef(finalizer); } firstiter = tstate->async_gen_firstiter; @@ -1923,11 +1879,9 @@ async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) } } - Py_INCREF(gen); - o->ags_gen = gen; + o->ags_gen = (PyAsyncGenObject*)Py_NewRef(gen); - Py_XINCREF(sendval); - o->ags_sendval = sendval; + o->ags_sendval = Py_XNewRef(sendval); o->ags_state = AWAITABLE_STATE_INIT; @@ -2043,8 +1997,7 @@ _PyAsyncGenValueWrapperNew(PyObject *val) return NULL; } } - o->agw_val = val; - Py_INCREF(val); + o->agw_val = Py_NewRef(val); _PyObject_GC_TRACK((PyObject*)o); return (PyObject*)o; } @@ -2327,11 +2280,9 @@ async_gen_athrow_new(PyAsyncGenObject *gen, PyObject *args) if (o == NULL) { return NULL; } - o->agt_gen = gen; - o->agt_args = args; + o->agt_gen = (PyAsyncGenObject*)Py_NewRef(gen); + o->agt_args = Py_XNewRef(args); o->agt_state = AWAITABLE_STATE_INIT; - Py_INCREF(gen); - Py_XINCREF(args); _PyObject_GC_TRACK((PyObject*)o); return (PyObject*)o; } diff --git a/Objects/iterobject.c b/Objects/iterobject.c index 62c36146d64f69..cfd6d0a7c959c9 100644 --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -23,8 +23,7 @@ PySeqIter_New(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = seq; + it->it_seq = Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -183,10 +182,8 @@ PyCallIter_New(PyObject *callable, PyObject *sentinel) it = PyObject_GC_New(calliterobject, &PyCallIter_Type); if (it == NULL) return NULL; - Py_INCREF(callable); - it->it_callable = callable; - Py_INCREF(sentinel); - it->it_sentinel = sentinel; + it->it_callable = Py_NewRef(callable); + it->it_sentinel = Py_NewRef(sentinel); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -496,10 +493,8 @@ PyAnextAwaitable_New(PyObject *awaitable, PyObject *default_value) if (anext == NULL) { return NULL; } - Py_INCREF(awaitable); - anext->wrapped = awaitable; - Py_INCREF(default_value); - anext->default_value = default_value; + anext->wrapped = Py_NewRef(awaitable); + anext->default_value = Py_NewRef(default_value); _PyObject_GC_TRACK(anext); return (PyObject *)anext; } diff --git a/Objects/listobject.c b/Objects/listobject.c index 2cb3de9f147cc7..da623c9719aeb8 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -299,8 +299,7 @@ ins1(PyListObject *self, Py_ssize_t where, PyObject *v) items = self->ob_item; for (i = n; --i >= where; ) items[i+1] = items[i]; - Py_INCREF(v); - items[where] = v; + items[where] = Py_NewRef(v); return 0; } @@ -332,8 +331,7 @@ int PyList_Append(PyObject *op, PyObject *newitem) { if (PyList_Check(op) && (newitem != NULL)) { - Py_INCREF(newitem); - return _PyList_AppendTakeRef((PyListObject *)op, newitem); + return _PyList_AppendTakeRef((PyListObject *)op, Py_NewRef(newitem)); } PyErr_BadInternalCall(); return -1; @@ -461,8 +459,7 @@ list_item(PyListObject 
*a, Py_ssize_t i) PyErr_SetObject(PyExc_IndexError, &_Py_STR(list_err)); return NULL; } - Py_INCREF(a->ob_item[i]); - return a->ob_item[i]; + return Py_NewRef(a->ob_item[i]); } static PyObject * @@ -483,8 +480,7 @@ list_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh) dest = np->ob_item; for (i = 0; i < len; i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } Py_SET_SIZE(np, len); return (PyObject *)np; @@ -539,15 +535,13 @@ list_concat(PyListObject *a, PyObject *bb) dest = np->ob_item; for (i = 0; i < Py_SIZE(a); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } src = b->ob_item; dest = np->ob_item + Py_SIZE(a); for (i = 0; i < Py_SIZE(b); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } Py_SET_SIZE(np, size); return (PyObject *)np; @@ -716,8 +710,7 @@ list_ass_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v) } for (k = 0; k < n; k++, ilow++) { PyObject *w = vitem[k]; - Py_XINCREF(w); - item[ilow] = w; + item[ilow] = Py_XNewRef(w); } for (k = norig - 1; k >= 0; --k) Py_XDECREF(recycle[k]); @@ -745,14 +738,12 @@ list_inplace_repeat(PyListObject *self, Py_ssize_t n) { Py_ssize_t input_size = PyList_GET_SIZE(self); if (input_size == 0 || n == 1) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (n < 1) { (void)_list_clear(self); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } if (input_size > PY_SSIZE_T_MAX / n) { @@ -770,8 +761,7 @@ list_inplace_repeat(PyListObject *self, Py_ssize_t n) _Py_memory_repeat((char *)items, sizeof(PyObject *)*output_size, sizeof(PyObject *)*input_size); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } static int @@ -784,8 +774,7 @@ list_ass_item(PyListObject *a, Py_ssize_t i, PyObject *v) } if (v == NULL) return list_ass_slice(a, i, i+1, v); - Py_INCREF(v); - Py_SETREF(a->ob_item[i], v); + Py_SETREF(a->ob_item[i], Py_NewRef(v)); return 0; } @@ -913,8 +902,7 @@ list_extend(PyListObject *self, PyObject *iterable) dest = self->ob_item + m; for (i = 0; i < n; i++) { PyObject *o = src[i]; - Py_INCREF(o); - dest[i] = o; + dest[i] = Py_NewRef(o); } Py_DECREF(iterable); Py_RETURN_NONE; @@ -1002,8 +990,7 @@ list_inplace_concat(PyListObject *self, PyObject *other) if (result == NULL) return result; Py_DECREF(result); - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } /*[clinic input] @@ -2512,8 +2499,7 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse) } PyMem_Free(final_ob_item); } - Py_XINCREF(result); - return result; + return Py_XNewRef(result); } #undef IFLT #undef ISLT @@ -2901,8 +2887,7 @@ list_subscript(PyListObject* self, PyObject* item) dest = ((PyListObject *)result)->ob_item; for (cur = start, i = 0; i < slicelength; cur += (size_t)step, i++) { - it = src[cur]; - Py_INCREF(it); + it = Py_NewRef(src[cur]); dest[i] = it; } Py_SET_SIZE(result, slicelength); @@ -3057,8 +3042,7 @@ list_ass_subscript(PyListObject* self, PyObject* item, PyObject* value) for (cur = start, i = 0; i < slicelength; cur += (size_t)step, i++) { garbage[i] = selfitems[cur]; - ins = seqitems[i]; - Py_INCREF(ins); + ins = Py_NewRef(seqitems[i]); selfitems[cur] = ins; } @@ -3199,8 +3183,7 @@ list_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyListObject *)seq; + it->it_seq = (PyListObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } @@ -3235,8 +3218,7 @@ 
listiter_next(_PyListIterObject *it) if (it->it_index < PyList_GET_SIZE(seq)) { item = PyList_GET_ITEM(seq, it->it_index); ++it->it_index; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_seq = NULL; @@ -3350,8 +3332,7 @@ list___reversed___impl(PyListObject *self) return NULL; assert(PyList_Check(self)); it->it_index = PyList_GET_SIZE(self) - 1; - Py_INCREF(self); - it->it_seq = self; + it->it_seq = (PyListObject*)Py_NewRef(self); PyObject_GC_Track(it); return (PyObject *)it; } @@ -3389,8 +3370,7 @@ listreviter_next(listreviterobject *it) if (index>=0 && index < PyList_GET_SIZE(seq)) { item = PyList_GET_ITEM(seq, index); it->it_index--; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_index = -1; it->it_seq = NULL; diff --git a/Objects/longobject.c b/Objects/longobject.c index 8b13df4181ecff..c84b4d3f316d5d 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -39,6 +39,9 @@ medium_value(PyLongObject *x) #define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit" #define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit" +/* If defined, use algorithms from the _pylong.py module */ +#define WITH_PYLONG_MODULE 1 + static inline void _Py_DECREF_INT(PyLongObject *op) { @@ -59,8 +62,7 @@ get_small_int(sdigit ival) { assert(IS_SMALL_INT(ival)); PyObject *v = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival]; - Py_INCREF(v); - return v; + return Py_NewRef(v); } static PyLongObject * @@ -1732,6 +1734,72 @@ rem1(PyLongObject *a, digit n) ); } +#ifdef WITH_PYLONG_MODULE +/* asymptotically faster long_to_decimal_string, using _pylong.py */ +static int +pylong_int_to_decimal_string(PyObject *aa, + PyObject **p_output, + _PyUnicodeWriter *writer, + _PyBytesWriter *bytes_writer, + char **bytes_str) +{ + PyObject *s = NULL; + PyObject *mod = PyImport_ImportModule("_pylong"); + if (mod == NULL) { + return -1; + } + s = PyObject_CallMethod(mod, "int_to_decimal_string", "O", aa); + if (s == NULL) { + goto error; + } + if (!PyUnicode_Check(s)) { + PyErr_SetString(PyExc_TypeError, + "_pylong.int_to_decimal_string did not return a str"); + goto error; + } + if (writer) { + Py_ssize_t size = PyUnicode_GET_LENGTH(s); + if (_PyUnicodeWriter_Prepare(writer, size, '9') == -1) { + goto error; + } + if (_PyUnicodeWriter_WriteStr(writer, s) < 0) { + goto error; + } + goto success; + } + else if (bytes_writer) { + Py_ssize_t size = PyUnicode_GET_LENGTH(s); + const void *data = PyUnicode_DATA(s); + int kind = PyUnicode_KIND(s); + *bytes_str = _PyBytesWriter_Prepare(bytes_writer, *bytes_str, size); + if (*bytes_str == NULL) { + goto error; + } + char *p = *bytes_str; + for (Py_ssize_t i=0; i < size; i++) { + Py_UCS4 ch = PyUnicode_READ(kind, data, i); + *p++ = (char) ch; + } + (*bytes_str) = p; + goto success; + } + else { + *p_output = Py_NewRef(s); + goto success; + } + +error: + Py_DECREF(mod); + Py_XDECREF(s); + return -1; + +success: + Py_DECREF(mod); + Py_DECREF(s); + return 0; +} +#endif /* WITH_PYLONG_MODULE */ + /* Convert an integer to a base 10 string. Returns a new non-shared string. (Return value is non-shared so that callers can modify the returned value if necessary.) 
*/ @@ -1767,7 +1835,7 @@ long_to_decimal_string_internal(PyObject *aa, if (size_a >= 10 * _PY_LONG_MAX_STR_DIGITS_THRESHOLD / (3 * PyLong_SHIFT) + 2) { PyInterpreterState *interp = _PyInterpreterState_GET(); - int max_str_digits = interp->config.int_max_str_digits; + int max_str_digits = interp->long_state.max_str_digits; if ((max_str_digits > 0) && (max_str_digits / (3 * PyLong_SHIFT) <= (size_a - 11) / 10)) { PyErr_Format(PyExc_ValueError, _MAX_STR_DIGITS_ERROR_FMT_TO_STR, @@ -1776,6 +1844,17 @@ long_to_decimal_string_internal(PyObject *aa, } } +#if WITH_PYLONG_MODULE + if (size_a > 1000) { + /* Switch to _pylong.int_to_decimal_string(). */ + return pylong_int_to_decimal_string(aa, + p_output, + writer, + bytes_writer, + bytes_str); + } +#endif + /* quick and dirty upper bound for the number of digits required to express a in base _PyLong_DECIMAL_BASE: @@ -1837,7 +1916,7 @@ long_to_decimal_string_internal(PyObject *aa, } if (strlen > _PY_LONG_MAX_STR_DIGITS_THRESHOLD) { PyInterpreterState *interp = _PyInterpreterState_GET(); - int max_str_digits = interp->config.int_max_str_digits; + int max_str_digits = interp->long_state.max_str_digits; Py_ssize_t strlen_nosign = strlen - negative; if ((max_str_digits > 0) && (strlen_nosign > max_str_digits)) { Py_DECREF(scratch); @@ -2272,6 +2351,42 @@ long_from_binary_base(const char *start, const char *end, Py_ssize_t digits, int return 0; } +static PyObject *long_neg(PyLongObject *v); + +#ifdef WITH_PYLONG_MODULE +/* asymptotically faster str-to-long conversion for base 10, using _pylong.py */ +static int +pylong_int_from_string(const char *start, const char *end, PyLongObject **res) +{ + PyObject *mod = PyImport_ImportModule("_pylong"); + if (mod == NULL) { + goto error; + } + PyObject *s = PyUnicode_FromStringAndSize(start, end-start); + if (s == NULL) { + Py_DECREF(mod); + goto error; + } + PyObject *result = PyObject_CallMethod(mod, "int_from_string", "O", s); + Py_DECREF(s); + Py_DECREF(mod); + if (result == NULL) { + goto error; + } + if (!PyLong_Check(result)) { + Py_DECREF(result); + PyErr_SetString(PyExc_TypeError, + "_pylong.int_from_string did not return an int"); + goto error; + } + *res = (PyLongObject *)result; + return 0; +error: + *res = NULL; + return 0; // See the long_from_string_base() API comment. +} +#endif /* WITH_PYLONG_MODULE */ + /*** long_from_non_binary_base: parameters and return values are the same as long_from_binary_base. @@ -2483,8 +2598,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, memcpy(tmp->ob_digit, z->ob_digit, sizeof(digit) * size_z); - Py_DECREF(z); - z = tmp; + Py_SETREF(z, tmp); z->ob_digit[size_z] = (digit)c; ++size_z; } @@ -2507,7 +2621,8 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, * Return values: * * - Returns -1 on syntax error (exception needs to be set, *res is untouched) - * - Returns 0 and sets *res to NULL for MemoryError/OverflowError. + * - Returns 0 and sets *res to NULL for MemoryError, OverflowError, or + * _pylong.int_from_string() errors. * - Returns 0 and sets *res to an unsigned, unnormalized PyLong (success!). * * Afterwards *str is set to point to the first non-digit (which may be *str!). @@ -2578,7 +2693,7 @@ long_from_string_base(const char **str, int base, PyLongObject **res) * quadratic algorithm. 
*/ if (digits > _PY_LONG_MAX_STR_DIGITS_THRESHOLD) { PyInterpreterState *interp = _PyInterpreterState_GET(); - int max_str_digits = interp->config.int_max_str_digits; + int max_str_digits = interp->long_state.max_str_digits; if ((max_str_digits > 0) && (digits > max_str_digits)) { PyErr_Format(PyExc_ValueError, _MAX_STR_DIGITS_ERROR_FMT_TO_INT, max_str_digits, digits); @@ -2586,6 +2701,12 @@ long_from_string_base(const char **str, int base, PyLongObject **res) return 0; } } +#if WITH_PYLONG_MODULE + if (digits > 6000 && base == 10) { + /* Switch to _pylong.int_from_string() */ + return pylong_int_from_string(start, end, res); + } +#endif /* Use the quadratic algorithm for non binary bases. */ return long_from_non_binary_base(start, end, digits, base, res); } @@ -2787,8 +2908,7 @@ long_divrem(PyLongObject *a, PyLongObject *b, return -1; } PyObject *zero = _PyLong_GetZero(); - Py_INCREF(zero); - *pdiv = (PyLongObject*)zero; + *pdiv = (PyLongObject*)Py_NewRef(zero); return 0; } if (size_b == 1) { @@ -3623,10 +3743,8 @@ k_mul(PyLongObject *a, PyLongObject *b) assert(Py_SIZE(ah) > 0); /* the split isn't degenerate */ if (a == b) { - bh = ah; - bl = al; - Py_INCREF(bh); - Py_INCREF(bl); + bh = (PyLongObject*)Py_NewRef(ah); + bl = (PyLongObject*)Py_NewRef(al); } else if (kmul_split(b, shift, &bh, &bl) < 0) goto fail; @@ -3698,8 +3816,7 @@ k_mul(PyLongObject *a, PyLongObject *b) ah = al = NULL; if (a == b) { - t2 = t1; - Py_INCREF(t2); + t2 = (PyLongObject*)Py_NewRef(t1); } else if ((t2 = x_add(bh, bl)) == NULL) { Py_DECREF(t1); @@ -3913,6 +4030,46 @@ fast_floor_div(PyLongObject *a, PyLongObject *b) return PyLong_FromLong(div); } +#ifdef WITH_PYLONG_MODULE +/* asymptotically faster divmod, using _pylong.py */ +static int +pylong_int_divmod(PyLongObject *v, PyLongObject *w, + PyLongObject **pdiv, PyLongObject **pmod) +{ + PyObject *mod = PyImport_ImportModule("_pylong"); + if (mod == NULL) { + return -1; + } + PyObject *result = PyObject_CallMethod(mod, "int_divmod", "OO", v, w); + Py_DECREF(mod); + if (result == NULL) { + return -1; + } + if (!PyTuple_Check(result)) { + Py_DECREF(result); + PyErr_SetString(PyExc_ValueError, + "tuple is required from int_divmod()"); + return -1; + } + PyObject *q = PyTuple_GET_ITEM(result, 0); + PyObject *r = PyTuple_GET_ITEM(result, 1); + if (!PyLong_Check(q) || !PyLong_Check(r)) { + Py_DECREF(result); + PyErr_SetString(PyExc_ValueError, + "tuple of int is required from int_divmod()"); + return -1; + } + if (pdiv != NULL) { + *pdiv = (PyLongObject *)Py_NewRef(q); + } + if (pmod != NULL) { + *pmod = (PyLongObject *)Py_NewRef(r); + } + Py_DECREF(result); + return 0; +} +#endif /* WITH_PYLONG_MODULE */ + /* The / and % operators are now defined in terms of divmod(). The expression a mod b has the value a - b*floor(a/b). The long_divrem function gives the remainder after division of @@ -3964,14 +4121,25 @@ l_divmod(PyLongObject *v, PyLongObject *w, } return 0; } +#if WITH_PYLONG_MODULE + Py_ssize_t size_v = Py_ABS(Py_SIZE(v)); /* digits in numerator */ + Py_ssize_t size_w = Py_ABS(Py_SIZE(w)); /* digits in denominator */ + if (size_w > 300 && (size_v - size_w) > 150) { + /* Switch to _pylong.int_divmod(). If the quotient is small then + "schoolbook" division is linear-time so don't use in that case. + These limits are empirically determined and should be slightly + conservative so that _pylong is used in cases it is likely + to be faster. See Tools/scripts/divmod_threshold.py. 
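[Editor's note, not part of this patch: for rough scale, assuming the usual 30-bit internal digits (PyLong_SHIFT == 30, about 9.03 decimal digits each), these _pylong cut-overs are approximately: str(int) above ~9,000 decimal digits (size > 1000 internal digits), int(str) in base 10 above 6,000 decimal digits, and divmod when the divisor exceeds ~2,700 and the quotient ~1,350 decimal digits (300 and 150 internal digits respectively).]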
*/ + return pylong_int_divmod(v, w, pdiv, pmod); + } +#endif if (long_divrem(v, w, &div, &mod) < 0) return -1; if ((Py_SIZE(mod) < 0 && Py_SIZE(w) > 0) || (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); - Py_DECREF(mod); - mod = temp; + Py_SETREF(mod, temp); if (mod == NULL) { Py_DECREF(div); return -1; @@ -3982,8 +4150,7 @@ l_divmod(PyLongObject *v, PyLongObject *w, Py_DECREF(div); return -1; } - Py_DECREF(div); - div = temp; + Py_SETREF(div, temp); } if (pdiv != NULL) *pdiv = div; @@ -4019,8 +4186,7 @@ l_mod(PyLongObject *v, PyLongObject *w, PyLongObject **pmod) (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); - Py_DECREF(mod); - mod = temp; + Py_SETREF(mod, temp); if (mod == NULL) return -1; } @@ -4260,8 +4426,7 @@ long_true_divide(PyObject *v, PyObject *w) else { PyLongObject *div, *rem; div = x_divrem(x, b, &rem); - Py_DECREF(x); - x = div; + Py_SETREF(x, div); if (x == NULL) goto error; if (Py_SIZE(rem)) @@ -4391,8 +4556,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) if (l_divmod(a, n, &q, &r) == -1) { goto Error; } - Py_DECREF(a); - a = n; + Py_SETREF(a, n); n = r; t = (PyLongObject *)long_mul(q, c); Py_DECREF(q); @@ -4404,8 +4568,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) if (s == NULL) { goto Error; } - Py_DECREF(b); - b = c; + Py_SETREF(b, c); c = s; } /* references now owned: a, b, c, n */ @@ -4450,7 +4613,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* k-ary values. If the exponent is large enough, table is * precomputed so that table[i] == a**(2*i+1) % c for i in * range(EXP_TABLE_LEN). - * Note: this is uninitialzed stack trash: don't pay to set it to known + * Note: this is uninitialized stack trash: don't pay to set it to known * values unless it's needed. Instead ensure that num_table_entries is * set to the number of entries actually filled whenever a branch to the * Error or Done labels is possible. @@ -4460,11 +4623,10 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* a, b, c = v, w, x */ CHECK_BINOP(v, w); - a = (PyLongObject*)v; Py_INCREF(a); - b = (PyLongObject*)w; Py_INCREF(b); + a = (PyLongObject*)Py_NewRef(v); + b = (PyLongObject*)Py_NewRef(w); if (PyLong_Check(x)) { - c = (PyLongObject *)x; - Py_INCREF(x); + c = (PyLongObject *)Py_NewRef(x); } else if (x == Py_None) c = NULL; @@ -4501,8 +4663,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)_PyLong_Copy(c); if (temp == NULL) goto Error; - Py_DECREF(c); - c = temp; + Py_SETREF(c, temp); temp = NULL; _PyLong_Negate(&c); if (c == NULL) @@ -4522,8 +4683,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)_PyLong_Copy(b); if (temp == NULL) goto Error; - Py_DECREF(b); - b = temp; + Py_SETREF(b, temp); temp = NULL; _PyLong_Negate(&b); if (b == NULL) @@ -4532,8 +4692,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = long_invmod(a, c); if (temp == NULL) goto Error; - Py_DECREF(a); - a = temp; + Py_SETREF(a, temp); temp = NULL; } @@ -4549,8 +4708,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) if (Py_SIZE(a) < 0 || Py_SIZE(a) > Py_SIZE(c)) { if (l_mod(a, c, &temp) < 0) goto Error; - Py_DECREF(a); - a = temp; + Py_SETREF(a, temp); temp = NULL; } } @@ -4617,9 +4775,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) * because we're primarily trying to cut overhead for small powers. 
*/ assert(bi); /* else there is no significant bit */ - Py_INCREF(a); - Py_DECREF(z); - z = a; + Py_SETREF(z, (PyLongObject*)Py_NewRef(a)); for (bit = 2; ; bit <<= 1) { if (bit > bi) { /* found the first bit */ assert((bi & bit) == 0); @@ -4646,8 +4802,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* Left-to-right k-ary sliding window exponentiation * (Handbook of Applied Cryptography (HAC) Algorithm 14.85) */ - Py_INCREF(a); - table[0] = a; + table[0] = (PyLongObject*)Py_NewRef(a); num_table_entries = 1; MULT(a, a, a2); /* table[i] == a**(2*i + 1) % c */ @@ -4706,8 +4861,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) temp = (PyLongObject *)long_sub(z, c); if (temp == NULL) goto Error; - Py_DECREF(z); - z = temp; + Py_SETREF(z, temp); temp = NULL; } goto Done; @@ -5184,11 +5338,12 @@ long_or(PyObject *a, PyObject *b) static PyObject * long_long(PyObject *v) { - if (PyLong_CheckExact(v)) - Py_INCREF(v); - else - v = _PyLong_Copy((PyLongObject *)v); - return v; + if (PyLong_CheckExact(v)) { + return Py_NewRef(v); + } + else { + return _PyLong_Copy((PyLongObject *)v); + } } PyObject * @@ -5275,8 +5430,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) /* no progress; do a Euclidean step */ if (l_mod(a, b, &r) < 0) goto error; - Py_DECREF(a); - a = b; + Py_SETREF(a, b); b = r; alloc_a = alloc_b; alloc_b = Py_SIZE(b); @@ -5295,8 +5449,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) Py_SET_SIZE(c, size_a); } else if (Py_REFCNT(a) == 1) { - Py_INCREF(a); - c = a; + c = (PyLongObject*)Py_NewRef(a); } else { alloc_a = size_a; @@ -5309,8 +5462,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) Py_SET_SIZE(d, size_a); } else if (Py_REFCNT(b) == 1 && size_a <= alloc_b) { - Py_INCREF(b); - d = b; + d = (PyLongObject*)Py_NewRef(b); Py_SET_SIZE(d, size_a); } else { @@ -5599,8 +5751,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) goto error; if (quo_is_neg) { temp = long_neg((PyLongObject*)twice_rem); - Py_DECREF(twice_rem); - twice_rem = temp; + Py_SETREF(twice_rem, temp); if (twice_rem == NULL) goto error; } @@ -5614,8 +5765,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) temp = long_sub(quo, (PyLongObject *)one); else temp = long_add(quo, (PyLongObject *)one); - Py_DECREF(quo); - quo = (PyLongObject *)temp; + Py_SETREF(quo, (PyLongObject *)temp); if (quo == NULL) goto error; /* and remainder */ @@ -5623,8 +5773,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) temp = long_add(rem, (PyLongObject *)b); else temp = long_sub(rem, (PyLongObject *)b); - Py_DECREF(rem); - rem = (PyLongObject *)temp; + Py_SETREF(rem, (PyLongObject *)temp); if (rem == NULL) goto error; } @@ -5690,8 +5839,7 @@ int___round___impl(PyObject *self, PyObject *o_ndigits) /* result = self - divmod_near(self, 10 ** -ndigits)[1] */ temp = long_neg((PyLongObject*)ndigits); - Py_DECREF(ndigits); - ndigits = temp; + Py_SETREF(ndigits, temp); if (ndigits == NULL) return NULL; @@ -5703,21 +5851,18 @@ int___round___impl(PyObject *self, PyObject *o_ndigits) temp = long_pow(result, ndigits, Py_None); Py_DECREF(ndigits); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); if (result == NULL) return NULL; temp = _PyLong_DivmodNear(self, result); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); if (result == NULL) return NULL; temp = long_sub((PyLongObject *)self, (PyLongObject *)PyTuple_GET_ITEM(result, 1)); - Py_DECREF(result); - result = temp; + Py_SETREF(result, temp); return result; } @@ -5782,8 +5927,7 @@ int_bit_length_impl(PyObject *self) Py_DECREF(x); if (y == NULL) goto error; - 
Py_DECREF(result); - result = y; + Py_SETREF(result, y); x = (PyLongObject *)PyLong_FromLong((long)msd_bits); if (x == NULL) @@ -5792,8 +5936,7 @@ int_bit_length_impl(PyObject *self) Py_DECREF(x); if (y == NULL) goto error; - Py_DECREF(result); - result = y; + Py_SETREF(result, y); return (PyObject *)result; @@ -5859,8 +6002,7 @@ int_bit_count_impl(PyObject *self) if (y == NULL) { goto error; } - Py_DECREF(result); - result = y; + Py_SETREF(result, y); } return result; diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index c5ab0bf2dedbe4..1d6cc3b508448d 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -682,8 +682,7 @@ mbuf_add_view(_PyManagedBufferObject *mbuf, const Py_buffer *src) init_suboffsets(dest, src); init_flags(mv); - mv->mbuf = mbuf; - Py_INCREF(mbuf); + mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf); mbuf->exports++; return (PyObject *)mv; @@ -713,8 +712,7 @@ mbuf_add_incomplete_view(_PyManagedBufferObject *mbuf, const Py_buffer *src, dest = &mv->view; init_shared_values(dest, src); - mv->mbuf = mbuf; - Py_INCREF(mbuf); + mv->mbuf = (_PyManagedBufferObject*)Py_NewRef(mbuf); mbuf->exports++; return (PyObject *)mv; @@ -1102,8 +1100,7 @@ static PyObject * memory_enter(PyObject *self, PyObject *args) { CHECK_RELEASED(self); - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyObject * @@ -1515,8 +1512,7 @@ memory_getbuf(PyMemoryViewObject *self, Py_buffer *view, int flags) } - view->obj = (PyObject *)self; - Py_INCREF(view->obj); + view->obj = Py_NewRef(self); self->exports++; return 0; @@ -2047,10 +2043,9 @@ struct_unpack_single(const char *ptr, struct unpacker *x) return NULL; if (PyTuple_GET_SIZE(v) == 1) { - PyObject *tmp = PyTuple_GET_ITEM(v, 0); - Py_INCREF(tmp); + PyObject *res = Py_NewRef(PyTuple_GET_ITEM(v, 0)); Py_DECREF(v); - return tmp; + return res; } return v; @@ -2496,8 +2491,7 @@ memory_subscript(PyMemoryViewObject *self, PyObject *key) return unpack_single(self, view->buf, fmt); } else if (key == Py_Ellipsis) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { PyErr_SetString(PyExc_TypeError, @@ -2957,8 +2951,7 @@ memory_richcompare(PyObject *v, PyObject *w, int op) unpacker_free(unpack_v); unpacker_free(unpack_w); - Py_XINCREF(res); - return res; + return Py_XNewRef(res); } /**************************************************************************/ @@ -3052,8 +3045,7 @@ memory_obj_get(PyMemoryViewObject *self, void *Py_UNUSED(ignored)) if (view->obj == NULL) { Py_RETURN_NONE; } - Py_INCREF(view->obj); - return view->obj; + return Py_NewRef(view->obj); } static PyObject * @@ -3281,8 +3273,7 @@ memory_iter(PyObject *seq) it->it_fmt = fmt; it->it_length = memory_length(obj); it->it_index = 0; - Py_INCREF(seq); - it->it_seq = obj; + it->it_seq = (PyMemoryViewObject*)Py_NewRef(obj); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 953cf4666d33b7..51752dec3dd08c 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -88,8 +88,7 @@ PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *c if (om == NULL) { return NULL; } - Py_INCREF(cls); - om->mm_class = cls; + om->mm_class = (PyTypeObject*)Py_NewRef(cls); op = (PyCFunctionObject *)om; } else { if (cls) { @@ -106,10 +105,8 @@ PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *c op->m_weakreflist = NULL; op->m_ml = ml; - Py_XINCREF(self); - op->m_self = self; - Py_XINCREF(module); - op->m_module = module; + 
op->m_self = Py_XNewRef(self); + op->m_module = Py_XNewRef(module); op->vectorcall = vectorcall; _PyObject_GC_TRACK(op); return (PyObject *)op; @@ -260,8 +257,7 @@ meth_get__self__(PyCFunctionObject *m, void *closure) self = PyCFunction_GET_SELF(m); if (self == NULL) self = Py_None; - Py_INCREF(self); - return self; + return Py_NewRef(self); } static PyGetSetDef meth_getsets [] = { @@ -314,8 +310,7 @@ meth_richcompare(PyObject *self, PyObject *other, int op) res = eq ? Py_True : Py_False; else res = eq ? Py_False : Py_True; - Py_INCREF(res); - return res; + return Py_NewRef(res); } static Py_hash_t diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index dba20a0b36463d..8e03f2446f6fcd 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -9,7 +9,6 @@ #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "structmember.h" // PyMemberDef -static Py_ssize_t max_module_number; static PyMemberDef module_members[] = { {"__dict__", T_OBJECT, offsetof(PyModuleObject, md_dict), READONLY}, @@ -43,10 +42,10 @@ PyModuleDef_Init(PyModuleDef* def) { assert(PyModuleDef_Type.tp_flags & Py_TPFLAGS_READY); if (def->m_base.m_index == 0) { - max_module_number++; + _PyRuntime.imports.last_module_index++; Py_SET_REFCNT(def, 1); Py_SET_TYPE(def, &PyModuleDef_Type); - def->m_base.m_index = max_module_number; + def->m_base.m_index = _PyRuntime.imports.last_module_index; } return (PyObject*)def; } @@ -70,8 +69,7 @@ module_init_dict(PyModuleObject *mod, PyObject *md_dict, if (PyDict_SetItem(md_dict, &_Py_ID(__spec__), Py_None) != 0) return -1; if (PyUnicode_CheckExact(name)) { - Py_INCREF(name); - Py_XSETREF(mod->md_name, name); + Py_XSETREF(mod->md_name, Py_NewRef(name)); } return 0; @@ -220,6 +218,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version) _Py_PackageContext, and PyModule_Create*() will substitute this (if the name actually matches). 
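[Editor's note, not part of this patch: the #define/#undef pair added just below appears to let this one spot keep the historical _Py_PackageContext spelling while the state itself now lives in _PyRuntime.imports.pkgcontext; undefining it immediately afterwards keeps the alias from leaking into the rest of the file.]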
*/ +#define _Py_PackageContext (_PyRuntime.imports.pkgcontext) if (_Py_PackageContext != NULL) { const char *p = strrchr(_Py_PackageContext, '.'); if (p != NULL && strcmp(module->m_name, p+1) == 0) { @@ -227,6 +226,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version) _Py_PackageContext = NULL; } } +#undef _Py_PackageContext if ((m = (PyModuleObject*)PyModule_New(name)) == NULL) return NULL; @@ -291,23 +291,27 @@ PyModule_FromDefAndSpec2(PyModuleDef* def, PyObject *spec, int module_api_versio } for (cur_slot = def->m_slots; cur_slot && cur_slot->slot; cur_slot++) { - if (cur_slot->slot == Py_mod_create) { - if (create) { + switch (cur_slot->slot) { + case Py_mod_create: + if (create) { + PyErr_Format( + PyExc_SystemError, + "module %s has multiple create slots", + name); + goto error; + } + create = cur_slot->value; + break; + case Py_mod_exec: + has_execution_slots = 1; + break; + default: + assert(cur_slot->slot < 0 || cur_slot->slot > _Py_mod_LAST_SLOT); PyErr_Format( PyExc_SystemError, - "module %s has multiple create slots", - name); + "module %s uses unknown slot ID %i", + name, cur_slot->slot); goto error; - } - create = cur_slot->value; - } else if (cur_slot->slot < 0 || cur_slot->slot > _Py_mod_LAST_SLOT) { - PyErr_Format( - PyExc_SystemError, - "module %s uses unknown slot ID %i", - name, cur_slot->slot); - goto error; - } else { - has_execution_slots = 1; } } @@ -502,8 +506,7 @@ PyModule_GetNameObject(PyObject *m) } return NULL; } - Py_INCREF(name); - return name; + return Py_NewRef(name); } const char * @@ -537,8 +540,7 @@ PyModule_GetFilenameObject(PyObject *m) } return NULL; } - Py_INCREF(fileobj); - return fileobj; + return Py_NewRef(fileobj); } const char * diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c index 7875e7cafecb65..2cc4ddd3c91daa 100644 --- a/Objects/namespaceobject.c +++ b/Objects/namespaceobject.c @@ -85,9 +85,8 @@ namespace_repr(PyObject *ns) if (pairs == NULL) goto error; - d = ((_PyNamespaceObject *)ns)->ns_dict; - assert(d != NULL); - Py_INCREF(d); + assert(((_PyNamespaceObject *)ns)->ns_dict != NULL); + d = Py_NewRef(((_PyNamespaceObject *)ns)->ns_dict); keys = PyDict_Keys(d); if (keys == NULL) diff --git a/Objects/object.c b/Objects/object.c index 26cdcef59dddbd..a499cb32b22f58 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -282,31 +282,22 @@ PyObject_Print(PyObject *op, FILE *fp, int flags) s = PyObject_Str(op); else s = PyObject_Repr(op); - if (s == NULL) + if (s == NULL) { ret = -1; - else if (PyBytes_Check(s)) { - fwrite(PyBytes_AS_STRING(s), 1, - PyBytes_GET_SIZE(s), fp); } - else if (PyUnicode_Check(s)) { - PyObject *t; - t = PyUnicode_AsEncodedString(s, "utf-8", "backslashreplace"); + else { + assert(PyUnicode_Check(s)); + const char *t; + Py_ssize_t len; + t = PyUnicode_AsUTF8AndSize(s, &len); if (t == NULL) { ret = -1; } else { - fwrite(PyBytes_AS_STRING(t), 1, - PyBytes_GET_SIZE(t), fp); - Py_DECREF(t); + fwrite(t, 1, len, fp); } + Py_DECREF(s); } - else { - PyErr_Format(PyExc_TypeError, - "str() or repr() returned '%.100s'", - Py_TYPE(s)->tp_name); - ret = -1; - } - Py_XDECREF(s); } } if (ret == 0) { @@ -464,8 +455,7 @@ PyObject_Str(PyObject *v) if (PyUnicode_READY(v) < 0) return NULL; #endif - Py_INCREF(v); - return v; + return Py_NewRef(v); } if (Py_TYPE(v)->tp_str == NULL) return PyObject_Repr(v); @@ -541,8 +531,7 @@ PyObject_Bytes(PyObject *v) return PyBytes_FromString(""); if (PyBytes_CheckExact(v)) { - Py_INCREF(v); - return v; + return Py_NewRef(v); } func = _PyObject_LookupSpecial(v, 
&_Py_ID(__bytes__)); @@ -698,8 +687,7 @@ do_richcompare(PyThreadState *tstate, PyObject *v, PyObject *w, int op) Py_TYPE(w)->tp_name); return NULL; } - Py_INCREF(res); - return res; + return Py_NewRef(res); } /* Perform a rich comparison with object result. This wraps do_richcompare() @@ -1105,8 +1093,7 @@ _PyObject_GetDictPtr(PyObject *obj) PyObject * PyObject_SelfIter(PyObject *obj) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* Helper used when the __next__ method is removed from a type: @@ -1490,8 +1477,7 @@ PyObject_GenericSetDict(PyObject *obj, PyObject *value, void *context) "not a '%.200s'", Py_TYPE(value)->tp_name); return -1; } - Py_INCREF(value); - Py_XSETREF(*dictptr, value); + Py_XSETREF(*dictptr, Py_NewRef(value)); return 0; } @@ -2015,7 +2001,7 @@ _PyTypes_FiniTypes(PyInterpreterState *interp) void _Py_NewReference(PyObject *op) { - if (_Py_tracemalloc_config.tracing) { + if (_PyRuntime.tracemalloc.config.tracing) { _PyTraceMalloc_NewReference(op); } #ifdef Py_REF_DEBUG diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 1d487d8e807a22..4c08bc214cd27a 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1,9 +1,16 @@ #include "Python.h" -#include "pycore_pymem.h" // _PyTraceMalloc_Config -#include "pycore_code.h" // stats +#include "pycore_code.h" // stats +#include "pycore_pystate.h" // _PyInterpreterState_GET + +#include "pycore_obmalloc.h" +#include "pycore_pymem.h" -#include #include // malloc() +#include + + +#undef uint +#define uint pymem_uint /* Defined in tracemalloc.c */ @@ -12,84 +19,19 @@ extern void _PyMem_DumpTraceback(int fd, const void *ptr); /* Python's malloc wrappers (see pymem.h) */ -#undef uint -#define uint unsigned int /* assuming >= 16 bits */ - -/* Forward declaration */ -static void* _PyMem_DebugRawMalloc(void *ctx, size_t size); -static void* _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize); -static void* _PyMem_DebugRawRealloc(void *ctx, void *ptr, size_t size); -static void _PyMem_DebugRawFree(void *ctx, void *ptr); - -static void* _PyMem_DebugMalloc(void *ctx, size_t size); -static void* _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize); -static void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size); -static void _PyMem_DebugFree(void *ctx, void *p); - static void _PyObject_DebugDumpAddress(const void *p); static void _PyMem_DebugCheckAddress(const char *func, char api_id, const void *p); static void _PyMem_SetupDebugHooksDomain(PyMemAllocatorDomain domain); -#if defined(__has_feature) /* Clang */ -# if __has_feature(address_sanitizer) /* is ASAN enabled? */ -# define _Py_NO_SANITIZE_ADDRESS \ - __attribute__((no_sanitize("address"))) -# endif -# if __has_feature(thread_sanitizer) /* is TSAN enabled? */ -# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) -# endif -# if __has_feature(memory_sanitizer) /* is MSAN enabled? */ -# define _Py_NO_SANITIZE_MEMORY __attribute__((no_sanitize_memory)) -# endif -#elif defined(__GNUC__) -# if defined(__SANITIZE_ADDRESS__) /* GCC 4.8+, is ASAN enabled? */ -# define _Py_NO_SANITIZE_ADDRESS \ - __attribute__((no_sanitize_address)) -# endif - // TSAN is supported since GCC 5.1, but __SANITIZE_THREAD__ macro - // is provided only since GCC 7. 
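The PyObject_Print() change above assumes str()/repr() return a str object and writes its UTF-8 form directly with PyUnicode_AsUTF8AndSize(), instead of round-tripping through an encoded bytes object. A sketch of the same approach in a standalone helper; write_repr is an invented name, not part of the patch:

#include <Python.h>
#include <stdio.h>

/* write_repr: print repr(op) to fp; returns 0 on success, -1 with an
   exception set on failure.  Illustrative only. */
static int
write_repr(PyObject *op, FILE *fp)
{
    PyObject *s = PyObject_Repr(op);                      /* new reference or NULL */
    if (s == NULL) {
        return -1;
    }
    Py_ssize_t len;
    const char *utf8 = PyUnicode_AsUTF8AndSize(s, &len);  /* buffer owned by s */
    if (utf8 == NULL) {
        Py_DECREF(s);
        return -1;
    }
    fwrite(utf8, 1, (size_t)len, fp);
    Py_DECREF(s);
    return 0;
}
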
-# if __GNUC__ > 5 || (__GNUC__ == 5 && __GNUC_MINOR__ >= 1) -# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) -# endif -#endif - -#ifndef _Py_NO_SANITIZE_ADDRESS -# define _Py_NO_SANITIZE_ADDRESS -#endif -#ifndef _Py_NO_SANITIZE_THREAD -# define _Py_NO_SANITIZE_THREAD -#endif -#ifndef _Py_NO_SANITIZE_MEMORY -# define _Py_NO_SANITIZE_MEMORY -#endif - -#ifdef WITH_PYMALLOC - -#ifdef MS_WINDOWS -# include -#elif defined(HAVE_MMAP) -# include -# ifdef MAP_ANONYMOUS -# define ARENAS_USE_MMAP -# endif -#endif -/* Forward declaration */ -static void* _PyObject_Malloc(void *ctx, size_t size); -static void* _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize); -static void _PyObject_Free(void *ctx, void *p); -static void* _PyObject_Realloc(void *ctx, void *ptr, size_t size); -#endif +/***************************************/ +/* low-level allocator implementations */ +/***************************************/ +/* the default raw allocator (wraps malloc) */ -/* bpo-35053: Declare tracemalloc configuration here rather than - Modules/_tracemalloc.c because _tracemalloc can be compiled as dynamic - library, whereas _Py_NewReference() requires it. */ -struct _PyTraceMalloc_Config _Py_tracemalloc_config = _PyTraceMalloc_Config_INIT; - - -static void * +void * _PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size) { /* PyMem_RawMalloc(0) means malloc(1). Some systems would return NULL @@ -101,7 +43,7 @@ _PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size) return malloc(size); } -static void * +void * _PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize) { /* PyMem_RawCalloc(0, 0) means calloc(1, 1). Some systems would return NULL @@ -115,7 +57,7 @@ _PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize) return calloc(nelem, elsize); } -static void * +void * _PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size) { if (size == 0) @@ -123,32 +65,73 @@ _PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size) return realloc(ptr, size); } -static void +void _PyMem_RawFree(void *Py_UNUSED(ctx), void *ptr) { free(ptr); } +#define MALLOC_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} +#define PYRAW_ALLOC MALLOC_ALLOC -#ifdef MS_WINDOWS -static void * -_PyObject_ArenaVirtualAlloc(void *Py_UNUSED(ctx), size_t size) +/* the default object allocator */ + +// The actual implementation is further down. + +#ifdef WITH_PYMALLOC +void* _PyObject_Malloc(void *ctx, size_t size); +void* _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize); +void _PyObject_Free(void *ctx, void *p); +void* _PyObject_Realloc(void *ctx, void *ptr, size_t size); +# define PYMALLOC_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} +# define PYOBJ_ALLOC PYMALLOC_ALLOC +#else +# define PYOBJ_ALLOC MALLOC_ALLOC +#endif // WITH_PYMALLOC + +#define PYMEM_ALLOC PYOBJ_ALLOC + +/* the default debug allocators */ + +// The actual implementation is further down. 
+ +void* _PyMem_DebugRawMalloc(void *ctx, size_t size); +void* _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize); +void* _PyMem_DebugRawRealloc(void *ctx, void *ptr, size_t size); +void _PyMem_DebugRawFree(void *ctx, void *ptr); + +void* _PyMem_DebugMalloc(void *ctx, size_t size); +void* _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize); +void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size); +void _PyMem_DebugFree(void *ctx, void *p); + +#define PYDBGRAW_ALLOC \ + {&_PyRuntime.allocators.debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} +#define PYDBGMEM_ALLOC \ + {&_PyRuntime.allocators.debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} +#define PYDBGOBJ_ALLOC \ + {&_PyRuntime.allocators.debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} + +/* the low-level virtual memory allocator */ + +#ifdef WITH_PYMALLOC +# ifdef MS_WINDOWS +# include +# elif defined(HAVE_MMAP) +# include +# ifdef MAP_ANONYMOUS +# define ARENAS_USE_MMAP +# endif +# endif +#endif + +void * +_PyMem_ArenaAlloc(void *Py_UNUSED(ctx), size_t size) { +#ifdef MS_WINDOWS return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE); -} - -static void -_PyObject_ArenaVirtualFree(void *Py_UNUSED(ctx), void *ptr, - size_t Py_UNUSED(size)) -{ - VirtualFree(ptr, 0, MEM_RELEASE); -} - #elif defined(ARENAS_USE_MMAP) -static void * -_PyObject_ArenaMmap(void *Py_UNUSED(ctx), size_t size) -{ void *ptr; ptr = mmap(NULL, size, PROT_READ|PROT_WRITE, MAP_PRIVATE|MAP_ANONYMOUS, -1, 0); @@ -156,72 +139,73 @@ _PyObject_ArenaMmap(void *Py_UNUSED(ctx), size_t size) return NULL; assert(ptr != NULL); return ptr; -} - -static void -_PyObject_ArenaMunmap(void *Py_UNUSED(ctx), void *ptr, size_t size) -{ - munmap(ptr, size); -} - #else -static void * -_PyObject_ArenaMalloc(void *Py_UNUSED(ctx), size_t size) -{ return malloc(size); +#endif } -static void -_PyObject_ArenaFree(void *Py_UNUSED(ctx), void *ptr, size_t Py_UNUSED(size)) +void +_PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr, +#if defined(ARENAS_USE_MMAP) + size_t size +#else + size_t Py_UNUSED(size) +#endif +) { +#ifdef MS_WINDOWS + VirtualFree(ptr, 0, MEM_RELEASE); +#elif defined(ARENAS_USE_MMAP) + munmap(ptr, size); +#else free(ptr); -} #endif +} -#define MALLOC_ALLOC {NULL, _PyMem_RawMalloc, _PyMem_RawCalloc, _PyMem_RawRealloc, _PyMem_RawFree} -#ifdef WITH_PYMALLOC -# define PYMALLOC_ALLOC {NULL, _PyObject_Malloc, _PyObject_Calloc, _PyObject_Realloc, _PyObject_Free} -#endif +/*******************************************/ +/* end low-level allocator implementations */ +/*******************************************/ -#define PYRAW_ALLOC MALLOC_ALLOC -#ifdef WITH_PYMALLOC -# define PYOBJ_ALLOC PYMALLOC_ALLOC -#else -# define PYOBJ_ALLOC MALLOC_ALLOC + +#if defined(__has_feature) /* Clang */ +# if __has_feature(address_sanitizer) /* is ASAN enabled? */ +# define _Py_NO_SANITIZE_ADDRESS \ + __attribute__((no_sanitize("address"))) +# endif +# if __has_feature(thread_sanitizer) /* is TSAN enabled? */ +# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) +# endif +# if __has_feature(memory_sanitizer) /* is MSAN enabled? */ +# define _Py_NO_SANITIZE_MEMORY __attribute__((no_sanitize_memory)) +# endif +#elif defined(__GNUC__) +# if defined(__SANITIZE_ADDRESS__) /* GCC 4.8+, is ASAN enabled? 
*/ +# define _Py_NO_SANITIZE_ADDRESS \ + __attribute__((no_sanitize_address)) +# endif + // TSAN is supported since GCC 5.1, but __SANITIZE_THREAD__ macro + // is provided only since GCC 7. +# if __GNUC__ > 5 || (__GNUC__ == 5 && __GNUC_MINOR__ >= 1) +# define _Py_NO_SANITIZE_THREAD __attribute__((no_sanitize_thread)) +# endif #endif -#define PYMEM_ALLOC PYOBJ_ALLOC -typedef struct { - /* We tag each block with an API ID in order to tag API violations */ - char api_id; - PyMemAllocatorEx alloc; -} debug_alloc_api_t; -static struct { - debug_alloc_api_t raw; - debug_alloc_api_t mem; - debug_alloc_api_t obj; -} _PyMem_Debug = { - {'r', PYRAW_ALLOC}, - {'m', PYMEM_ALLOC}, - {'o', PYOBJ_ALLOC} - }; +#ifndef _Py_NO_SANITIZE_ADDRESS +# define _Py_NO_SANITIZE_ADDRESS +#endif +#ifndef _Py_NO_SANITIZE_THREAD +# define _Py_NO_SANITIZE_THREAD +#endif +#ifndef _Py_NO_SANITIZE_MEMORY +# define _Py_NO_SANITIZE_MEMORY +#endif -#define PYDBGRAW_ALLOC \ - {&_PyMem_Debug.raw, _PyMem_DebugRawMalloc, _PyMem_DebugRawCalloc, _PyMem_DebugRawRealloc, _PyMem_DebugRawFree} -#define PYDBGMEM_ALLOC \ - {&_PyMem_Debug.mem, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} -#define PYDBGOBJ_ALLOC \ - {&_PyMem_Debug.obj, _PyMem_DebugMalloc, _PyMem_DebugCalloc, _PyMem_DebugRealloc, _PyMem_DebugFree} -#ifdef Py_DEBUG -static PyMemAllocatorEx _PyMem_Raw = PYDBGRAW_ALLOC; -static PyMemAllocatorEx _PyMem = PYDBGMEM_ALLOC; -static PyMemAllocatorEx _PyObject = PYDBGOBJ_ALLOC; -#else -static PyMemAllocatorEx _PyMem_Raw = PYRAW_ALLOC; -static PyMemAllocatorEx _PyMem = PYMEM_ALLOC; -static PyMemAllocatorEx _PyObject = PYOBJ_ALLOC; -#endif +#define _PyMem_Raw (_PyRuntime.allocators.standard.raw) +#define _PyMem (_PyRuntime.allocators.standard.mem) +#define _PyObject (_PyRuntime.allocators.standard.obj) +#define _PyMem_Debug (_PyRuntime.allocators.debug) +#define _PyObject_Arena (_PyRuntime.allocators.obj_arena) static int @@ -424,26 +408,6 @@ _PyMem_GetCurrentAllocatorName(void) } -#undef MALLOC_ALLOC -#undef PYMALLOC_ALLOC -#undef PYRAW_ALLOC -#undef PYMEM_ALLOC -#undef PYOBJ_ALLOC -#undef PYDBGRAW_ALLOC -#undef PYDBGMEM_ALLOC -#undef PYDBGOBJ_ALLOC - - -static PyObjectArenaAllocator _PyObject_Arena = {NULL, -#ifdef MS_WINDOWS - _PyObject_ArenaVirtualAlloc, _PyObject_ArenaVirtualFree -#elif defined(ARENAS_USE_MMAP) - _PyObject_ArenaMmap, _PyObject_ArenaMunmap -#else - _PyObject_ArenaMalloc, _PyObject_ArenaFree -#endif - }; - #ifdef WITH_PYMALLOC static int _PyMem_DebugEnabled(void) @@ -763,516 +727,15 @@ static int running_on_valgrind = -1; #endif -/* An object allocator for Python. - - Here is an introduction to the layers of the Python memory architecture, - showing where the object allocator is actually used (layer +2), It is - called for every object allocation and deallocation (PyObject_New/Del), - unless the object-specific allocators implement a proprietary allocation - scheme (ex.: ints use a simple free list). This is also the place where - the cyclic garbage collector operates selectively on container objects. - - - Object-specific allocators - _____ ______ ______ ________ - [ int ] [ dict ] [ list ] ... 
[ string ] Python core | -+3 | <----- Object-specific memory -----> | <-- Non-object memory --> | - _______________________________ | | - [ Python's object allocator ] | | -+2 | ####### Object memory ####### | <------ Internal buffers ------> | - ______________________________________________________________ | - [ Python's raw memory allocator (PyMem_ API) ] | -+1 | <----- Python memory (under PyMem manager's control) ------> | | - __________________________________________________________________ - [ Underlying general-purpose allocator (ex: C library malloc) ] - 0 | <------ Virtual memory allocated for the python process -------> | - - ========================================================================= - _______________________________________________________________________ - [ OS-specific Virtual Memory Manager (VMM) ] --1 | <--- Kernel dynamic storage allocation & management (page-based) ---> | - __________________________________ __________________________________ - [ ] [ ] --2 | <-- Physical memory: ROM/RAM --> | | <-- Secondary storage (swap) --> | - -*/ -/*==========================================================================*/ - -/* A fast, special-purpose memory allocator for small blocks, to be used - on top of a general-purpose malloc -- heavily based on previous art. */ - -/* Vladimir Marangozov -- August 2000 */ - -/* - * "Memory management is where the rubber meets the road -- if we do the wrong - * thing at any level, the results will not be good. And if we don't make the - * levels work well together, we are in serious trouble." (1) - * - * (1) Paul R. Wilson, Mark S. Johnstone, Michael Neely, and David Boles, - * "Dynamic Storage Allocation: A Survey and Critical Review", - * in Proc. 1995 Int'l. Workshop on Memory Management, September 1995. - */ - -/* #undef WITH_MEMORY_LIMITS */ /* disable mem limit checks */ - -/*==========================================================================*/ - -/* - * Allocation strategy abstract: - * - * For small requests, the allocator sub-allocates blocks of memory. - * Requests greater than SMALL_REQUEST_THRESHOLD bytes are routed to the - * system's allocator. - * - * Small requests are grouped in size classes spaced 8 bytes apart, due - * to the required valid alignment of the returned address. Requests of - * a particular size are serviced from memory pools of 4K (one VMM page). - * Pools are fragmented on demand and contain free lists of blocks of one - * particular size class. In other words, there is a fixed-size allocator - * for each size class. Free pools are shared by the different allocators - * thus minimizing the space reserved for a particular size class. - * - * This allocation strategy is a variant of what is known as "simple - * segregated storage based on array of free lists". The main drawback of - * simple segregated storage is that we might end up with lot of reserved - * memory for the different free lists, which degenerate in time. To avoid - * this, we partition each free list in pools and we share dynamically the - * reserved space between all free lists. This technique is quite efficient - * for memory intensive programs which allocate mainly small-sized blocks. - * - * For small requests we have the following table: - * - * Request in bytes Size of allocated block Size class idx - * ---------------------------------------------------------------- - * 1-8 8 0 - * 9-16 16 1 - * 17-24 24 2 - * 25-32 32 3 - * 33-40 40 4 - * 41-48 48 5 - * 49-56 56 6 - * 57-64 64 7 - * 65-72 72 8 - * ... ... ... 
- * 497-504 504 62 - * 505-512 512 63 - * - * 0, SMALL_REQUEST_THRESHOLD + 1 and up: routed to the underlying - * allocator. - */ - -/*==========================================================================*/ - -/* - * -- Main tunable settings section -- - */ - -/* - * Alignment of addresses returned to the user. 8-bytes alignment works - * on most current architectures (with 32-bit or 64-bit address buses). - * The alignment value is also used for grouping small requests in size - * classes spaced ALIGNMENT bytes apart. - * - * You shouldn't change this unless you know what you are doing. - */ - -#if SIZEOF_VOID_P > 4 -#define ALIGNMENT 16 /* must be 2^N */ -#define ALIGNMENT_SHIFT 4 -#else -#define ALIGNMENT 8 /* must be 2^N */ -#define ALIGNMENT_SHIFT 3 -#endif - -/* Return the number of bytes in size class I, as a uint. */ -#define INDEX2SIZE(I) (((uint)(I) + 1) << ALIGNMENT_SHIFT) - -/* - * Max size threshold below which malloc requests are considered to be - * small enough in order to use preallocated memory pools. You can tune - * this value according to your application behaviour and memory needs. - * - * Note: a size threshold of 512 guarantees that newly created dictionaries - * will be allocated from preallocated memory pools on 64-bit. - * - * The following invariants must hold: - * 1) ALIGNMENT <= SMALL_REQUEST_THRESHOLD <= 512 - * 2) SMALL_REQUEST_THRESHOLD is evenly divisible by ALIGNMENT - * - * Although not required, for better performance and space efficiency, - * it is recommended that SMALL_REQUEST_THRESHOLD is set to a power of 2. - */ -#define SMALL_REQUEST_THRESHOLD 512 -#define NB_SMALL_SIZE_CLASSES (SMALL_REQUEST_THRESHOLD / ALIGNMENT) - -/* - * The system's VMM page size can be obtained on most unices with a - * getpagesize() call or deduced from various header files. To make - * things simpler, we assume that it is 4K, which is OK for most systems. - * It is probably better if this is the native page size, but it doesn't - * have to be. In theory, if SYSTEM_PAGE_SIZE is larger than the native page - * size, then `POOL_ADDR(p)->arenaindex' could rarely cause a segmentation - * violation fault. 4K is apparently OK for all the platforms that python - * currently targets. - */ -#define SYSTEM_PAGE_SIZE (4 * 1024) - -/* - * Maximum amount of memory managed by the allocator for small requests. - */ -#ifdef WITH_MEMORY_LIMITS -#ifndef SMALL_MEMORY_LIMIT -#define SMALL_MEMORY_LIMIT (64 * 1024 * 1024) /* 64 MB -- more? */ -#endif -#endif - -#if !defined(WITH_PYMALLOC_RADIX_TREE) -/* Use radix-tree to track arena memory regions, for address_in_range(). - * Enable by default since it allows larger pool sizes. Can be disabled - * using -DWITH_PYMALLOC_RADIX_TREE=0 */ -#define WITH_PYMALLOC_RADIX_TREE 1 -#endif - -#if SIZEOF_VOID_P > 4 -/* on 64-bit platforms use larger pools and arenas if we can */ -#define USE_LARGE_ARENAS -#if WITH_PYMALLOC_RADIX_TREE -/* large pools only supported if radix-tree is enabled */ -#define USE_LARGE_POOLS -#endif -#endif - -/* - * The allocator sub-allocates blocks of memory (called arenas) aligned - * on a page boundary. This is a reserved virtual address space for the - * current process (obtained through a malloc()/mmap() call). In no way this - * means that the memory arenas will be used entirely. A malloc() is - * usually an address range reservation for bytes, unless all pages within - * this space are referenced subsequently. So malloc'ing big blocks and not - * using them does not mean "wasting memory". 
It's an addressable range - * wastage... - * - * Arenas are allocated with mmap() on systems supporting anonymous memory - * mappings to reduce heap fragmentation. - */ -#ifdef USE_LARGE_ARENAS -#define ARENA_BITS 20 /* 1 MiB */ -#else -#define ARENA_BITS 18 /* 256 KiB */ -#endif -#define ARENA_SIZE (1 << ARENA_BITS) -#define ARENA_SIZE_MASK (ARENA_SIZE - 1) - -#ifdef WITH_MEMORY_LIMITS -#define MAX_ARENAS (SMALL_MEMORY_LIMIT / ARENA_SIZE) -#endif - -/* - * Size of the pools used for small blocks. Must be a power of 2. - */ -#ifdef USE_LARGE_POOLS -#define POOL_BITS 14 /* 16 KiB */ -#else -#define POOL_BITS 12 /* 4 KiB */ -#endif -#define POOL_SIZE (1 << POOL_BITS) -#define POOL_SIZE_MASK (POOL_SIZE - 1) - -#if !WITH_PYMALLOC_RADIX_TREE -#if POOL_SIZE != SYSTEM_PAGE_SIZE -# error "pool size must be equal to system page size" -#endif -#endif - -#define MAX_POOLS_IN_ARENA (ARENA_SIZE / POOL_SIZE) -#if MAX_POOLS_IN_ARENA * POOL_SIZE != ARENA_SIZE -# error "arena size not an exact multiple of pool size" -#endif - -/* - * -- End of tunable settings section -- - */ - -/*==========================================================================*/ - -/* When you say memory, my mind reasons in terms of (pointers to) blocks */ -typedef uint8_t block; - -/* Pool for small blocks. */ -struct pool_header { - union { block *_padding; - uint count; } ref; /* number of allocated blocks */ - block *freeblock; /* pool's free list head */ - struct pool_header *nextpool; /* next pool of this size class */ - struct pool_header *prevpool; /* previous pool "" */ - uint arenaindex; /* index into arenas of base adr */ - uint szidx; /* block size class index */ - uint nextoffset; /* bytes to virgin block */ - uint maxnextoffset; /* largest valid nextoffset */ -}; - -typedef struct pool_header *poolp; - -/* Record keeping for arenas. */ -struct arena_object { - /* The address of the arena, as returned by malloc. Note that 0 - * will never be returned by a successful malloc, and is used - * here to mark an arena_object that doesn't correspond to an - * allocated arena. - */ - uintptr_t address; - - /* Pool-aligned pointer to the next pool to be carved off. */ - block* pool_address; - - /* The number of available pools in the arena: free pools + never- - * allocated pools. - */ - uint nfreepools; - - /* The total number of pools in the arena, whether or not available. */ - uint ntotalpools; - - /* Singly-linked list of available pools. */ - struct pool_header* freepools; - - /* Whenever this arena_object is not associated with an allocated - * arena, the nextarena member is used to link all unassociated - * arena_objects in the singly-linked `unused_arena_objects` list. - * The prevarena member is unused in this case. - * - * When this arena_object is associated with an allocated arena - * with at least one available pool, both members are used in the - * doubly-linked `usable_arenas` list, which is maintained in - * increasing order of `nfreepools` values. - * - * Else this arena_object is associated with an allocated arena - * all of whose pools are in use. `nextarena` and `prevarena` - * are both meaningless in this case. 
- */ - struct arena_object* nextarena; - struct arena_object* prevarena; -}; - -#define POOL_OVERHEAD _Py_SIZE_ROUND_UP(sizeof(struct pool_header), ALIGNMENT) - -#define DUMMY_SIZE_IDX 0xffff /* size class of newly cached pools */ - -/* Round pointer P down to the closest pool-aligned address <= P, as a poolp */ -#define POOL_ADDR(P) ((poolp)_Py_ALIGN_DOWN((P), POOL_SIZE)) - -/* Return total number of blocks in pool of size index I, as a uint. */ -#define NUMBLOCKS(I) ((uint)(POOL_SIZE - POOL_OVERHEAD) / INDEX2SIZE(I)) - -/*==========================================================================*/ - -/* - * Pool table -- headed, circular, doubly-linked lists of partially used pools. - -This is involved. For an index i, usedpools[i+i] is the header for a list of -all partially used pools holding small blocks with "size class idx" i. So -usedpools[0] corresponds to blocks of size 8, usedpools[2] to blocks of size -16, and so on: index 2*i <-> blocks of size (i+1)<freeblock points to -the start of a singly-linked list of free blocks within the pool. When a -block is freed, it's inserted at the front of its pool's freeblock list. Note -that the available blocks in a pool are *not* linked all together when a pool -is initialized. Instead only "the first two" (lowest addresses) blocks are -set up, returning the first such block, and setting pool->freeblock to a -one-block list holding the second such block. This is consistent with that -pymalloc strives at all levels (arena, pool, and block) never to touch a piece -of memory until it's actually needed. - -So long as a pool is in the used state, we're certain there *is* a block -available for allocating, and pool->freeblock is not NULL. If pool->freeblock -points to the end of the free list before we've carved the entire pool into -blocks, that means we simply haven't yet gotten to one of the higher-address -blocks. The offset from the pool_header to the start of "the next" virgin -block is stored in the pool_header nextoffset member, and the largest value -of nextoffset that makes sense is stored in the maxnextoffset member when a -pool is initialized. All the blocks in a pool have been passed out at least -once when and only when nextoffset > maxnextoffset. - - -Major obscurity: While the usedpools vector is declared to have poolp -entries, it doesn't really. It really contains two pointers per (conceptual) -poolp entry, the nextpool and prevpool members of a pool_header. The -excruciating initialization code below fools C so that - - usedpool[i+i] - -"acts like" a genuine poolp, but only so long as you only reference its -nextpool and prevpool members. The "- 2*sizeof(block *)" gibberish is -compensating for that a pool_header's nextpool and prevpool members -immediately follow a pool_header's first two members: - - union { block *_padding; - uint count; } ref; - block *freeblock; - -each of which consume sizeof(block *) bytes. So what usedpools[i+i] really -contains is a fudged-up pointer p such that *if* C believes it's a poolp -pointer, then p->nextpool and p->prevpool are both p (meaning that the headed -circular list is empty). - -It's unclear why the usedpools setup is so convoluted. It could be to -minimize the amount of cache required to hold this heavily-referenced table -(which only *needs* the two interpool pointer members of a pool_header). 
OTOH, -referencing code has to remember to "double the index" and doing so isn't -free, usedpools[0] isn't a strictly legal pointer, and we're crucially relying -on that C doesn't insert any padding anywhere in a pool_header at or before -the prevpool member. -**************************************************************************** */ - -#define PTA(x) ((poolp )((uint8_t *)&(usedpools[2*(x)]) - 2*sizeof(block *))) -#define PT(x) PTA(x), PTA(x) - -static poolp usedpools[2 * ((NB_SMALL_SIZE_CLASSES + 7) / 8) * 8] = { - PT(0), PT(1), PT(2), PT(3), PT(4), PT(5), PT(6), PT(7) -#if NB_SMALL_SIZE_CLASSES > 8 - , PT(8), PT(9), PT(10), PT(11), PT(12), PT(13), PT(14), PT(15) -#if NB_SMALL_SIZE_CLASSES > 16 - , PT(16), PT(17), PT(18), PT(19), PT(20), PT(21), PT(22), PT(23) -#if NB_SMALL_SIZE_CLASSES > 24 - , PT(24), PT(25), PT(26), PT(27), PT(28), PT(29), PT(30), PT(31) -#if NB_SMALL_SIZE_CLASSES > 32 - , PT(32), PT(33), PT(34), PT(35), PT(36), PT(37), PT(38), PT(39) -#if NB_SMALL_SIZE_CLASSES > 40 - , PT(40), PT(41), PT(42), PT(43), PT(44), PT(45), PT(46), PT(47) -#if NB_SMALL_SIZE_CLASSES > 48 - , PT(48), PT(49), PT(50), PT(51), PT(52), PT(53), PT(54), PT(55) -#if NB_SMALL_SIZE_CLASSES > 56 - , PT(56), PT(57), PT(58), PT(59), PT(60), PT(61), PT(62), PT(63) -#if NB_SMALL_SIZE_CLASSES > 64 -#error "NB_SMALL_SIZE_CLASSES should be less than 64" -#endif /* NB_SMALL_SIZE_CLASSES > 64 */ -#endif /* NB_SMALL_SIZE_CLASSES > 56 */ -#endif /* NB_SMALL_SIZE_CLASSES > 48 */ -#endif /* NB_SMALL_SIZE_CLASSES > 40 */ -#endif /* NB_SMALL_SIZE_CLASSES > 32 */ -#endif /* NB_SMALL_SIZE_CLASSES > 24 */ -#endif /* NB_SMALL_SIZE_CLASSES > 16 */ -#endif /* NB_SMALL_SIZE_CLASSES > 8 */ -}; - -/*========================================================================== -Arena management. - -`arenas` is a vector of arena_objects. It contains maxarenas entries, some of -which may not be currently used (== they're arena_objects that aren't -currently associated with an allocated arena). Note that arenas proper are -separately malloc'ed. - -Prior to Python 2.5, arenas were never free()'ed. Starting with Python 2.5, -we do try to free() arenas, and use some mild heuristic strategies to increase -the likelihood that arenas eventually can be freed. - -unused_arena_objects - - This is a singly-linked list of the arena_objects that are currently not - being used (no arena is associated with them). Objects are taken off the - head of the list in new_arena(), and are pushed on the head of the list in - PyObject_Free() when the arena is empty. Key invariant: an arena_object - is on this list if and only if its .address member is 0. - -usable_arenas - - This is a doubly-linked list of the arena_objects associated with arenas - that have pools available. These pools are either waiting to be reused, - or have not been used before. The list is sorted to have the most- - allocated arenas first (ascending order based on the nfreepools member). - This means that the next allocation will come from a heavily used arena, - which gives the nearly empty arenas a chance to be returned to the system. - In my unscientific tests this dramatically improved the number of arenas - that could be freed. - -Note that an arena_object associated with an arena all of whose pools are -currently in use isn't on either list. - -Changed in Python 3.8: keeping usable_arenas sorted by number of free pools -used to be done by one-at-a-time linear search when an arena's number of -free pools changed. 
That could, overall, consume time quadratic in the -number of arenas. That didn't really matter when there were only a few -hundred arenas (typical!), but could be a timing disaster when there were -hundreds of thousands. See bpo-37029. - -Now we have a vector of "search fingers" to eliminate the need to search: -nfp2lasta[nfp] returns the last ("rightmost") arena in usable_arenas -with nfp free pools. This is NULL if and only if there is no arena with -nfp free pools in usable_arenas. -*/ - -/* Array of objects used to track chunks of memory (arenas). */ -static struct arena_object* arenas = NULL; -/* Number of slots currently allocated in the `arenas` vector. */ -static uint maxarenas = 0; - -/* The head of the singly-linked, NULL-terminated list of available - * arena_objects. - */ -static struct arena_object* unused_arena_objects = NULL; - -/* The head of the doubly-linked, NULL-terminated at each end, list of - * arena_objects associated with arenas that have pools available. - */ -static struct arena_object* usable_arenas = NULL; - -/* nfp2lasta[nfp] is the last arena in usable_arenas with nfp free pools */ -static struct arena_object* nfp2lasta[MAX_POOLS_IN_ARENA + 1] = { NULL }; - -/* How many arena_objects do we initially allocate? - * 16 = can allocate 16 arenas = 16 * ARENA_SIZE = 4MB before growing the - * `arenas` vector. - */ -#define INITIAL_ARENA_OBJECTS 16 - -/* Number of arenas allocated that haven't been free()'d. */ -static size_t narenas_currently_allocated = 0; - -/* Total number of times malloc() called to allocate an arena. */ -static size_t ntimes_arena_allocated = 0; -/* High water mark (max value ever seen) for narenas_currently_allocated. */ -static size_t narenas_highwater = 0; - -static Py_ssize_t raw_allocated_blocks; +#define allarenas (_PyRuntime.obmalloc.mgmt.arenas) +#define maxarenas (_PyRuntime.obmalloc.mgmt.maxarenas) +#define unused_arena_objects (_PyRuntime.obmalloc.mgmt.unused_arena_objects) +#define usable_arenas (_PyRuntime.obmalloc.mgmt.usable_arenas) +#define nfp2lasta (_PyRuntime.obmalloc.mgmt.nfp2lasta) +#define narenas_currently_allocated (_PyRuntime.obmalloc.mgmt.narenas_currently_allocated) +#define ntimes_arena_allocated (_PyRuntime.obmalloc.mgmt.ntimes_arena_allocated) +#define narenas_highwater (_PyRuntime.obmalloc.mgmt.narenas_highwater) +#define raw_allocated_blocks (_PyRuntime.obmalloc.mgmt.raw_allocated_blocks) Py_ssize_t _Py_GetAllocatedBlocks(void) @@ -1281,15 +744,15 @@ _Py_GetAllocatedBlocks(void) /* add up allocated blocks for used pools */ for (uint i = 0; i < maxarenas; ++i) { /* Skip arenas which are not allocated. */ - if (arenas[i].address == 0) { + if (allarenas[i].address == 0) { continue; } - uintptr_t base = (uintptr_t)_Py_ALIGN_UP(arenas[i].address, POOL_SIZE); + uintptr_t base = (uintptr_t)_Py_ALIGN_UP(allarenas[i].address, POOL_SIZE); /* visit every pool in the arena */ - assert(base <= (uintptr_t) arenas[i].pool_address); - for (; base < (uintptr_t) arenas[i].pool_address; base += POOL_SIZE) { + assert(base <= (uintptr_t) allarenas[i].pool_address); + for (; base < (uintptr_t) allarenas[i].pool_address; base += POOL_SIZE) { poolp p = (poolp)base; n += p->ref.count; } @@ -1299,155 +762,18 @@ _Py_GetAllocatedBlocks(void) #if WITH_PYMALLOC_RADIX_TREE /*==========================================================================*/ -/* radix tree for tracking arena usage. If enabled, used to implement - address_in_range(). 
- - memory address bit allocation for keys - - 64-bit pointers, IGNORE_BITS=0 and 2^20 arena size: - 15 -> MAP_TOP_BITS - 15 -> MAP_MID_BITS - 14 -> MAP_BOT_BITS - 20 -> ideal aligned arena - ---- - 64 - - 64-bit pointers, IGNORE_BITS=16, and 2^20 arena size: - 16 -> IGNORE_BITS - 10 -> MAP_TOP_BITS - 10 -> MAP_MID_BITS - 8 -> MAP_BOT_BITS - 20 -> ideal aligned arena - ---- - 64 - - 32-bit pointers and 2^18 arena size: - 14 -> MAP_BOT_BITS - 18 -> ideal aligned arena - ---- - 32 - -*/ - -#if SIZEOF_VOID_P == 8 - -/* number of bits in a pointer */ -#define POINTER_BITS 64 - -/* High bits of memory addresses that will be ignored when indexing into the - * radix tree. Setting this to zero is the safe default. For most 64-bit - * machines, setting this to 16 would be safe. The kernel would not give - * user-space virtual memory addresses that have significant information in - * those high bits. The main advantage to setting IGNORE_BITS > 0 is that less - * virtual memory will be used for the top and middle radix tree arrays. Those - * arrays are allocated in the BSS segment and so will typically consume real - * memory only if actually accessed. - */ -#define IGNORE_BITS 0 - -/* use the top and mid layers of the radix tree */ -#define USE_INTERIOR_NODES - -#elif SIZEOF_VOID_P == 4 - -#define POINTER_BITS 32 -#define IGNORE_BITS 0 - -#else - - /* Currently this code works for 64-bit or 32-bit pointers only. */ -#error "obmalloc radix tree requires 64-bit or 32-bit pointers." - -#endif /* SIZEOF_VOID_P */ - -/* arena_coverage_t members require this to be true */ -#if ARENA_BITS >= 32 -# error "arena size must be < 2^32" -#endif - -/* the lower bits of the address that are not ignored */ -#define ADDRESS_BITS (POINTER_BITS - IGNORE_BITS) +/* radix tree for tracking arena usage. */ +#define arena_map_root (_PyRuntime.obmalloc.usage.arena_map_root) #ifdef USE_INTERIOR_NODES -/* number of bits used for MAP_TOP and MAP_MID nodes */ -#define INTERIOR_BITS ((ADDRESS_BITS - ARENA_BITS + 2) / 3) -#else -#define INTERIOR_BITS 0 -#endif - -#define MAP_TOP_BITS INTERIOR_BITS -#define MAP_TOP_LENGTH (1 << MAP_TOP_BITS) -#define MAP_TOP_MASK (MAP_TOP_LENGTH - 1) - -#define MAP_MID_BITS INTERIOR_BITS -#define MAP_MID_LENGTH (1 << MAP_MID_BITS) -#define MAP_MID_MASK (MAP_MID_LENGTH - 1) - -#define MAP_BOT_BITS (ADDRESS_BITS - ARENA_BITS - 2*INTERIOR_BITS) -#define MAP_BOT_LENGTH (1 << MAP_BOT_BITS) -#define MAP_BOT_MASK (MAP_BOT_LENGTH - 1) - -#define MAP_BOT_SHIFT ARENA_BITS -#define MAP_MID_SHIFT (MAP_BOT_BITS + MAP_BOT_SHIFT) -#define MAP_TOP_SHIFT (MAP_MID_BITS + MAP_MID_SHIFT) - -#define AS_UINT(p) ((uintptr_t)(p)) -#define MAP_BOT_INDEX(p) ((AS_UINT(p) >> MAP_BOT_SHIFT) & MAP_BOT_MASK) -#define MAP_MID_INDEX(p) ((AS_UINT(p) >> MAP_MID_SHIFT) & MAP_MID_MASK) -#define MAP_TOP_INDEX(p) ((AS_UINT(p) >> MAP_TOP_SHIFT) & MAP_TOP_MASK) - -#if IGNORE_BITS > 0 -/* Return the ignored part of the pointer address. Those bits should be same - * for all valid pointers if IGNORE_BITS is set correctly. - */ -#define HIGH_BITS(p) (AS_UINT(p) >> ADDRESS_BITS) -#else -#define HIGH_BITS(p) 0 -#endif - - -/* This is the leaf of the radix tree. See arena_map_mark_used() for the - * meaning of these members. */ -typedef struct { - int32_t tail_hi; - int32_t tail_lo; -} arena_coverage_t; - -typedef struct arena_map_bot { - /* The members tail_hi and tail_lo are accessed together. So, it - * better to have them as an array of structs, rather than two - * arrays. 
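Most of the obmalloc churn in these hunks is one repeated pattern: a file-scope static (the allocator tables, arenas/allarenas, usedpools, the radix-tree root and counters) moves into a field of _PyRuntime, and a #define keeps the old spelling so the surrounding code needs no edits. A tiny sketch of that pattern with invented names (demo_runtime, counter), purely for illustration:

/* Before: per-file global, e.g.  static int counter = 0; */

/* After: the value lives in a shared state struct, and a macro preserves the
   old name so existing call sites keep compiling unchanged. */
struct demo_runtime_state {
    int counter;
};
static struct demo_runtime_state demo_runtime;
#define counter (demo_runtime.counter)

static void
bump(void)
{
    counter++;    /* expands to demo_runtime.counter++ */
}
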
- */ - arena_coverage_t arenas[MAP_BOT_LENGTH]; -} arena_map_bot_t; - -#ifdef USE_INTERIOR_NODES -typedef struct arena_map_mid { - struct arena_map_bot *ptrs[MAP_MID_LENGTH]; -} arena_map_mid_t; - -typedef struct arena_map_top { - struct arena_map_mid *ptrs[MAP_TOP_LENGTH]; -} arena_map_top_t; -#endif - -/* The root of radix tree. Note that by initializing like this, the memory - * should be in the BSS. The OS will only memory map pages as the MAP_MID - * nodes get used (OS pages are demand loaded as needed). - */ -#ifdef USE_INTERIOR_NODES -static arena_map_top_t arena_map_root; -/* accounting for number of used interior nodes */ -static int arena_map_mid_count; -static int arena_map_bot_count; -#else -static arena_map_bot_t arena_map_root; +#define arena_map_mid_count (_PyRuntime.obmalloc.usage.arena_map_mid_count) +#define arena_map_bot_count (_PyRuntime.obmalloc.usage.arena_map_bot_count) #endif /* Return a pointer to a bottom tree node, return NULL if it doesn't exist or * it cannot be created */ static Py_ALWAYS_INLINE arena_map_bot_t * -arena_map_get(block *p, int create) +arena_map_get(pymem_block *p, int create) { #ifdef USE_INTERIOR_NODES /* sanity check that IGNORE_BITS is correct */ @@ -1512,12 +838,12 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) { /* sanity check that IGNORE_BITS is correct */ assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root)); - arena_map_bot_t *n_hi = arena_map_get((block *)arena_base, is_used); + arena_map_bot_t *n_hi = arena_map_get((pymem_block *)arena_base, is_used); if (n_hi == NULL) { assert(is_used); /* otherwise node should already exist */ return 0; /* failed to allocate space for node */ } - int i3 = MAP_BOT_INDEX((block *)arena_base); + int i3 = MAP_BOT_INDEX((pymem_block *)arena_base); int32_t tail = (int32_t)(arena_base & ARENA_SIZE_MASK); if (tail == 0) { /* is ideal arena address */ @@ -1537,7 +863,7 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) * must overflow to 0. However, that would mean arena_base was * "ideal" and we should not be in this case. */ assert(arena_base < arena_base_next); - arena_map_bot_t *n_lo = arena_map_get((block *)arena_base_next, is_used); + arena_map_bot_t *n_lo = arena_map_get((pymem_block *)arena_base_next, is_used); if (n_lo == NULL) { assert(is_used); /* otherwise should already exist */ n_hi->arenas[i3].tail_hi = 0; @@ -1552,7 +878,7 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) /* Return true if 'p' is a pointer inside an obmalloc arena. * _PyObject_Free() calls this so it needs to be very fast. */ static int -arena_map_is_used(block *p) +arena_map_is_used(pymem_block *p) { arena_map_bot_t *n = arena_map_get(p, 0); if (n == NULL) { @@ -1604,14 +930,14 @@ new_arena(void) if (numarenas <= maxarenas) return NULL; /* overflow */ #if SIZEOF_SIZE_T <= SIZEOF_INT - if (numarenas > SIZE_MAX / sizeof(*arenas)) + if (numarenas > SIZE_MAX / sizeof(*allarenas)) return NULL; /* overflow */ #endif - nbytes = numarenas * sizeof(*arenas); - arenaobj = (struct arena_object *)PyMem_RawRealloc(arenas, nbytes); + nbytes = numarenas * sizeof(*allarenas); + arenaobj = (struct arena_object *)PyMem_RawRealloc(allarenas, nbytes); if (arenaobj == NULL) return NULL; - arenas = arenaobj; + allarenas = arenaobj; /* We might need to fix pointers that were copied. However, * new_arena only gets called when all the pages in the @@ -1624,13 +950,13 @@ new_arena(void) /* Put the new arenas on the unused_arena_objects list. 
*/ for (i = maxarenas; i < numarenas; ++i) { - arenas[i].address = 0; /* mark as unassociated */ - arenas[i].nextarena = i < numarenas - 1 ? - &arenas[i+1] : NULL; + allarenas[i].address = 0; /* mark as unassociated */ + allarenas[i].nextarena = i < numarenas - 1 ? + &allarenas[i+1] : NULL; } /* Update globals. */ - unused_arena_objects = &arenas[maxarenas]; + unused_arena_objects = &allarenas[maxarenas]; maxarenas = numarenas; } @@ -1666,7 +992,7 @@ new_arena(void) arenaobj->freepools = NULL; /* pool_address <- first pool-aligned address in the arena nfreepools <- number of whole pools that fit after alignment */ - arenaobj->pool_address = (block*)arenaobj->address; + arenaobj->pool_address = (pymem_block*)arenaobj->address; arenaobj->nfreepools = MAX_POOLS_IN_ARENA; excess = (uint)(arenaobj->address & POOL_SIZE_MASK); if (excess != 0) { @@ -1777,14 +1103,16 @@ address_in_range(void *p, poolp pool) // only once. uint arenaindex = *((volatile uint *)&pool->arenaindex); return arenaindex < maxarenas && - (uintptr_t)p - arenas[arenaindex].address < ARENA_SIZE && - arenas[arenaindex].address != 0; + (uintptr_t)p - allarenas[arenaindex].address < ARENA_SIZE && + allarenas[arenaindex].address != 0; } #endif /* !WITH_PYMALLOC_RADIX_TREE */ /*==========================================================================*/ +#define usedpools (_PyRuntime.obmalloc.pools.used) + // Called when freelist is exhausted. Extend the freelist if there is // space for a block. Otherwise, remove this pool from usedpools. static void @@ -1792,9 +1120,9 @@ pymalloc_pool_extend(poolp pool, uint size) { if (UNLIKELY(pool->nextoffset <= pool->maxnextoffset)) { /* There is room for another block. */ - pool->freeblock = (block*)pool + pool->nextoffset; + pool->freeblock = (pymem_block*)pool + pool->nextoffset; pool->nextoffset += INDEX2SIZE(size); - *(block **)(pool->freeblock) = NULL; + *(pymem_block **)(pool->freeblock) = NULL; return; } @@ -1874,7 +1202,7 @@ allocate_from_new_pool(uint size) */ assert(usable_arenas->freepools != NULL || usable_arenas->pool_address <= - (block*)usable_arenas->address + + (pymem_block*)usable_arenas->address + ARENA_SIZE - POOL_SIZE); } } @@ -1883,10 +1211,10 @@ allocate_from_new_pool(uint size) assert(usable_arenas->nfreepools > 0); assert(usable_arenas->freepools == NULL); pool = (poolp)usable_arenas->pool_address; - assert((block*)pool <= (block*)usable_arenas->address + + assert((pymem_block*)pool <= (pymem_block*)usable_arenas->address + ARENA_SIZE - POOL_SIZE); - pool->arenaindex = (uint)(usable_arenas - arenas); - assert(&arenas[pool->arenaindex] == usable_arenas); + pool->arenaindex = (uint)(usable_arenas - allarenas); + assert(&allarenas[pool->arenaindex] == usable_arenas); pool->szidx = DUMMY_SIZE_IDX; usable_arenas->pool_address += POOL_SIZE; --usable_arenas->nfreepools; @@ -1905,7 +1233,7 @@ allocate_from_new_pool(uint size) } /* Frontlink to used pools. 
*/ - block *bp; + pymem_block *bp; poolp next = usedpools[size + size]; /* == prev */ pool->nextpool = next; pool->prevpool = next; @@ -1919,7 +1247,7 @@ allocate_from_new_pool(uint size) */ bp = pool->freeblock; assert(bp != NULL); - pool->freeblock = *(block **)bp; + pool->freeblock = *(pymem_block **)bp; return bp; } /* @@ -1929,11 +1257,11 @@ allocate_from_new_pool(uint size) */ pool->szidx = size; size = INDEX2SIZE(size); - bp = (block *)pool + POOL_OVERHEAD; + bp = (pymem_block *)pool + POOL_OVERHEAD; pool->nextoffset = POOL_OVERHEAD + (size << 1); pool->maxnextoffset = POOL_SIZE - size; pool->freeblock = bp + size; - *(block **)(pool->freeblock) = NULL; + *(pymem_block **)(pool->freeblock) = NULL; return bp; } @@ -1966,7 +1294,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) uint size = (uint)(nbytes - 1) >> ALIGNMENT_SHIFT; poolp pool = usedpools[size + size]; - block *bp; + pymem_block *bp; if (LIKELY(pool != pool->nextpool)) { /* @@ -1977,7 +1305,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) bp = pool->freeblock; assert(bp != NULL); - if (UNLIKELY((pool->freeblock = *(block **)bp) == NULL)) { + if (UNLIKELY((pool->freeblock = *(pymem_block **)bp) == NULL)) { // Reached the end of the free list, try to extend it. pymalloc_pool_extend(pool, size); } @@ -1993,7 +1321,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) } -static void * +void * _PyObject_Malloc(void *ctx, size_t nbytes) { void* ptr = pymalloc_alloc(ctx, nbytes); @@ -2009,7 +1337,7 @@ _PyObject_Malloc(void *ctx, size_t nbytes) } -static void * +void * _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize) { assert(elsize == 0 || nelem <= (size_t)PY_SSIZE_T_MAX / elsize); @@ -2056,7 +1384,7 @@ insert_to_freepool(poolp pool) /* Link the pool to freepools. This is a singly-linked * list, and pool->prevpool isn't used there. */ - struct arena_object *ao = &arenas[pool->arenaindex]; + struct arena_object *ao = &allarenas[pool->arenaindex]; pool->nextpool = ao->freepools; ao->freepools = pool; uint nf = ao->nfreepools; @@ -2239,9 +1567,9 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) * list in any case). */ assert(pool->ref.count > 0); /* else it was empty */ - block *lastfree = pool->freeblock; - *(block **)p = lastfree; - pool->freeblock = (block *)p; + pymem_block *lastfree = pool->freeblock; + *(pymem_block **)p = lastfree; + pool->freeblock = (pymem_block *)p; pool->ref.count--; if (UNLIKELY(lastfree == NULL)) { @@ -2273,7 +1601,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) } -static void +void _PyObject_Free(void *ctx, void *p) { /* PyObject_Free(NULL) has no effect */ @@ -2359,7 +1687,7 @@ pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes) } -static void * +void * _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes) { void *ptr2; @@ -2536,13 +1864,13 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes) return data; } -static void * +void * _PyMem_DebugRawMalloc(void *ctx, size_t nbytes) { return _PyMem_DebugRawAlloc(0, ctx, nbytes); } -static void * +void * _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) { size_t nbytes; @@ -2557,7 +1885,7 @@ _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) Then fills the original bytes with PYMEM_DEADBYTE. Then calls the underlying free. 
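Making _PyObject_Malloc() and the _PyMem_Debug* entry points non-static and collecting the allocator tables under _PyRuntime.allocators does not change the public hook interface: PyMem_GetAllocator() and PyMem_SetAllocator() still read and replace the PyMemAllocatorEx table for a domain. A minimal counting hook on the raw domain, sketched with invented names (counting_malloc, install_counting_hook, alloc_count):

#include <Python.h>

static PyMemAllocatorEx orig_raw;
static size_t alloc_count = 0;

static void *
counting_malloc(void *ctx, size_t size)
{
    alloc_count++;                                  /* count every raw malloc */
    return orig_raw.malloc(orig_raw.ctx, size);     /* delegate to the saved allocator */
}

static void
install_counting_hook(void)
{
    PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &orig_raw);
    PyMemAllocatorEx hook = orig_raw;               /* keep calloc/realloc/free unchanged */
    hook.malloc = counting_malloc;
    PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &hook);
}
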
*/ -static void +void _PyMem_DebugRawFree(void *ctx, void *p) { /* PyMem_Free(NULL) has no effect */ @@ -2577,7 +1905,7 @@ _PyMem_DebugRawFree(void *ctx, void *p) } -static void * +void * _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) { if (p == NULL) { @@ -2687,14 +2015,14 @@ _PyMem_DebugCheckGIL(const char *func) } } -static void * +void * _PyMem_DebugMalloc(void *ctx, size_t nbytes) { _PyMem_DebugCheckGIL(__func__); return _PyMem_DebugRawMalloc(ctx, nbytes); } -static void * +void * _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize) { _PyMem_DebugCheckGIL(__func__); @@ -2702,7 +2030,7 @@ _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize) } -static void +void _PyMem_DebugFree(void *ctx, void *ptr) { _PyMem_DebugCheckGIL(__func__); @@ -2710,7 +2038,7 @@ _PyMem_DebugFree(void *ctx, void *ptr) } -static void * +void * _PyMem_DebugRealloc(void *ctx, void *ptr, size_t nbytes) { _PyMem_DebugCheckGIL(__func__); @@ -3000,15 +2328,14 @@ _PyObject_DebugMallocStats(FILE *out) * will be living in full pools -- would be a shame to miss them. */ for (i = 0; i < maxarenas; ++i) { - uint j; - uintptr_t base = arenas[i].address; + uintptr_t base = allarenas[i].address; /* Skip arenas which are not allocated. */ - if (arenas[i].address == (uintptr_t)NULL) + if (allarenas[i].address == (uintptr_t)NULL) continue; narenas += 1; - numfreepools += arenas[i].nfreepools; + numfreepools += allarenas[i].nfreepools; /* round up to pool alignment */ if (base & (uintptr_t)POOL_SIZE_MASK) { @@ -3018,9 +2345,8 @@ _PyObject_DebugMallocStats(FILE *out) } /* visit every pool in the arena */ - assert(base <= (uintptr_t) arenas[i].pool_address); - for (j = 0; base < (uintptr_t) arenas[i].pool_address; - ++j, base += POOL_SIZE) { + assert(base <= (uintptr_t) allarenas[i].pool_address); + for (; base < (uintptr_t) allarenas[i].pool_address; base += POOL_SIZE) { poolp p = (poolp)base; const uint sz = p->szidx; uint freeblocks; @@ -3028,7 +2354,7 @@ _PyObject_DebugMallocStats(FILE *out) if (p->ref.count == 0) { /* currently unused */ #ifdef Py_DEBUG - assert(pool_is_in_list(p, arenas[i].freepools)); + assert(pool_is_in_list(p, allarenas[i].freepools)); #endif continue; } diff --git a/Objects/odictobject.c b/Objects/odictobject.c index bd2a7677fe1cf4..4976b70b5dff5a 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -889,8 +889,7 @@ odict_inplace_or(PyObject *self, PyObject *other) if (mutablemapping_update_arg(self, other) < 0) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } /* tp_as_number */ @@ -1007,8 +1006,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, return NULL; assert(_odict_find_node(self, key) == NULL); if (PyODict_SetItem((PyObject *)self, key, default_value) >= 0) { - result = default_value; - Py_INCREF(result); + result = Py_NewRef(default_value); } } else { @@ -1024,8 +1022,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, result = PyObject_GetItem((PyObject *)self, key); } else if (PyObject_SetItem((PyObject *)self, key, default_value) >= 0) { - result = default_value; - Py_INCREF(result); + result = Py_NewRef(default_value); } } @@ -1055,8 +1052,7 @@ _odict_popkey_hash(PyObject *od, PyObject *key, PyObject *failobj, else if (value == NULL && !PyErr_Occurred()) { /* Apply the fallback value, if necessary. 
*/ if (failobj) { - value = failobj; - Py_INCREF(failobj); + value = Py_NewRef(failobj); } else { PyErr_SetObject(PyExc_KeyError, key); @@ -1118,8 +1114,7 @@ OrderedDict_popitem_impl(PyODictObject *self, int last) } node = last ? _odict_LAST(self) : _odict_FIRST(self); - key = _odictnode_KEY(node); - Py_INCREF(key); + key = Py_NewRef(_odictnode_KEY(node)); value = _odict_popkey_hash((PyObject *)self, key, NULL, _odictnode_HASH(node)); if (value == NULL) return NULL; @@ -1497,8 +1492,7 @@ odict_richcompare(PyObject *v, PyObject *w, int op) return NULL; res = (eq == (op == Py_EQ)) ? Py_True : Py_False; - Py_INCREF(res); - return res; + return Py_NewRef(res); } else { Py_RETURN_NOTIMPLEMENTED; } @@ -1714,8 +1708,7 @@ odictiter_nextkey(odictiterobject *di) di->di_current = NULL; } else { - di->di_current = _odictnode_KEY(node); - Py_INCREF(di->di_current); + di->di_current = Py_NewRef(_odictnode_KEY(node)); } return key; @@ -1872,12 +1865,10 @@ odictiter_new(PyODictObject *od, int kind) di->kind = kind; node = reversed ? _odict_LAST(od) : _odict_FIRST(od); - di->di_current = node ? _odictnode_KEY(node) : NULL; - Py_XINCREF(di->di_current); + di->di_current = node ? Py_NewRef(_odictnode_KEY(node)) : NULL; di->di_size = PyODict_SIZE(od); di->di_state = od->od_state; - di->di_odict = od; - Py_INCREF(od); + di->di_odict = (PyODictObject*)Py_NewRef(od); _PyObject_GC_TRACK(di); return (PyObject *)di; diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index f0b06de1c7d2a0..a889aa04db81f0 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -105,10 +105,8 @@ range_from_array(PyTypeObject *type, PyObject *const *args, Py_ssize_t num_args) if (!stop) { return NULL; } - start = _PyLong_GetZero(); - Py_INCREF(start); - step = _PyLong_GetOne(); - Py_INCREF(step); + start = Py_NewRef(_PyLong_GetZero()); + step = Py_NewRef(_PyLong_GetOne()); break; case 0: PyErr_SetString(PyExc_TypeError, @@ -216,8 +214,7 @@ compute_range_length(PyObject *start, PyObject *stop, PyObject *step) if (cmp_result < 0) return NULL; result = zero; - Py_INCREF(result); - return result; + return Py_NewRef(result); } if ((tmp1 = PyNumber_Subtract(hi, lo)) == NULL) @@ -297,8 +294,7 @@ compute_range_item(rangeobject *r, PyObject *arg) return NULL; } } else { - i = arg; - Py_INCREF(i); + i = Py_NewRef(arg); } /* PyLong equivalent to: @@ -522,30 +518,24 @@ range_hash(rangeobject *r) t = PyTuple_New(3); if (!t) return -1; - Py_INCREF(r->length); - PyTuple_SET_ITEM(t, 0, r->length); + PyTuple_SET_ITEM(t, 0, Py_NewRef(r->length)); cmp_result = PyObject_Not(r->length); if (cmp_result == -1) goto end; if (cmp_result == 1) { - Py_INCREF(Py_None); - Py_INCREF(Py_None); - PyTuple_SET_ITEM(t, 1, Py_None); - PyTuple_SET_ITEM(t, 2, Py_None); + PyTuple_SET_ITEM(t, 1, Py_NewRef(Py_None)); + PyTuple_SET_ITEM(t, 2, Py_NewRef(Py_None)); } else { - Py_INCREF(r->start); - PyTuple_SET_ITEM(t, 1, r->start); + PyTuple_SET_ITEM(t, 1, Py_NewRef(r->start)); cmp_result = PyObject_RichCompareBool(r->length, _PyLong_GetOne(), Py_EQ); if (cmp_result == -1) goto end; if (cmp_result == 1) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(t, 2, Py_None); + PyTuple_SET_ITEM(t, 2, Py_NewRef(Py_None)); } else { - Py_INCREF(r->step); - PyTuple_SET_ITEM(t, 2, r->step); + PyTuple_SET_ITEM(t, 2, Py_NewRef(r->step)); } } result = PyObject_Hash(t); @@ -946,10 +936,8 @@ longrangeiter_reduce(longrangeiterobject *r, PyObject *Py_UNUSED(ignored)) Py_DECREF(product); if (stop == NULL) return NULL; - Py_INCREF(r->start); - Py_INCREF(r->step); range = 
(PyObject*)make_range_object(&PyRange_Type, - r->start, stop, r->step); + Py_NewRef(r->start), stop, Py_NewRef(r->step)); if (range == NULL) { Py_DECREF(r->start); Py_DECREF(stop); @@ -982,8 +970,7 @@ longrangeiter_setstate(longrangeiterobject *r, PyObject *state) if (cmp > 0) state = r->len; } - Py_INCREF(state); - Py_XSETREF(r->index, state); + Py_XSETREF(r->index, Py_NewRef(state)); Py_RETURN_NONE; } @@ -1118,14 +1105,10 @@ range_iter(PyObject *seq) if (it == NULL) return NULL; - it->start = r->start; - it->step = r->step; - it->len = r->length; - it->index = _PyLong_GetZero(); - Py_INCREF(it->start); - Py_INCREF(it->step); - Py_INCREF(it->len); - Py_INCREF(it->index); + it->start = Py_NewRef(r->start); + it->step = Py_NewRef(r->step); + it->len = Py_NewRef(r->length); + it->index = Py_NewRef(_PyLong_GetZero()); return (PyObject *)it; } @@ -1206,8 +1189,7 @@ range_reverse(PyObject *seq, PyObject *Py_UNUSED(ignored)) it->index = it->start = it->step = NULL; /* start + (len - 1) * step */ - it->len = range->length; - Py_INCREF(it->len); + it->len = Py_NewRef(range->length); diff = PyNumber_Subtract(it->len, _PyLong_GetOne()); if (!diff) @@ -1228,8 +1210,7 @@ range_reverse(PyObject *seq, PyObject *Py_UNUSED(ignored)) if (!it->step) goto create_failure; - it->index = _PyLong_GetZero(); - Py_INCREF(it->index); + it->index = Py_NewRef(_PyLong_GetZero()); return (PyObject *)it; create_failure: diff --git a/Objects/setobject.c b/Objects/setobject.c index dd55a943010ab8..ae9e9b99446116 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -588,8 +588,7 @@ set_merge(PySetObject *so, PyObject *otherset) key = other_entry->key; if (key != NULL) { assert(so_entry->key == NULL); - Py_INCREF(key); - so_entry->key = key; + so_entry->key = Py_NewRef(key); so_entry->hash = other_entry->hash; } } @@ -607,8 +606,8 @@ set_merge(PySetObject *so, PyObject *otherset) for (i = other->mask + 1; i > 0 ; i--, other_entry++) { key = other_entry->key; if (key != NULL && key != dummy) { - Py_INCREF(key); - set_insert_clean(newtable, newmask, key, other_entry->hash); + set_insert_clean(newtable, newmask, Py_NewRef(key), + other_entry->hash); } } return 0; @@ -820,8 +819,7 @@ static PyObject *setiter_iternext(setiterobject *si) goto fail; si->len--; key = entry[i].key; - Py_INCREF(key); - return key; + return Py_NewRef(key); fail: si->si_set = NULL; @@ -868,8 +866,7 @@ set_iter(PySetObject *so) setiterobject *si = PyObject_GC_New(setiterobject, &PySetIter_Type); if (si == NULL) return NULL; - Py_INCREF(so); - si->si_set = so; + si->si_set = (PySetObject*)Py_NewRef(so); si->si_used = so->used; si->si_pos = 0; si->len = so->used; @@ -997,8 +994,7 @@ make_new_frozenset(PyTypeObject *type, PyObject *iterable) if (iterable != NULL && PyFrozenSet_CheckExact(iterable)) { /* frozenset(f) is idempotent */ - Py_INCREF(iterable); - return iterable; + return Py_NewRef(iterable); } return make_new_set(type, iterable); } @@ -1100,8 +1096,7 @@ static PyObject * frozenset_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) { if (PyFrozenSet_CheckExact(so)) { - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } return set_copy(so, NULL); } @@ -1173,8 +1168,7 @@ set_ior(PySetObject *so, PyObject *other) if (set_update_internal(so, other)) return NULL; - Py_INCREF(so); - return (PyObject *)so; + return Py_NewRef(so); } static PyObject * @@ -1264,12 +1258,11 @@ static PyObject * set_intersection_multi(PySetObject *so, PyObject *args) { Py_ssize_t i; - PyObject *result = (PyObject *)so; if (PyTuple_GET_SIZE(args) == 
0) return set_copy(so, NULL); - Py_INCREF(so); + PyObject *result = Py_NewRef(so); for (i=0 ; istep == Py_None) { - step = _PyLong_GetOne(); - Py_INCREF(step); + step = Py_NewRef(_PyLong_GetOne()); step_is_negative = 0; } else { @@ -435,16 +432,13 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, goto error; } else { - lower = _PyLong_GetZero(); - Py_INCREF(lower); - upper = length; - Py_INCREF(upper); + lower = Py_NewRef(_PyLong_GetZero()); + upper = Py_NewRef(length); } /* Compute start. */ if (self->start == Py_None) { - start = step_is_negative ? upper : lower; - Py_INCREF(start); + start = Py_NewRef(step_is_negative ? upper : lower); } else { start = evaluate_slice_index(self->start); @@ -454,8 +448,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (_PyLong_Sign(start) < 0) { /* start += length */ PyObject *tmp = PyNumber_Add(start, length); - Py_DECREF(start); - start = tmp; + Py_SETREF(start, tmp); if (start == NULL) goto error; @@ -463,9 +456,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(lower); - Py_DECREF(start); - start = lower; + Py_SETREF(start, Py_NewRef(lower)); } } else { @@ -473,17 +464,14 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(upper); - Py_DECREF(start); - start = upper; + Py_SETREF(start, Py_NewRef(upper)); } } } /* Compute stop. */ if (self->stop == Py_None) { - stop = step_is_negative ? lower : upper; - Py_INCREF(stop); + stop = Py_NewRef(step_is_negative ? lower : upper); } else { stop = evaluate_slice_index(self->stop); @@ -493,8 +481,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (_PyLong_Sign(stop) < 0) { /* stop += length */ PyObject *tmp = PyNumber_Add(stop, length); - Py_DECREF(stop); - stop = tmp; + Py_SETREF(stop, tmp); if (stop == NULL) goto error; @@ -502,9 +489,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(lower); - Py_DECREF(stop); - stop = lower; + Py_SETREF(stop, Py_NewRef(lower)); } } else { @@ -512,9 +497,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (cmp_result < 0) goto error; if (cmp_result) { - Py_INCREF(upper); - Py_DECREF(stop); - stop = upper; + Py_SETREF(stop, Py_NewRef(upper)); } } } @@ -609,8 +592,7 @@ slice_richcompare(PyObject *v, PyObject *w, int op) res = Py_False; break; } - Py_INCREF(res); - return res; + return Py_NewRef(res); } diff --git a/Objects/stringlib/count.h b/Objects/stringlib/count.h index f48500bf561f2c..e20edcd104b175 100644 --- a/Objects/stringlib/count.h +++ b/Objects/stringlib/count.h @@ -4,6 +4,11 @@ #error must include "stringlib/fastsearch.h" before including this module #endif +// gh-97982: Implementing asciilib_count() is not worth it, FASTSEARCH() does +// not specialize the code for ASCII strings. Use ucs1lib_count() for ASCII and +// UCS1 strings: it's the same than asciilib_count(). 
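Most of the Objects/ hunks above and below apply the same mechanical rewrite: a Py_INCREF()/Py_XINCREF() followed by an assignment or return is collapsed into Py_NewRef()/Py_XNewRef(), and a DECREF-then-assign pair becomes Py_SETREF()/Py_XSETREF(). A minimal sketch of the equivalences, separate from the patch itself; the helper names below are invented for illustration, only the Py_* calls are real CPython API:

#include <Python.h>

static PyObject *
keep(PyObject *obj)                 /* obj must not be NULL */
{
    /* Before: Py_INCREF(obj); return obj; */
    return Py_NewRef(obj);          /* take a new strong reference and return obj */
}

static PyObject *
keep_or_null(PyObject *obj)         /* obj may be NULL */
{
    /* Before: Py_XINCREF(obj); return obj; */
    return Py_XNewRef(obj);
}

static void
replace_slot(PyObject **slot, PyObject *value)
{
    /* Py_SETREF() assigns the new value first and releases the old one
     * afterwards, so a destructor that runs during the release never sees a
     * dangling *slot; Py_XSETREF() additionally tolerates an old NULL value. */
    Py_SETREF(*slot, Py_NewRef(value));
}

The same three helpers cover every conversion in the odict, range, set and slice hunks in this section. (The stringlib/count.h guard that follows is unrelated to this pattern: as the comment above explains, it simply stops asciilib from compiling its own copy of STRINGLIB(count), since ucs1lib_count() already covers ASCII strings.)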
+#if !STRINGLIB_IS_UNICODE || STRINGLIB_MAX_CHAR > 0x7Fu + Py_LOCAL_INLINE(Py_ssize_t) STRINGLIB(count)(const STRINGLIB_CHAR* str, Py_ssize_t str_len, const STRINGLIB_CHAR* sub, Py_ssize_t sub_len, @@ -24,4 +29,4 @@ STRINGLIB(count)(const STRINGLIB_CHAR* str, Py_ssize_t str_len, return count; } - +#endif diff --git a/Objects/stringlib/join.h b/Objects/stringlib/join.h index bb011f7db796d8..de6bd83ffe4c8b 100644 --- a/Objects/stringlib/join.h +++ b/Objects/stringlib/join.h @@ -63,8 +63,7 @@ STRINGLIB(bytes_join)(PyObject *sep, PyObject *iterable) item = PySequence_Fast_GET_ITEM(seq, i); if (PyBytes_CheckExact(item)) { /* Fast path. */ - Py_INCREF(item); - buffers[i].obj = item; + buffers[i].obj = Py_NewRef(item); buffers[i].buf = PyBytes_AS_STRING(item); buffers[i].len = PyBytes_GET_SIZE(item); } diff --git a/Objects/stringlib/transmogrify.h b/Objects/stringlib/transmogrify.h index e1165ea38e8256..71099bb586e809 100644 --- a/Objects/stringlib/transmogrify.h +++ b/Objects/stringlib/transmogrify.h @@ -17,8 +17,7 @@ return_self(PyObject *self) { #if !STRINGLIB_MUTABLE if (STRINGLIB_CHECK_EXACT(self)) { - Py_INCREF(self); - return self; + return Py_NewRef(self); } #endif return STRINGLIB_NEW(STRINGLIB_STR(self), STRINGLIB_LEN(self)); diff --git a/Objects/stringlib/unicode_format.h b/Objects/stringlib/unicode_format.h index a4eea7b91988b9..ccd7c77c0a03fd 100644 --- a/Objects/stringlib/unicode_format.h +++ b/Objects/stringlib/unicode_format.h @@ -473,8 +473,7 @@ get_field_object(SubString *input, PyObject *args, PyObject *kwargs, goto error; /* assign to obj */ - Py_DECREF(obj); - obj = tmp; + Py_SETREF(obj, tmp); } /* end of iterator, this is the non-error case */ if (ok == 1) @@ -825,8 +824,7 @@ output_markup(SubString *field_name, SubString *format_spec, goto done; /* do the assignment, transferring ownership: fieldobj = tmp */ - Py_DECREF(fieldobj); - fieldobj = tmp; + Py_SETREF(fieldobj, tmp); tmp = NULL; } @@ -1042,8 +1040,7 @@ formatteriter_next(formatteriterobject *it) otherwise create a one length string with the conversion character */ if (conversion == '\0') { - conversion_str = Py_None; - Py_INCREF(conversion_str); + conversion_str = Py_NewRef(Py_None); } else conversion_str = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, @@ -1121,8 +1118,7 @@ formatter_parser(PyObject *ignored, PyObject *self) return NULL; /* take ownership, give the object to the iterator */ - Py_INCREF(self); - it->str = self; + it->str = Py_NewRef(self); /* initialize the contained MarkupIterator */ MarkupIterator_init(&it->it_markup, (PyObject*)self, 0, PyUnicode_GET_LENGTH(self)); @@ -1265,8 +1261,7 @@ formatter_field_name_split(PyObject *ignored, PyObject *self) /* take ownership, give the object to the iterator. this is just to keep the field_name alive */ - Py_INCREF(self); - it->str = self; + it->str = Py_NewRef(self); /* Pass in auto_number = NULL. We'll return an empty string for first_obj in that case. 
*/ diff --git a/Objects/structseq.c b/Objects/structseq.c index 9a7013372e688c..100ccfef0a23c4 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -200,8 +200,7 @@ structseq_new_impl(PyTypeObject *type, PyObject *arg, PyObject *dict) } for (i = 0; i < len; ++i) { PyObject *v = PySequence_Fast_GET_ITEM(arg, i); - Py_INCREF(v); - res->ob_item[i] = v; + res->ob_item[i] = Py_NewRef(v); } Py_DECREF(arg); for (; i < max_len; ++i) { @@ -219,8 +218,7 @@ structseq_new_impl(PyTypeObject *type, PyObject *arg, PyObject *dict) ob = Py_None; } } - Py_INCREF(ob); - res->ob_item[i] = ob; + res->ob_item[i] = Py_NewRef(ob); } _PyObject_GC_TRACK(res); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 240af0a9075271..4405125d45e7cc 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -61,8 +61,7 @@ tuple_alloc(Py_ssize_t size) static inline PyObject * tuple_get_empty(void) { - Py_INCREF(&_Py_SINGLETON(tuple_empty)); - return (PyObject *)&_Py_SINGLETON(tuple_empty); + return Py_NewRef(&_Py_SINGLETON(tuple_empty)); } PyObject * @@ -171,8 +170,7 @@ PyTuple_Pack(Py_ssize_t n, ...) items = result->ob_item; for (i = 0; i < n; i++) { o = va_arg(vargs, PyObject *); - Py_INCREF(o); - items[i] = o; + items[i] = Py_NewRef(o); } va_end(vargs); _PyObject_GC_TRACK(result); @@ -367,8 +365,7 @@ tupleitem(PyTupleObject *a, Py_ssize_t i) PyErr_SetString(PyExc_IndexError, "tuple index out of range"); return NULL; } - Py_INCREF(a->ob_item[i]); - return a->ob_item[i]; + return Py_NewRef(a->ob_item[i]); } PyObject * @@ -385,8 +382,7 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) PyObject **dst = tuple->ob_item; for (Py_ssize_t i = 0; i < n; i++) { PyObject *item = src[i]; - Py_INCREF(item); - dst[i] = item; + dst[i] = Py_NewRef(item); } _PyObject_GC_TRACK(tuple); return (PyObject *)tuple; @@ -425,8 +421,7 @@ tupleslice(PyTupleObject *a, Py_ssize_t ilow, if (ihigh < ilow) ihigh = ilow; if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow); } @@ -449,8 +444,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) PyObject **src, **dest; PyTupleObject *np; if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) { - Py_INCREF(bb); - return bb; + return Py_NewRef(bb); } if (!PyTuple_Check(bb)) { PyErr_Format(PyExc_TypeError, @@ -461,8 +455,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) PyTupleObject *b = (PyTupleObject *)bb; if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) { - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX); size = Py_SIZE(a) + Py_SIZE(b); @@ -478,15 +471,13 @@ tupleconcat(PyTupleObject *a, PyObject *bb) dest = np->ob_item; for (i = 0; i < Py_SIZE(a); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } src = b->ob_item; dest = np->ob_item + Py_SIZE(a); for (i = 0; i < Py_SIZE(b); i++) { PyObject *v = src[i]; - Py_INCREF(v); - dest[i] = v; + dest[i] = Py_NewRef(v); } _PyObject_GC_TRACK(np); return (PyObject *)np; @@ -500,8 +491,7 @@ tuplerepeat(PyTupleObject *a, Py_ssize_t n) if (PyTuple_CheckExact(a)) { /* Since tuples are immutable, we can return a shared copy in this case */ - Py_INCREF(a); - return (PyObject *)a; + return Py_NewRef(a); } } if (input_size == 0 || n <= 0) { @@ -747,8 +737,7 @@ tuple_subtype_new(PyTypeObject *type, PyObject *iterable) } for (i = 0; i < n; i++) { item = PyTuple_GET_ITEM(tmp, i); - Py_INCREF(item); - 
PyTuple_SET_ITEM(newobj, i, item); + PyTuple_SET_ITEM(newobj, i, Py_NewRef(item)); } Py_DECREF(tmp); @@ -799,8 +788,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) else if (start == 0 && step == 1 && slicelength == PyTuple_GET_SIZE(self) && PyTuple_CheckExact(self)) { - Py_INCREF(self); - return (PyObject *)self; + return Py_NewRef(self); } else { PyTupleObject* result = tuple_alloc(slicelength); @@ -810,8 +798,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) dest = result->ob_item; for (cur = start, i = 0; i < slicelength; cur += step, i++) { - it = src[cur]; - Py_INCREF(it); + it = Py_NewRef(src[cur]); dest[i] = it; } @@ -1044,8 +1031,7 @@ tupleiter_next(tupleiterobject *it) if (it->it_index < PyTuple_GET_SIZE(seq)) { item = PyTuple_GET_ITEM(seq, it->it_index); ++it->it_index; - Py_INCREF(item); - return item; + return Py_NewRef(item); } it->it_seq = NULL; @@ -1146,8 +1132,7 @@ tuple_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = (PyTupleObject *)seq; + it->it_seq = (PyTupleObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 196a6aee4993b8..b993aa405f6b6a 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -4,6 +4,7 @@ #include "pycore_call.h" #include "pycore_code.h" // CO_FAST_FREE #include "pycore_compile.h" // _Py_Mangle() +#include "pycore_dict.h" // _PyDict_KeysSize() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "pycore_object.h" // _PyType_HasFeature() @@ -43,9 +44,7 @@ class object "PyObject *" "&PyBaseObject_Type" PyUnicode_IS_READY(name) && \ (PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE) -// bpo-42745: next_version_tag remains shared by all interpreters because of static types -// Used to set PyTypeObject.tp_version_tag -static unsigned int next_version_tag = 1; +#define next_version_tag (_PyRuntime.types.next_version_tag) typedef struct PySlot_Offset { short subslot_offset; @@ -372,6 +371,83 @@ _PyTypes_Fini(PyInterpreterState *interp) static PyObject * lookup_subclasses(PyTypeObject *); +int +PyType_AddWatcher(PyType_WatchCallback callback) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + + for (int i = 0; i < TYPE_MAX_WATCHERS; i++) { + if (!interp->type_watchers[i]) { + interp->type_watchers[i] = callback; + return i; + } + } + + PyErr_SetString(PyExc_RuntimeError, "no more type watcher IDs available"); + return -1; +} + +static inline int +validate_watcher_id(PyInterpreterState *interp, int watcher_id) +{ + if (watcher_id < 0 || watcher_id >= TYPE_MAX_WATCHERS) { + PyErr_Format(PyExc_ValueError, "Invalid type watcher ID %d", watcher_id); + return -1; + } + if (!interp->type_watchers[watcher_id]) { + PyErr_Format(PyExc_ValueError, "No type watcher set for ID %d", watcher_id); + return -1; + } + return 0; +} + +int +PyType_ClearWatcher(int watcher_id) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id) < 0) { + return -1; + } + interp->type_watchers[watcher_id] = NULL; + return 0; +} + +static int assign_version_tag(PyTypeObject *type); + +int +PyType_Watch(int watcher_id, PyObject* obj) +{ + if (!PyType_Check(obj)) { + PyErr_SetString(PyExc_ValueError, "Cannot watch non-type"); + return -1; + } + PyTypeObject *type = (PyTypeObject *)obj; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id) < 0) { + return -1; + } + // 
ensure we will get a callback on the next modification + assign_version_tag(type); + type->tp_watched |= (1 << watcher_id); + return 0; +} + +int +PyType_Unwatch(int watcher_id, PyObject* obj) +{ + if (!PyType_Check(obj)) { + PyErr_SetString(PyExc_ValueError, "Cannot watch non-type"); + return -1; + } + PyTypeObject *type = (PyTypeObject *)obj; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (validate_watcher_id(interp, watcher_id)) { + return -1; + } + type->tp_watched &= ~(1 << watcher_id); + return 0; +} + void PyType_Modified(PyTypeObject *type) { @@ -409,6 +485,23 @@ PyType_Modified(PyTypeObject *type) } } + if (type->tp_watched) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + int bits = type->tp_watched; + int i = 0; + while(bits && i < TYPE_MAX_WATCHERS) { + if (bits & 1) { + PyType_WatchCallback cb = interp->type_watchers[i]; + if (cb && (cb(type) < 0)) { + PyErr_WriteUnraisable((PyObject *)type); + } + } + i += 1; + bits >>= 1; + } + } + + type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; type->tp_version_tag = 0; /* 0 is not a valid version tag */ } @@ -467,7 +560,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { } static int -assign_version_tag(struct type_cache *cache, PyTypeObject *type) +assign_version_tag(PyTypeObject *type) { /* Ensure that the tp_version_tag is valid and set Py_TPFLAGS_VALID_VERSION_TAG. To respect the invariant, this @@ -492,7 +585,7 @@ assign_version_tag(struct type_cache *cache, PyTypeObject *type) Py_ssize_t n = PyTuple_GET_SIZE(bases); for (Py_ssize_t i = 0; i < n; i++) { PyObject *b = PyTuple_GET_ITEM(bases, i); - if (!assign_version_tag(cache, _PyType_CAST(b))) + if (!assign_version_tag(_PyType_CAST(b))) return 0; } type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; @@ -559,8 +652,7 @@ type_name(PyTypeObject *type, void *context) if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) { PyHeapTypeObject* et = (PyHeapTypeObject*)type; - Py_INCREF(et->ht_name); - return et->ht_name; + return Py_NewRef(et->ht_name); } else { return PyUnicode_FromString(_PyType_Name(type)); @@ -572,8 +664,7 @@ type_qualname(PyTypeObject *type, void *context) { if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) { PyHeapTypeObject* et = (PyHeapTypeObject*)type; - Py_INCREF(et->ht_qualname); - return et->ht_qualname; + return Py_NewRef(et->ht_qualname); } else { return PyUnicode_FromString(_PyType_Name(type)); @@ -605,8 +696,7 @@ type_set_name(PyTypeObject *type, PyObject *value, void *context) } type->tp_name = tp_name; - Py_INCREF(value); - Py_SETREF(((PyHeapTypeObject*)type)->ht_name, value); + Py_SETREF(((PyHeapTypeObject*)type)->ht_name, Py_NewRef(value)); return 0; } @@ -626,8 +716,7 @@ type_set_qualname(PyTypeObject *type, PyObject *value, void *context) } et = (PyHeapTypeObject*)type; - Py_INCREF(value); - Py_SETREF(et->ht_qualname, value); + Py_SETREF(et->ht_qualname, Py_NewRef(value)); return 0; } @@ -655,8 +744,7 @@ type_module(PyTypeObject *type, void *context) PyUnicode_InternInPlace(&mod); } else { - mod = &_Py_ID(builtins); - Py_INCREF(mod); + mod = Py_NewRef(&_Py_ID(builtins)); } } return mod; @@ -688,8 +776,7 @@ type_abstractmethods(PyTypeObject *type, void *context) } return NULL; } - Py_INCREF(mod); - return mod; + return Py_NewRef(mod); } static int @@ -727,8 +814,7 @@ type_set_abstractmethods(PyTypeObject *type, PyObject *value, void *context) static PyObject * type_get_bases(PyTypeObject *type, void *context) { - Py_INCREF(type->tp_bases); - return type->tp_bases; + return Py_NewRef(type->tp_bases); } static PyTypeObject 
*best_base(PyObject *); @@ -922,8 +1008,7 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) "", 2, 3, &cls, &new_mro, &old_mro); /* Do not rollback if cls has a newer version of MRO. */ if (cls->tp_mro == new_mro) { - Py_XINCREF(old_mro); - cls->tp_mro = old_mro; + cls->tp_mro = Py_XNewRef(old_mro); Py_DECREF(new_mro); } } @@ -967,8 +1052,7 @@ type_get_doc(PyTypeObject *type, void *context) result = PyDict_GetItemWithError(type->tp_dict, &_Py_ID(__doc__)); if (result == NULL) { if (!PyErr_Occurred()) { - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); } } else if (Py_TYPE(result)->tp_descr_get) { @@ -1129,8 +1213,7 @@ type_repr(PyTypeObject *type) if (mod == NULL) PyErr_Clear(); else if (!PyUnicode_Check(mod)) { - Py_DECREF(mod); - mod = NULL; + Py_SETREF(mod, NULL); } name = type_qualname(type, NULL); if (name == NULL) { @@ -1170,8 +1253,7 @@ type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) if (nargs == 1 && (kwds == NULL || !PyDict_GET_SIZE(kwds))) { obj = (PyObject *) Py_TYPE(PyTuple_GET_ITEM(args, 0)); - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } /* SF bug 475327 -- if that didn't trigger, we need 3 @@ -1205,8 +1287,7 @@ type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) int res = type->tp_init(obj, args, kwds); if (res < 0) { assert(_PyErr_Occurred(tstate)); - Py_DECREF(obj); - obj = NULL; + Py_SETREF(obj, NULL); } else { assert(!_PyErr_Occurred(tstate)); @@ -2050,12 +2131,11 @@ mro_implementation(PyTypeObject *type) return NULL; } - Py_INCREF(type); - PyTuple_SET_ITEM(result, 0, (PyObject *) type); + ; + PyTuple_SET_ITEM(result, 0, Py_NewRef(type)); for (Py_ssize_t i = 0; i < k; i++) { PyObject *cls = PyTuple_GET_ITEM(base->tp_mro, i); - Py_INCREF(cls); - PyTuple_SET_ITEM(result, i + 1, cls); + PyTuple_SET_ITEM(result, i + 1, Py_NewRef(cls)); } return result; } @@ -2091,8 +2171,7 @@ mro_implementation(PyTypeObject *type) return NULL; } - Py_INCREF(type); - PyList_SET_ITEM(result, 0, (PyObject *)type); + PyList_SET_ITEM(result, 0, Py_NewRef(type)); if (pmerge(result, to_merge, n + 1) < 0) { Py_CLEAR(result); } @@ -2237,8 +2316,7 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) /* Keep a reference to be able to do a reentrancy check below. Don't let old_mro be GC'ed and its address be reused for another object, like (suddenly!) a new tp_mro. */ - old_mro = type->tp_mro; - Py_XINCREF(old_mro); + old_mro = Py_XNewRef(type->tp_mro); new_mro = mro_invoke(type); /* might cause reentrance */ reent = (type->tp_mro != old_mro); Py_XDECREF(old_mro); @@ -2456,8 +2534,7 @@ subtype_setdict(PyObject *obj, PyObject *value, void *context) "not a '%.200s'", Py_TYPE(value)->tp_name); return -1; } - Py_XINCREF(value); - Py_XSETREF(*dictptr, value); + Py_XSETREF(*dictptr, Py_XNewRef(value)); return 0; } @@ -2484,8 +2561,7 @@ subtype_getweakref(PyObject *obj, void *context) result = Py_None; else result = *weaklistptr; - Py_INCREF(result); - return result; + return Py_NewRef(result); } /* Three variants on the subtype_getsets list. 
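The first typeobject.c hunks in this section add a public type-watcher API: PyType_AddWatcher() claims one of TYPE_MAX_WATCHERS per-interpreter callback slots, PyType_Watch() sets the matching bit in tp_watched, and PyType_Modified() then calls every callback whose bit is set on the modified type, reporting a negative return value via PyErr_WriteUnraisable(). A minimal usage sketch, not taken from the patch (the watcher function and its registration helper are invented):

#include <Python.h>

/* Called from PyType_Modified() for every watched type that changes. */
static int
log_type_change(PyTypeObject *type)
{
    fprintf(stderr, "type %s was modified\n", type->tp_name);
    return 0;                       /* a negative result is reported as unraisable */
}

static int
start_watching_list(void)
{
    int watcher_id = PyType_AddWatcher(log_type_change);     /* claim a slot */
    if (watcher_id < 0) {
        return -1;                                           /* no free slots */
    }
    if (PyType_Watch(watcher_id, (PyObject *)&PyList_Type) < 0) {
        return -1;
    }
    /* Undo with PyType_Unwatch(watcher_id, obj) and PyType_ClearWatcher(watcher_id). */
    return watcher_id;
}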
*/ @@ -4111,7 +4187,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return NULL; } - if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(cache, type)) { + if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) { h = MCACHE_HASH_METHOD(type, name); struct type_cache_entry *entry = &cache->hashtable[h]; entry->version = type->tp_version_tag; @@ -4861,9 +4937,10 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyObject *abstract_methods; PyObject *sorted_methods; PyObject *joined; + PyObject* comma_w_quotes_sep; Py_ssize_t method_count; - /* Compute ", ".join(sorted(type.__abstractmethods__)) + /* Compute "', '".join(sorted(type.__abstractmethods__)) into joined. */ abstract_methods = type_abstractmethods(type, NULL); if (abstract_methods == NULL) @@ -4876,22 +4953,28 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(sorted_methods); return NULL; } - _Py_DECLARE_STR(comma_sep, ", "); - joined = PyUnicode_Join(&_Py_STR(comma_sep), sorted_methods); + comma_w_quotes_sep = PyUnicode_FromString("', '"); + joined = PyUnicode_Join(comma_w_quotes_sep, sorted_methods); method_count = PyObject_Length(sorted_methods); Py_DECREF(sorted_methods); - if (joined == NULL) + if (joined == NULL) { + Py_DECREF(comma_w_quotes_sep); return NULL; - if (method_count == -1) + } + if (method_count == -1) { + Py_DECREF(comma_w_quotes_sep); + Py_DECREF(joined); return NULL; + } PyErr_Format(PyExc_TypeError, "Can't instantiate abstract class %s " - "without an implementation for abstract method%s %U", + "without an implementation for abstract method%s '%U'", type->tp_name, method_count > 1 ? "s" : "", joined); Py_DECREF(joined); + Py_DECREF(comma_w_quotes_sep); return NULL; } PyObject *obj = type->tp_alloc(type, 0); @@ -4922,8 +5005,7 @@ object_repr(PyObject *self) if (mod == NULL) PyErr_Clear(); else if (!PyUnicode_Check(mod)) { - Py_DECREF(mod); - mod = NULL; + Py_SETREF(mod, NULL); } name = type_qualname(type, NULL); if (name == NULL) { @@ -4962,16 +5044,14 @@ object_richcompare(PyObject *self, PyObject *other, int op) /* Return NotImplemented instead of False, so if two objects are compared, both get a chance at the comparison. See issue #1393. */ - res = (self == other) ? Py_True : Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef((self == other) ? Py_True : Py_NotImplemented); break; case Py_NE: /* By default, __ne__() delegates to __eq__() and inverts the result, unless the latter returns NotImplemented. 
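The object_new() change earlier in this hunk only affects the wording of the abstract-class error: the (already sorted) method names are now joined with "', '" and the whole list is single-quoted. For illustration only (class and method names invented), instantiating an abstract class with unimplemented methods eggs and ham now reports:

/* Before: Can't instantiate abstract class Spam
 *         without an implementation for abstract methods eggs, ham
 * After:  Can't instantiate abstract class Spam
 *         without an implementation for abstract methods 'eggs', 'ham'
 */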
*/ if (Py_TYPE(self)->tp_richcompare == NULL) { - res = Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef(Py_NotImplemented); break; } res = (*Py_TYPE(self)->tp_richcompare)(self, other, Py_EQ); @@ -4982,17 +5062,15 @@ object_richcompare(PyObject *self, PyObject *other, int op) res = NULL; else { if (ok) - res = Py_False; + res = Py_NewRef(Py_False); else - res = Py_True; - Py_INCREF(res); + res = Py_NewRef(Py_True); } } break; default: - res = Py_NotImplemented; - Py_INCREF(res); + res = Py_NewRef(Py_NotImplemented); break; } @@ -5002,8 +5080,7 @@ object_richcompare(PyObject *self, PyObject *other, int op) static PyObject * object_get_class(PyObject *self, void *closure) { - Py_INCREF(Py_TYPE(self)); - return (PyObject *)(Py_TYPE(self)); + return Py_NewRef(Py_TYPE(self)); } static int @@ -5258,8 +5335,7 @@ _PyType_GetSlotNames(PyTypeObject *cls) cls->tp_name, Py_TYPE(slotnames)->tp_name); return NULL; } - Py_INCREF(slotnames); - return slotnames; + return Py_NewRef(slotnames); } else { if (PyErr_Occurred()) { @@ -5305,8 +5381,7 @@ object_getstate_default(PyObject *obj, int required) } if (_PyObject_IsInstanceDictEmpty(obj)) { - state = Py_None; - Py_INCREF(state); + state = Py_NewRef(Py_None); } else { state = PyObject_GenericGetDict(obj, NULL); @@ -5360,8 +5435,7 @@ object_getstate_default(PyObject *obj, int required) for (i = 0; i < slotnames_size; i++) { PyObject *name, *value; - name = PyList_GET_ITEM(slotnames, i); - Py_INCREF(name); + name = Py_NewRef(PyList_GET_ITEM(slotnames, i)); if (_PyObject_LookupAttr(obj, name, &value) < 0) { Py_DECREF(name); goto error; @@ -5493,10 +5567,8 @@ _PyObject_GetNewArguments(PyObject *obj, PyObject **args, PyObject **kwargs) Py_DECREF(newargs); return -1; } - *args = PyTuple_GET_ITEM(newargs, 0); - Py_INCREF(*args); - *kwargs = PyTuple_GET_ITEM(newargs, 1); - Py_INCREF(*kwargs); + *args = Py_NewRef(PyTuple_GET_ITEM(newargs, 0)); + *kwargs = Py_NewRef(PyTuple_GET_ITEM(newargs, 1)); Py_DECREF(newargs); /* XXX We should perhaps allow None to be passed here. */ @@ -5564,8 +5636,7 @@ _PyObject_GetItemsIter(PyObject *obj, PyObject **listitems, } if (!PyList_Check(obj)) { - *listitems = Py_None; - Py_INCREF(*listitems); + *listitems = Py_NewRef(Py_None); } else { *listitems = PyObject_GetIter(obj); @@ -5574,8 +5645,7 @@ _PyObject_GetItemsIter(PyObject *obj, PyObject **listitems, } if (!PyDict_Check(obj)) { - *dictitems = Py_None; - Py_INCREF(*dictitems); + *dictitems = Py_NewRef(Py_None); } else { PyObject *items = PyObject_CallMethodNoArgs(obj, &_Py_ID(items)); @@ -5640,12 +5710,10 @@ reduce_newobj(PyObject *obj) return NULL; } cls = (PyObject *) Py_TYPE(obj); - Py_INCREF(cls); - PyTuple_SET_ITEM(newargs, 0, cls); + PyTuple_SET_ITEM(newargs, 0, Py_NewRef(cls)); for (i = 0; i < n; i++) { PyObject *v = PyTuple_GET_ITEM(args, i); - Py_INCREF(v); - PyTuple_SET_ITEM(newargs, i+1, v); + PyTuple_SET_ITEM(newargs, i+1, Py_NewRef(v)); } Py_XDECREF(args); } @@ -5753,7 +5821,8 @@ static PyObject * object___reduce_ex___impl(PyObject *self, int protocol) /*[clinic end generated code: output=2e157766f6b50094 input=f326b43fb8a4c5ff]*/ { - static PyObject *objreduce; +#define objreduce \ + (_Py_INTERP_CACHED_OBJECT(_PyInterpreterState_Get(), objreduce)) PyObject *reduce, *res; if (objreduce == NULL) { @@ -5789,6 +5858,7 @@ object___reduce_ex___impl(PyObject *self, int protocol) } return _common_reduce(self, protocol); +#undef objreduce } static PyObject * @@ -5895,8 +5965,7 @@ object___dir___impl(PyObject *self) else { /* Copy __dict__ to avoid mutating it. 
*/ PyObject *temp = PyDict_Copy(dict); - Py_DECREF(dict); - dict = temp; + Py_SETREF(dict, temp); } if (dict == NULL) @@ -8035,8 +8104,7 @@ slot_tp_hash(PyObject *self) func = lookup_maybe_method(self, &_Py_ID(__hash__), &unbound); if (func == Py_None) { - Py_DECREF(func); - func = NULL; + Py_SETREF(func, NULL); } if (func == NULL) { @@ -8280,8 +8348,7 @@ slot_tp_descr_get(PyObject *self, PyObject *obj, PyObject *type) /* Avoid further slowdowns */ if (tp->tp_descr_get == slot_tp_descr_get) tp->tp_descr_get = NULL; - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (obj == NULL) obj = Py_None; @@ -8450,8 +8517,6 @@ __ne__ etc. all map to tp_richcompare) and one name may map to multiple slots an all-zero entry. */ -typedef struct wrapperbase slotdef; - #undef TPSLOT #undef FLSLOT #undef AMSLOT @@ -8500,7 +8565,7 @@ typedef struct wrapperbase slotdef; ETSLOT(NAME, as_number.SLOT, FUNCTION, wrap_binaryfunc_r, \ #NAME "($self, value, /)\n--\n\n" DOC) -static slotdef slotdefs[] = { +static pytype_slotdef slotdefs[] = { TPSLOT(__getattribute__, tp_getattr, NULL, NULL, ""), TPSLOT(__getattr__, tp_getattr, NULL, NULL, ""), TPSLOT(__setattr__, tp_setattr, NULL, NULL, ""), @@ -8725,12 +8790,6 @@ slotptr(PyTypeObject *type, int ioffset) return (void **)ptr; } -/* Length of array of slotdef pointers used to store slots with the - same __name__. There should be at most MAX_EQUIV-1 slotdef entries with - the same __name__, for any __name__. Since that's a static property, it is - appropriate to declare fixed-size arrays for this. */ -#define MAX_EQUIV 10 - /* Return a slot pointer for a given name, but ONLY if the attribute has exactly one slot function. The name must be an interned string. */ static void ** @@ -8739,9 +8798,10 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) /* XXX Maybe this could be optimized more -- but is it worth it? */ /* pname and ptrs act as a little cache */ - static PyObject *pname; - static slotdef *ptrs[MAX_EQUIV]; - slotdef *p, **pp; + PyInterpreterState *interp = _PyInterpreterState_Get(); +#define pname _Py_INTERP_CACHED_OBJECT(interp, type_slots_pname) +#define ptrs _Py_INTERP_CACHED_OBJECT(interp, type_slots_ptrs) + pytype_slotdef *p, **pp; void **res, **ptr; if (pname != name) { @@ -8768,6 +8828,8 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) res = ptr; } return res; +#undef pname +#undef ptrs } @@ -8825,8 +8887,8 @@ resolve_slotdups(PyTypeObject *type, PyObject *name) * When done, return a pointer to the next slotdef with a different offset, * because that's convenient for fixup_slot_dispatchers(). This function never * sets an exception: if an internal error happens (unlikely), it's ignored. 
*/ -static slotdef * -update_one_slot(PyTypeObject *type, slotdef *p) +static pytype_slotdef * +update_one_slot(PyTypeObject *type, pytype_slotdef *p) { PyObject *descr; PyWrapperDescrObject *d; @@ -8941,7 +9003,7 @@ update_one_slot(PyTypeObject *type, slotdef *p) static int update_slots_callback(PyTypeObject *type, void *data) { - slotdef **pp = (slotdef **)data; + pytype_slotdef **pp = (pytype_slotdef **)data; for (; *pp; pp++) { update_one_slot(type, *pp); } @@ -8952,9 +9014,9 @@ update_slots_callback(PyTypeObject *type, void *data) static int update_slot(PyTypeObject *type, PyObject *name) { - slotdef *ptrs[MAX_EQUIV]; - slotdef *p; - slotdef **pp; + pytype_slotdef *ptrs[MAX_EQUIV]; + pytype_slotdef *p; + pytype_slotdef **pp; int offset; assert(PyUnicode_CheckExact(name)); @@ -8991,7 +9053,7 @@ static void fixup_slot_dispatchers(PyTypeObject *type) { assert(!PyErr_Occurred()); - for (slotdef *p = slotdefs; p->name; ) { + for (pytype_slotdef *p = slotdefs; p->name; ) { p = update_one_slot(type, p); } } @@ -8999,7 +9061,7 @@ fixup_slot_dispatchers(PyTypeObject *type) static void update_all_slots(PyTypeObject* type) { - slotdef *p; + pytype_slotdef *p; /* Clear the VALID_VERSION flag of 'type' and all its subclasses. */ PyType_Modified(type); @@ -9170,7 +9232,7 @@ static int add_operators(PyTypeObject *type) { PyObject *dict = type->tp_dict; - slotdef *p; + pytype_slotdef *p; PyObject *descr; void **ptr; @@ -9310,8 +9372,7 @@ super_getattro(PyObject *self, PyObject *name) (See SF ID #743627) */ (su->obj == (PyObject *)starttype) ? NULL : su->obj, (PyObject *)starttype); - Py_DECREF(res); - res = res2; + Py_SETREF(res, res2); } Py_DECREF(mro); @@ -9350,14 +9411,12 @@ supercheck(PyTypeObject *type, PyObject *obj) /* Check for first bullet above (special case) */ if (PyType_Check(obj) && PyType_IsSubtype((PyTypeObject *)obj, type)) { - Py_INCREF(obj); - return (PyTypeObject *)obj; + return (PyTypeObject *)Py_NewRef(obj); } /* Normal case */ if (PyType_IsSubtype(Py_TYPE(obj), type)) { - Py_INCREF(Py_TYPE(obj)); - return Py_TYPE(obj); + return (PyTypeObject*)Py_NewRef(Py_TYPE(obj)); } else { /* Try the slow way */ @@ -9393,8 +9452,7 @@ super_descr_get(PyObject *self, PyObject *obj, PyObject *type) if (obj == NULL || obj == Py_None || su->obj != NULL) { /* Not binding to an object, or already bound */ - Py_INCREF(self); - return self; + return Py_NewRef(self); } if (!Py_IS_TYPE(su, &PySuper_Type)) /* If su is an instance of a (strict) subclass of super, @@ -9410,10 +9468,8 @@ super_descr_get(PyObject *self, PyObject *obj, PyObject *type) NULL, NULL); if (newobj == NULL) return NULL; - Py_INCREF(su->type); - Py_INCREF(obj); - newobj->type = su->type; - newobj->obj = obj; + newobj->type = (PyTypeObject*)Py_NewRef(su->type); + newobj->obj = Py_NewRef(obj); newobj->obj_type = obj_type; return (PyObject *)newobj; } @@ -9537,8 +9593,7 @@ super_init_impl(PyObject *self, PyTypeObject *type, PyObject *obj) { return -1; Py_INCREF(obj); } - Py_INCREF(type); - Py_XSETREF(su->type, type); + Py_XSETREF(su->type, (PyTypeObject*)Py_NewRef(type)); Py_XSETREF(su->obj, obj); Py_XSETREF(su->obj_type, obj_type); return 0; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index bd169ed714212d..55f029dd504ca0 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -52,9 +52,9 @@ OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
#include "pycore_pathconfig.h" // _Py_DumpPathConfig() #include "pycore_pylifecycle.h" // _Py_SetFileSystemEncoding() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_runtime_init.h" // _PyUnicode_InitStaticStrings() #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI #include "pycore_unicodeobject.h" // struct _Py_unicode_state +#include "pycore_unicodeobject_generated.h" // _PyUnicode_InitStaticStrings() #include "stringlib/eq.h" // unicode_eq() #ifdef MS_WINDOWS @@ -221,8 +221,7 @@ static inline PyObject* unicode_get_empty(void) static inline PyObject* unicode_new_empty(void) { PyObject *empty = unicode_get_empty(); - Py_INCREF(empty); - return empty; + return Py_NewRef(empty); } /* This dictionary holds all interned unicode strings. Note that references @@ -611,8 +610,7 @@ static PyObject* unicode_result_unchanged(PyObject *unicode) { if (PyUnicode_CheckExact(unicode)) { - Py_INCREF(unicode); - return unicode; + return Py_NewRef(unicode); } else /* Subtype -- return genuine unicode string with the same value. */ @@ -1858,7 +1856,7 @@ _PyUnicode_FromId(_Py_Identifier *id) Py_ssize_t index = _Py_atomic_size_get(&id->index); if (index < 0) { - struct _Py_unicode_runtime_ids *rt_ids = &interp->runtime->unicode_ids; + struct _Py_unicode_runtime_ids *rt_ids = &interp->runtime->unicode_state.ids; PyThread_acquire_lock(rt_ids->lock, WAIT_LOCK); // Check again to detect concurrent access. Another thread can have @@ -2947,8 +2945,7 @@ PyUnicode_FromObject(PyObject *obj) /* XXX Perhaps we should make this API an alias of PyObject_Str() instead ?! */ if (PyUnicode_CheckExact(obj)) { - Py_INCREF(obj); - return obj; + return Py_NewRef(obj); } if (PyUnicode_Check(obj)) { /* For a Unicode subtype that's not a Unicode object, @@ -3619,48 +3616,25 @@ PyUnicode_FSConverter(PyObject* arg, void* addr) int PyUnicode_FSDecoder(PyObject* arg, void* addr) { - int is_buffer = 0; - PyObject *path = NULL; - PyObject *output = NULL; if (arg == NULL) { Py_DECREF(*(PyObject**)addr); *(PyObject**)addr = NULL; return 1; } - is_buffer = PyObject_CheckBuffer(arg); - if (!is_buffer) { - path = PyOS_FSPath(arg); - if (path == NULL) { - return 0; - } - } - else { - path = arg; - Py_INCREF(arg); + PyObject *path = PyOS_FSPath(arg); + if (path == NULL) { + return 0; } + PyObject *output = NULL; if (PyUnicode_Check(path)) { output = path; } - else if (PyBytes_Check(path) || is_buffer) { - PyObject *path_bytes = NULL; - - if (!PyBytes_Check(path) && - PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "path should be string, bytes, or os.PathLike, not %.200s", - Py_TYPE(arg)->tp_name)) { - Py_DECREF(path); - return 0; - } - path_bytes = PyBytes_FromObject(path); + else if (PyBytes_Check(path)) { + output = PyUnicode_DecodeFSDefaultAndSize(PyBytes_AS_STRING(path), + PyBytes_GET_SIZE(path)); Py_DECREF(path); - if (!path_bytes) { - return 0; - } - output = PyUnicode_DecodeFSDefaultAndSize(PyBytes_AS_STRING(path_bytes), - PyBytes_GET_SIZE(path_bytes)); - Py_DECREF(path_bytes); if (!output) { return 0; } @@ -3672,6 +3646,7 @@ PyUnicode_FSDecoder(PyObject* arg, void* addr) Py_DECREF(path); return 0; } + if (findchar(PyUnicode_DATA(output), PyUnicode_KIND(output), PyUnicode_GET_LENGTH(output), 0, 1) >= 0) { PyErr_SetString(PyExc_ValueError, "embedded null character"); @@ -8690,8 +8665,7 @@ _PyUnicode_TransformDecimalAndSpaceToASCII(PyObject *unicode) } if (PyUnicode_IS_ASCII(unicode)) { /* If the string is already ASCII, just return the same string */ - Py_INCREF(unicode); - return unicode; + return 
Py_NewRef(unicode); } Py_ssize_t len = PyUnicode_GET_LENGTH(unicode); @@ -8964,21 +8938,20 @@ _PyUnicode_InsertThousandsGrouping( return count; } - -Py_ssize_t -PyUnicode_Count(PyObject *str, - PyObject *substr, - Py_ssize_t start, - Py_ssize_t end) +static Py_ssize_t +unicode_count_impl(PyObject *str, + PyObject *substr, + Py_ssize_t start, + Py_ssize_t end) { + assert(PyUnicode_Check(str)); + assert(PyUnicode_Check(substr)); + Py_ssize_t result; int kind1, kind2; const void *buf1 = NULL, *buf2 = NULL; Py_ssize_t len1, len2; - if (ensure_unicode(str) < 0 || ensure_unicode(substr) < 0) - return -1; - kind1 = PyUnicode_KIND(str); kind2 = PyUnicode_KIND(substr); if (kind1 < kind2) @@ -8998,18 +8971,13 @@ PyUnicode_Count(PyObject *str, goto onError; } + // We don't reuse `anylib_count` here because of the explicit casts. switch (kind1) { case PyUnicode_1BYTE_KIND: - if (PyUnicode_IS_ASCII(str) && PyUnicode_IS_ASCII(substr)) - result = asciilib_count( - ((const Py_UCS1*)buf1) + start, end - start, - buf2, len2, PY_SSIZE_T_MAX - ); - else - result = ucs1lib_count( - ((const Py_UCS1*)buf1) + start, end - start, - buf2, len2, PY_SSIZE_T_MAX - ); + result = ucs1lib_count( + ((const Py_UCS1*)buf1) + start, end - start, + buf2, len2, PY_SSIZE_T_MAX + ); break; case PyUnicode_2BYTE_KIND: result = ucs2lib_count( @@ -9039,6 +9007,18 @@ PyUnicode_Count(PyObject *str, return -1; } +Py_ssize_t +PyUnicode_Count(PyObject *str, + PyObject *substr, + Py_ssize_t start, + Py_ssize_t end) +{ + if (ensure_unicode(str) < 0 || ensure_unicode(substr) < 0) + return -1; + + return unicode_count_impl(str, substr, start, end); +} + Py_ssize_t PyUnicode_Find(PyObject *str, PyObject *substr, @@ -9429,8 +9409,7 @@ _PyUnicode_JoinArray(PyObject *separator, PyObject *const *items, Py_ssize_t seq if (seqlen == 1) { if (PyUnicode_CheckExact(items[0])) { res = items[0]; - Py_INCREF(res); - return res; + return Py_NewRef(res); } seplen = 0; maxchar = 0; @@ -9749,8 +9728,7 @@ split(PyObject *self, out = PyList_New(1); if (out == NULL) return NULL; - Py_INCREF(self); - PyList_SET_ITEM(out, 0, self); + PyList_SET_ITEM(out, 0, Py_NewRef(self)); return out; } buf1 = PyUnicode_DATA(self); @@ -9842,8 +9820,7 @@ rsplit(PyObject *self, out = PyList_New(1); if (out == NULL) return NULL; - Py_INCREF(self); - PyList_SET_ITEM(out, 0, self); + PyList_SET_ITEM(out, 0, Py_NewRef(self)); return out; } buf1 = PyUnicode_DATA(self); @@ -9904,10 +9881,7 @@ anylib_count(int kind, PyObject *sstr, const void* sbuf, Py_ssize_t slen, { switch (kind) { case PyUnicode_1BYTE_KIND: - if (PyUnicode_IS_ASCII(sstr) && PyUnicode_IS_ASCII(str1)) - return asciilib_count(sbuf, slen, buf1, len1, maxcount); - else - return ucs1lib_count(sbuf, slen, buf1, len1, maxcount); + return ucs1lib_count(sbuf, slen, buf1, len1, maxcount); case PyUnicode_2BYTE_KIND: return ucs2lib_count(sbuf, slen, buf1, len1, maxcount); case PyUnicode_4BYTE_KIND: @@ -10463,8 +10437,8 @@ unicode_compare_eq(PyObject *str1, PyObject *str2) int _PyUnicode_Equal(PyObject *str1, PyObject *str2) { - assert(PyUnicode_CheckExact(str1)); - assert(PyUnicode_CheckExact(str2)); + assert(PyUnicode_Check(str1)); + assert(PyUnicode_Check(str2)); if (str1 == str2) { return 1; } @@ -10765,8 +10739,7 @@ PyUnicode_Append(PyObject **p_left, PyObject *right) PyObject *empty = unicode_get_empty(); // Borrowed reference if (left == empty) { Py_DECREF(left); - Py_INCREF(right); - *p_left = right; + *p_left = Py_NewRef(right); return; } if (right == empty) { @@ -10857,62 +10830,16 @@ unicode_count(PyObject *self, 
PyObject *args) PyObject *substring = NULL; /* initialize to fix a compiler warning */ Py_ssize_t start = 0; Py_ssize_t end = PY_SSIZE_T_MAX; - PyObject *result; - int kind1, kind2; - const void *buf1, *buf2; - Py_ssize_t len1, len2, iresult; + Py_ssize_t result; if (!parse_args_finds_unicode("count", args, &substring, &start, &end)) return NULL; - kind1 = PyUnicode_KIND(self); - kind2 = PyUnicode_KIND(substring); - if (kind1 < kind2) - return PyLong_FromLong(0); - - len1 = PyUnicode_GET_LENGTH(self); - len2 = PyUnicode_GET_LENGTH(substring); - ADJUST_INDICES(start, end, len1); - if (end - start < len2) - return PyLong_FromLong(0); - - buf1 = PyUnicode_DATA(self); - buf2 = PyUnicode_DATA(substring); - if (kind2 != kind1) { - buf2 = unicode_askind(kind2, buf2, len2, kind1); - if (!buf2) - return NULL; - } - switch (kind1) { - case PyUnicode_1BYTE_KIND: - iresult = ucs1lib_count( - ((const Py_UCS1*)buf1) + start, end - start, - buf2, len2, PY_SSIZE_T_MAX - ); - break; - case PyUnicode_2BYTE_KIND: - iresult = ucs2lib_count( - ((const Py_UCS2*)buf1) + start, end - start, - buf2, len2, PY_SSIZE_T_MAX - ); - break; - case PyUnicode_4BYTE_KIND: - iresult = ucs4lib_count( - ((const Py_UCS4*)buf1) + start, end - start, - buf2, len2, PY_SSIZE_T_MAX - ); - break; - default: - Py_UNREACHABLE(); - } - - result = PyLong_FromSsize_t(iresult); - - assert((kind2 == kind1) == (buf2 == PyUnicode_DATA(substring))); - if (kind2 != kind1) - PyMem_Free((void *)buf2); + result = unicode_count_impl(self, substring, start, end); + if (result == -1) + return NULL; - return result; + return PyLong_FromSsize_t(result); } /*[clinic input] @@ -13042,8 +12969,7 @@ _PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer, PyObject *str) if (writer->buffer == NULL && !writer->overallocate) { assert(_PyUnicode_CheckConsistency(str, 1)); writer->readonly = 1; - Py_INCREF(str); - writer->buffer = str; + writer->buffer = Py_NewRef(str); _PyUnicodeWriter_Update(writer); writer->pos += len; return 0; @@ -13646,8 +13572,7 @@ _PyUnicode_FormatLong(PyObject *val, int alt, int prec, int type) for (i = 0; i < numdigits; i++) *b1++ = *buf++; *b1 = '\0'; - Py_DECREF(result); - result = r1; + Py_SETREF(result, r1); buf = PyBytes_AS_STRING(result); len = numnondigits + prec; } @@ -13664,8 +13589,7 @@ _PyUnicode_FormatLong(PyObject *val, int alt, int prec, int type) || buf != PyUnicode_DATA(result)) { PyObject *unicode; unicode = _PyUnicode_FromASCII(buf, len); - Py_DECREF(result); - result = unicode; + Py_SETREF(result, unicode); } else if (len != PyUnicode_GET_LENGTH(result)) { if (PyUnicode_Resize(&result, len) < 0) @@ -13706,8 +13630,7 @@ mainformatlong(PyObject *v, assert(PyLong_Check(iobj)); } else { - iobj = v; - Py_INCREF(iobj); + iobj = Py_NewRef(v); } if (PyLong_CheckExact(v) @@ -14030,8 +13953,7 @@ unicode_format_arg_format(struct unicode_formatter_t *ctx, } if (PyUnicode_CheckExact(v) && arg->ch == 's') { - *p_str = v; - Py_INCREF(*p_str); + *p_str = Py_NewRef(v); } else { if (arg->ch == 's') @@ -14567,12 +14489,14 @@ PyTypeObject PyUnicode_Type = { /* Initialize the Unicode implementation */ -void -_PyUnicode_InitState(PyInterpreterState *interp) +static void +_init_global_state(void) { - if (!_Py_IsMainInterpreter(interp)) { + static int initialized = 0; + if (initialized) { return; } + initialized = 1; /* initialize the linebreak bloom filter */ const Py_UCS2 linebreak[] = { @@ -14590,6 +14514,15 @@ _PyUnicode_InitState(PyInterpreterState *interp) Py_ARRAY_LENGTH(linebreak)); } +void +_PyUnicode_InitState(PyInterpreterState 
*interp) +{ + if (!_Py_IsMainInterpreter(interp)) { + return; + } + _init_global_state(); +} + PyStatus _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) @@ -14681,8 +14614,7 @@ PyUnicode_InternInPlace(PyObject **p) } if (t != s) { - Py_INCREF(t); - Py_SETREF(*p, t); + Py_SETREF(*p, Py_NewRef(t)); return; } @@ -14952,8 +14884,7 @@ unicode_iter(PyObject *seq) if (it == NULL) return NULL; it->it_index = 0; - Py_INCREF(seq); - it->it_seq = seq; + it->it_seq = Py_NewRef(seq); _PyObject_GC_TRACK(it); return (PyObject *)it; } diff --git a/Objects/unionobject.c b/Objects/unionobject.c index 5eee27c08fa7e2..f273f7d15ef25f 100644 --- a/Objects/unionobject.c +++ b/Objects/unionobject.c @@ -114,12 +114,10 @@ merge(PyObject **items1, Py_ssize_t size1, } for (; pos < size1; pos++) { PyObject *a = items1[pos]; - Py_INCREF(a); - PyTuple_SET_ITEM(tuple, pos, a); + PyTuple_SET_ITEM(tuple, pos, Py_NewRef(a)); } } - Py_INCREF(arg); - PyTuple_SET_ITEM(tuple, pos, arg); + PyTuple_SET_ITEM(tuple, pos, Py_NewRef(arg)); pos++; } @@ -170,8 +168,7 @@ _Py_union_type_or(PyObject* self, PyObject* other) if (PyErr_Occurred()) { return NULL; } - Py_INCREF(self); - return self; + return Py_NewRef(self); } PyObject *new_union = make_union(tuple); @@ -298,8 +295,7 @@ union_getitem(PyObject *self, PyObject *item) res = make_union(newargs); } else { - res = PyTuple_GET_ITEM(newargs, 0); - Py_INCREF(res); + res = Py_NewRef(PyTuple_GET_ITEM(newargs, 0)); for (Py_ssize_t iarg = 1; iarg < nargs; iarg++) { PyObject *arg = PyTuple_GET_ITEM(newargs, iarg); Py_SETREF(res, PyNumber_Or(res, arg)); @@ -326,8 +322,7 @@ union_parameters(PyObject *self, void *Py_UNUSED(unused)) return NULL; } } - Py_INCREF(alias->parameters); - return alias->parameters; + return Py_NewRef(alias->parameters); } static PyGetSetDef union_properties[] = { @@ -400,9 +395,8 @@ make_union(PyObject *args) return NULL; } - Py_INCREF(args); result->parameters = NULL; - result->args = args; + result->args = Py_NewRef(args); _PyObject_GC_TRACK(result); return (PyObject*)result; } diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index cf89a9231d204d..bd7720e2753307 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -311,8 +311,7 @@ weakref___new__(PyTypeObject *type, PyObject *args, PyObject *kwargs) if (callback == NULL && type == &_PyWeakref_RefType) { if (ref != NULL) { /* We can re-use an existing reference. */ - Py_INCREF(ref); - return (PyObject *)ref; + return Py_NewRef(ref); } } /* We have to create a new reference. */ @@ -825,9 +824,7 @@ PyWeakref_NewRef(PyObject *ob, PyObject *callback) during GC. Return that one instead of this one to avoid violating the invariants of the list of weakrefs for ob. */ - Py_DECREF(result); - Py_INCREF(ref); - result = ref; + Py_SETREF(result, (PyWeakReference*)Py_NewRef(ref)); } } else { @@ -890,9 +887,7 @@ PyWeakref_NewProxy(PyObject *ob, PyObject *callback) during GC. Return that one instead of this one to avoid violating the invariants of the list of weakrefs for ob. 
*/ - Py_DECREF(result); - result = proxy; - Py_INCREF(result); + Py_SETREF(result, (PyWeakReference*)Py_NewRef(proxy)); goto skip_insert; } prev = ref; @@ -993,8 +988,7 @@ PyObject_ClearWeakRefs(PyObject *object) PyWeakReference *next = current->wr_next; if (Py_REFCNT((PyObject *)current) > 0) { - Py_INCREF(current); - PyTuple_SET_ITEM(tuple, i * 2, (PyObject *) current); + PyTuple_SET_ITEM(tuple, i * 2, Py_NewRef(current)); PyTuple_SET_ITEM(tuple, i * 2 + 1, current->wr_callback); } else { diff --git a/PC/_msi.c b/PC/_msi.c index 3686b9bb6f2a6d..b104e3c6ef548c 100644 --- a/PC/_msi.c +++ b/PC/_msi.c @@ -172,9 +172,7 @@ static FNFCIGETTEMPFILE(cb_gettempfile) static FNFCISTATUS(cb_status) { if (pv) { - _Py_IDENTIFIER(status); - - PyObject *result = _PyObject_CallMethodId(pv, &PyId_status, "iii", typeStatus, cb1, cb2); + PyObject *result = PyObject_CallMethod(pv, "status", "iii", typeStatus, cb1, cb2); if (result == NULL) return -1; Py_DECREF(result); @@ -185,9 +183,7 @@ static FNFCISTATUS(cb_status) static FNFCIGETNEXTCABINET(cb_getnextcabinet) { if (pv) { - _Py_IDENTIFIER(getnextcabinet); - - PyObject *result = _PyObject_CallMethodId(pv, &PyId_getnextcabinet, "i", pccab->iCab); + PyObject *result = PyObject_CallMethod(pv, "getnextcabinet", "i", pccab->iCab); if (result == NULL) return -1; if (!PyBytes_Check(result)) { @@ -705,8 +701,7 @@ _msi_SummaryInformation_GetProperty_impl(msiobj *self, int field) result = PyBytes_FromStringAndSize(sval, ssize); break; case VT_EMPTY: - Py_INCREF(Py_None); - result = Py_None; + result = Py_NewRef(Py_None); break; default: PyErr_Format(PyExc_NotImplementedError, "result of type %d", type); diff --git a/PC/launcher2.c b/PC/launcher2.c index 23eaa19dde38e8..9b3db04aa48b72 100644 --- a/PC/launcher2.c +++ b/PC/launcher2.c @@ -491,62 +491,39 @@ dumpSearchInfo(SearchInfo *search) int -findArgumentLength(const wchar_t *buffer, int bufferLength) +findArgv0Length(const wchar_t *buffer, int bufferLength) { - if (bufferLength < 0) { - bufferLength = (int)wcsnlen_s(buffer, MAXLEN); - } - if (bufferLength == 0) { - return 0; - } - const wchar_t *end; - int i; - - if (buffer[0] != L'"') { - end = wcschr(buffer, L' '); - if (!end) { - return bufferLength; - } - i = (int)(end - buffer); - return i < bufferLength ? i : bufferLength; - } - - i = 0; - while (i < bufferLength) { - end = wcschr(&buffer[i + 1], L'"'); - if (!end) { - return bufferLength; - } - - i = (int)(end - buffer); - if (i >= bufferLength) { - return bufferLength; - } - - int j = i; - while (j > 1 && buffer[--j] == L'\\') { - if (j > 0 && buffer[--j] == L'\\') { - // Even number, so back up and keep counting - } else { - // Odd number, so it's escaped and we want to keep searching - continue; + // Note: this implements semantics that are only valid for argv0. + // Specifically, there is no escaping of quotes, and quotes within + // the argument have no effect. A quoted argv0 must start and end + // with a double quote character; otherwise, it ends at the first + // ' ' or '\t'. + int quoted = buffer[0] == L'"'; + for (int i = 1; bufferLength < 0 || i < bufferLength; ++i) { + switch (buffer[i]) { + case L'\0': + return i; + case L' ': + case L'\t': + if (!quoted) { + return i; } - } - - // Non-escaped quote with space after it - end of the argument! 
- if (i + 1 >= bufferLength || isspace(buffer[i + 1])) { - return i + 1; + break; + case L'"': + if (quoted) { + return i + 1; + } + break; } } - return bufferLength; } const wchar_t * -findArgumentEnd(const wchar_t *buffer, int bufferLength) +findArgv0End(const wchar_t *buffer, int bufferLength) { - return &buffer[findArgumentLength(buffer, bufferLength)]; + return &buffer[findArgv0Length(buffer, bufferLength)]; } @@ -562,11 +539,16 @@ parseCommandLine(SearchInfo *search) return RC_NO_COMMANDLINE; } - const wchar_t *tail = findArgumentEnd(search->originalCmdLine, -1); - const wchar_t *end = tail; - search->restOfCmdLine = tail; + const wchar_t *argv0End = findArgv0End(search->originalCmdLine, -1); + const wchar_t *tail = argv0End; // will be start of the executable name + const wchar_t *end = argv0End; // will be end of the executable name + search->restOfCmdLine = argv0End; // will be first space after argv0 while (--tail != search->originalCmdLine) { - if (*tail == L'.' && end == search->restOfCmdLine) { + if (*tail == L'"' && end == argv0End) { + // Move the "end" up to the quote, so we also allow moving for + // a period later on. + end = argv0End = tail; + } else if (*tail == L'.' && end == argv0End) { end = tail; } else if (*tail == L'\\' || *tail == L'/') { ++tail; @@ -653,6 +635,7 @@ parseCommandLine(SearchInfo *search) search->tag = argStart; } search->tagLength = (int)(tail - search->tag); + search->allowDefaults = false; search->restOfCmdLine = tail; } else if (MATCHES(L"0") || MATCHES(L"-list")) { search->list = true; @@ -870,6 +853,62 @@ _findCommand(SearchInfo *search, const wchar_t *command, int commandLength) } +int +_useShebangAsExecutable(SearchInfo *search, const wchar_t *shebang, int shebangLength) +{ + wchar_t buffer[MAXLEN]; + wchar_t script[MAXLEN]; + wchar_t command[MAXLEN]; + + int commandLength = 0; + int inQuote = 0; + + if (!shebang || !shebangLength) { + return 0; + } + + wchar_t *pC = command; + for (int i = 0; i < shebangLength; ++i) { + wchar_t c = shebang[i]; + if (isspace(c) && !inQuote) { + commandLength = i; + break; + } else if (c == L'"') { + inQuote = !inQuote; + } else if (c == L'/' || c == L'\\') { + *pC++ = L'\\'; + } else { + *pC++ = c; + } + } + *pC = L'\0'; + + if (!GetCurrentDirectoryW(MAXLEN, buffer) || + wcsncpy_s(script, MAXLEN, search->scriptFile, search->scriptFileLength) || + FAILED(PathCchCombineEx(buffer, MAXLEN, buffer, script, + PATHCCH_ALLOW_LONG_PATHS)) || + FAILED(PathCchRemoveFileSpec(buffer, MAXLEN)) || + FAILED(PathCchCombineEx(buffer, MAXLEN, buffer, command, + PATHCCH_ALLOW_LONG_PATHS)) + ) { + return RC_NO_MEMORY; + } + + int n = (int)wcsnlen(buffer, MAXLEN); + wchar_t *path = allocSearchInfoBuffer(search, n + 1); + if (!path) { + return RC_NO_MEMORY; + } + wcscpy_s(path, n + 1, buffer); + search->executablePath = path; + if (commandLength) { + search->executableArgs = &shebang[commandLength]; + search->executableArgsLength = shebangLength - commandLength; + } + return 0; +} + + int checkShebang(SearchInfo *search) { @@ -962,13 +1001,19 @@ checkShebang(SearchInfo *search) L"/usr/bin/env ", L"/usr/bin/", L"/usr/local/bin/", - L"", + L"python", NULL }; for (const wchar_t **tmpl = shebangTemplates; *tmpl; ++tmpl) { if (_shebangStartsWith(shebang, shebangLength, *tmpl, &command)) { commandLength = 0; + // Normally "python" is the start of the command, but we also need it + // as a shebang prefix for back-compat. We move the command marker back + // if we match on that one. 
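As the comment just above explains, "python" now serves both as a shebang template and as part of the command itself, so the command marker is rewound by the length of "python" when that template matches. A worked trace, with an invented shebang and behaviour as implemented above:

/* Worked example (shebang invented for illustration):
 *   #!python3.12 -X utf8
 *   - _shebangStartsWith() matches the "python" template and leaves
 *     `command` pointing at "3.12 -X utf8";
 *   - `command -= 6;` rewinds it to include the prefix: "python3.12 -X utf8";
 *   - commandLength then stops at the first space, so the launcher sees the
 *     command "python3.12", the same result as for "#!/usr/bin/env python3.12".
 */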
+ if (0 == wcscmp(*tmpl, L"python")) { + command -= 6; + } while (command[commandLength] && !isspace(command[commandLength])) { commandLength += 1; } @@ -1011,11 +1056,14 @@ checkShebang(SearchInfo *search) debug(L"# Found shebang command but could not execute it: %.*s\n", commandLength, command); } - break; + // search is done by this point + return 0; } } - return 0; + // Unrecognised commands are joined to the script's directory and treated + // as the executable path + return _useShebangAsExecutable(search, shebang, shebangLength); } @@ -1672,6 +1720,7 @@ struct AppxSearchInfo { struct AppxSearchInfo APPX_SEARCH[] = { // Releases made through the Store + { L"PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0", L"3.12", 10 }, { L"PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0", L"3.11", 10 }, { L"PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0", L"3.10", 10 }, { L"PythonSoftwareFoundation.Python.3.9_qbz5n2kfra8p0", L"3.9", 10 }, @@ -1680,6 +1729,7 @@ struct AppxSearchInfo APPX_SEARCH[] = { // Side-loadable releases. Note that the publisher ID changes whenever we // renew our code-signing certificate, so the newer ID has a higher // priority (lower sortKey) + { L"PythonSoftwareFoundation.Python.3.12_3847v3x7pw1km", L"3.12", 11 }, { L"PythonSoftwareFoundation.Python.3.11_3847v3x7pw1km", L"3.11", 11 }, { L"PythonSoftwareFoundation.Python.3.11_hd69rhyc2wevp", L"3.11", 12 }, { L"PythonSoftwareFoundation.Python.3.10_3847v3x7pw1km", L"3.10", 11 }, @@ -1754,7 +1804,8 @@ struct StoreSearchInfo { struct StoreSearchInfo STORE_SEARCH[] = { - { L"3", /* 3.10 */ L"9PJPW5LDXLZ5" }, + { L"3", /* 3.11 */ L"9NRWMJP3717K" }, + { L"3.12", L"9NCVDN91XZQP" }, { L"3.11", L"9NRWMJP3717K" }, { L"3.10", L"9PJPW5LDXLZ5" }, { L"3.9", L"9P7QFQMJRFP7" }, diff --git a/PC/layout/support/options.py b/PC/layout/support/options.py index 3d93e892adae8a..26d13f5377ad59 100644 --- a/PC/layout/support/options.py +++ b/PC/layout/support/options.py @@ -18,7 +18,6 @@ def public(f): "stable": {"help": "stable ABI stub"}, "pip": {"help": "pip"}, "pip-user": {"help": "pip.ini file for default --user"}, - "distutils": {"help": "distutils"}, "tcltk": {"help": "Tcl, Tk and tkinter"}, "idle": {"help": "Idle"}, "tests": {"help": "test suite"}, @@ -43,7 +42,6 @@ def public(f): "stable", "pip", "pip-user", - "distutils", "tcltk", "idle", "venv", @@ -59,7 +57,6 @@ def public(f): "dev", "pip", "stable", - "distutils", "venv", "props", "nuspec", @@ -71,7 +68,6 @@ def public(f): "options": [ "stable", "pip", - "distutils", "tcltk", "idle", "tests", diff --git a/PC/layout/support/props.py b/PC/layout/support/props.py index 1eb9b7c06da513..c7a7a0ce777a6e 100644 --- a/PC/layout/support/props.py +++ b/PC/layout/support/props.py @@ -36,7 +36,6 @@ {PYTHON_VERSION} true - false false false @@ -68,7 +67,6 @@ DLLs\%(Filename)%(Extension) <_PythonRuntimeLib Include="$(PythonHome)\Lib\**\*" Exclude="$(PythonHome)\Lib\**\*.pyc;$(PythonHome)\Lib\site-packages\**\*" /> - <_PythonRuntimeLib Remove="$(PythonHome)\Lib\distutils\**\*" Condition="$(IncludeDistutils) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\lib2to3\**\*" Condition="$(IncludeLib2To3) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\ensurepip\**\*" Condition="$(IncludeVEnv) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\venv\**\*" Condition="$(IncludeVEnv) != 'true'" /> diff --git a/PC/layout/support/python.props b/PC/layout/support/python.props index 4cc70083ebe693..e46891aafcb9fc 100644 --- a/PC/layout/support/python.props +++ 
b/PC/layout/support/python.props @@ -6,9 +6,8 @@ $(PythonHome)\libs $$PYTHON_TAG$$ $$PYTHON_VERSION$$ - + true - false false false @@ -41,7 +40,6 @@ DLLs\%(Filename)%(Extension) <_PythonRuntimeLib Include="$(PythonHome)\Lib\**\*" Exclude="$(PythonHome)\Lib\**\*.pyc;$(PythonHome)\Lib\site-packages\**\*" /> - <_PythonRuntimeLib Remove="$(PythonHome)\Lib\distutils\**\*" Condition="$(IncludeDistutils) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\lib2to3\**\*" Condition="$(IncludeLib2To3) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\ensurepip\**\*" Condition="$(IncludeVEnv) != 'true'" /> <_PythonRuntimeLib Remove="$(PythonHome)\Lib\venv\**\*" Condition="$(IncludeVEnv) != 'true'" /> @@ -50,7 +48,7 @@ - + diff --git a/PC/python3dll.c b/PC/python3dll.c index 89bbd05932b853..931f316bb99843 100755 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -1,7 +1,7 @@ /* Re-export stable Python ABI */ -/* Generated by Tools/scripts/stable_abi.py */ +/* Generated by Tools/build/stable_abi.py */ #ifdef _M_IX86 #define DECORATE "_" @@ -485,6 +485,8 @@ EXPORT_FUNC(PyObject_SetItem) EXPORT_FUNC(PyObject_Size) EXPORT_FUNC(PyObject_Str) EXPORT_FUNC(PyObject_Type) +EXPORT_FUNC(PyObject_Vectorcall) +EXPORT_FUNC(PyObject_VectorcallMethod) EXPORT_FUNC(PyOS_CheckStack) EXPORT_FUNC(PyOS_double_to_string) EXPORT_FUNC(PyOS_FSPath) diff --git a/PC/python_uwp.cpp b/PC/python_uwp.cpp index 88369e8fbfeb38..2beea60e5af1ef 100644 --- a/PC/python_uwp.cpp +++ b/PC/python_uwp.cpp @@ -10,6 +10,7 @@ #include +#include #include #include @@ -28,37 +29,49 @@ const wchar_t *PROGNAME = L"python.exe"; #endif static std::wstring -get_user_base() +get_package_family() { try { - const auto appData = winrt::Windows::Storage::ApplicationData::Current(); - if (appData) { - const auto localCache = appData.LocalCacheFolder(); - if (localCache) { - auto path = localCache.Path(); - if (!path.empty()) { - return std::wstring(path) + L"\\local-packages"; - } - } + UINT32 nameLength = MAX_PATH; + std::wstring name; + name.resize(nameLength); + DWORD rc = GetCurrentPackageFamilyName(&nameLength, name.data()); + if (rc == ERROR_SUCCESS) { + name.resize(nameLength - 1); + return name; } - } catch (...) { + else if (rc != ERROR_INSUFFICIENT_BUFFER) { + throw rc; + } + name.resize(nameLength); + rc = GetCurrentPackageFamilyName(&nameLength, name.data()); + if (rc != ERROR_SUCCESS) { + throw rc; + } + name.resize(nameLength - 1); + return name; } + catch (...) { + } + return std::wstring(); } static std::wstring -get_package_family() +get_user_base() { try { - const auto package = winrt::Windows::ApplicationModel::Package::Current(); - if (package) { - const auto id = package.Id(); - if (id) { - return std::wstring(id.FamilyName()); + const auto appData = winrt::Windows::Storage::ApplicationData::Current(); + if (appData) { + const auto localCache = appData.LocalCacheFolder(); + if (localCache) { + std::wstring path { localCache.Path().c_str() }; + if (!path.empty()) { + return path + L"\\local-packages"; + } } } - } - catch (...) { + } catch (...) 
{ } return std::wstring(); @@ -68,13 +81,24 @@ static std::wstring get_package_home() { try { - const auto package = winrt::Windows::ApplicationModel::Package::Current(); - if (package) { - const auto path = package.InstalledLocation(); - if (path) { - return std::wstring(path.Path()); - } + UINT32 pathLength = MAX_PATH; + std::wstring path; + path.resize(pathLength); + DWORD rc = GetCurrentPackagePath(&pathLength, path.data()); + if (rc == ERROR_SUCCESS) { + path.resize(pathLength - 1); + return path; + } + else if (rc != ERROR_INSUFFICIENT_BUFFER) { + throw rc; + } + path.resize(pathLength); + rc = GetCurrentPackagePath(&pathLength, path.data()); + if (rc != ERROR_SUCCESS) { + throw rc; } + path.resize(pathLength - 1); + return path; } catch (...) { } diff --git a/PC/winreg.c b/PC/winreg.c index 6ae0d8169cc56b..df34e8cf5a77a9 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -308,8 +308,7 @@ static PyHKEYObject * winreg_HKEYType___enter___impl(PyHKEYObject *self) /*[clinic end generated code: output=52c34986dab28990 input=c40fab1f0690a8e2]*/ { - Py_XINCREF(self); - return self; + return (PyHKEYObject*)Py_XNewRef(self); } @@ -784,8 +783,7 @@ Reg2Py(BYTE *retDataBuf, DWORD retDataSize, DWORD typ) support it natively, we should handle the bits. */ default: if (retDataSize == 0) { - Py_INCREF(Py_None); - obData = Py_None; + obData = Py_NewRef(Py_None); } else obData = PyBytes_FromStringAndSize( diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index 39939a7ba98345..8454bd67b1db1b 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -129,7 +129,6 @@ - @@ -211,6 +210,7 @@ + @@ -395,7 +395,7 @@ DependsOnTargets="FindPythonForBuild" Condition="$(Configuration) != 'PGUpdate'"> - Source Files - + Source Files diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index b7d40c8cdc30eb..58bf4e1eacbf21 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -94,10 +94,19 @@ + + + + + + + + + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index fc2c4345fe142e..101c5322761634 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -12,6 +12,9 @@ Source Files + + Source Files + Source Files @@ -24,6 +27,30 @@ Source Files + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + diff --git a/PCbuild/_testsinglephase.vcxproj b/PCbuild/_testsinglephase.vcxproj new file mode 100644 index 00000000000000..fb4bcd953923f8 --- /dev/null +++ b/PCbuild/_testsinglephase.vcxproj @@ -0,0 +1,115 @@ + + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {2097F1C1-597C-4167-93E3-656A7D6339B2} + Win32Proj + _testsinglephase + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + + _CONSOLE;%(PreprocessorDefinitions) + + + Console + + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + diff --git a/PCbuild/_testsinglephase.vcxproj.filters b/PCbuild/_testsinglephase.vcxproj.filters new file mode 100644 index 00000000000000..2a0e0ef66c116f --- /dev/null +++ b/PCbuild/_testsinglephase.vcxproj.filters @@ -0,0 +1,23 @@ + + + + + 
{4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 9c6bc4eac496db..98cca979fdfcd0 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -54,12 +54,12 @@ set libraries= set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.3 if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1q -set libraries=%libraries% sqlite-3.38.4.0 +set libraries=%libraries% sqlite-3.39.4.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.12.1 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.12.1 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tix-8.4.3.6 set libraries=%libraries% xz-5.2.5 -set libraries=%libraries% zlib-1.2.12 +set libraries=%libraries% zlib-1.2.13 for %%e in (%libraries%) do ( if exist "%EXTERNALS_DIR%\%%e" ( diff --git a/PCbuild/lib.pyproj b/PCbuild/lib.pyproj deleted file mode 100644 index 9934dc577bf36a..00000000000000 --- a/PCbuild/lib.pyproj +++ /dev/null @@ -1,1819 +0,0 @@ - - - - Debug - 2.0 - {cb12a4c2-3757-4e67-8f51-c533876cefd1} - ..\Lib\ - abc.py - - . - . - {888888a0-9f3d-457c-b088-3a5042f75d52} - Standard Python launcher - - - - - - 10.0 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index d9e4d981aa2189..e13a0d409293f4 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -77,7 +77,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 9f374abd152b17..848d59504381cc 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -10,7 +10,42 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "python", "python.vcxproj", "{B11D750F-CD1F-4A96-85CE-E69A5C5259F9}" ProjectSection(ProjectDependencies) = postProject + {9E48B300-37D1-11DD-8C41-005056C00008} = {9E48B300-37D1-11DD-8C41-005056C00008} + {9EC7190A-249F-4180-A900-548FDCF3055F} = {9EC7190A-249F-4180-A900-548FDCF3055F} + {78D80A15-BD8C-44E2-B49E-1F05B0A0A687} = {78D80A15-BD8C-44E2-B49E-1F05B0A0A687} + {6901D91C-6E48-4BB7-9FEC-700C8131DF1D} = {6901D91C-6E48-4BB7-9FEC-700C8131DF1D} + {54B1431F-B86B-4ACB-B28C-88BCF93191D8} = {54B1431F-B86B-4ACB-B28C-88BCF93191D8} + {F749B822-B489-4CA5-A3AD-CE078F5F338A} = {F749B822-B489-4CA5-A3AD-CE078F5F338A} + {D06B6426-4762-44CC-8BAD-D79052507F2F} = {D06B6426-4762-44CC-8BAD-D79052507F2F} + {36D0C52C-DF4E-45D0-8BC7-E294C3ABC781} = {36D0C52C-DF4E-45D0-8BC7-E294C3ABC781} + {CB435430-EBB1-478B-8F4E-C256F6838F55} = {CB435430-EBB1-478B-8F4E-C256F6838F55} + {17E1E049-C309-4D79-843F-AE483C264AEA} = {17E1E049-C309-4D79-843F-AE483C264AEA} + {384C224A-7474-476E-A01B-750EA7DE918C} = {384C224A-7474-476E-A01B-750EA7DE918C} + {12728250-16EC-4DC6-94D7-E21DD88947F8} = {12728250-16EC-4DC6-94D7-E21DD88947F8} + {86937F53-C189-40EF-8CE8-8759D8E7D480} = {86937F53-C189-40EF-8CE8-8759D8E7D480} + {28B5D777-DDF2-4B6B-B34F-31D938813856} = {28B5D777-DDF2-4B6B-B34F-31D938813856} + {31FFC478-7B4A-43E8-9954-8D03E2187E9C} = {31FFC478-7B4A-43E8-9954-8D03E2187E9C} + {F9D71780-F393-11E0-BE50-0800200C9A66} = {F9D71780-F393-11E0-BE50-0800200C9A66} + {494BAC80-A60C-43A9-99E7-ACB691CE2C4D} = 
{494BAC80-A60C-43A9-99E7-ACB691CE2C4D} + {C6E20F84-3247-4AD6-B051-B073268F73BA} = {C6E20F84-3247-4AD6-B051-B073268F73BA} + {B244E787-C445-441C-BDF4-5A4F1A3A1E51} = {B244E787-C445-441C-BDF4-5A4F1A3A1E51} + {18CAE28C-B454-46C1-87A0-493D91D97F03} = {18CAE28C-B454-46C1-87A0-493D91D97F03} + {13CECB97-4119-4316-9D42-8534019A5A44} = {13CECB97-4119-4316-9D42-8534019A5A44} + {885D4898-D08D-4091-9C40-C700CFE3FC5A} = {885D4898-D08D-4091-9C40-C700CFE3FC5A} + {447F05A8-F581-4CAC-A466-5AC7936E207E} = {447F05A8-F581-4CAC-A466-5AC7936E207E} + {ECC7CEAC-A5E5-458E-BB9E-2413CC847881} = {ECC7CEAC-A5E5-458E-BB9E-2413CC847881} + {4946ECAC-2E69-4BF8-A90A-F5136F5094DF} = {4946ECAC-2E69-4BF8-A90A-F5136F5094DF} + {FDB84CBB-2FB6-47C8-A2D6-091E0833239D} = {FDB84CBB-2FB6-47C8-A2D6-091E0833239D} + {73FCD2BD-F133-46B7-8EC1-144CD82A59D5} = {73FCD2BD-F133-46B7-8EC1-144CD82A59D5} + {2097F1C1-597C-4167-93E3-656A7D6339B2} = {2097F1C1-597C-4167-93E3-656A7D6339B2} + {A2697BD3-28C1-4AEC-9106-8B748639FD16} = {A2697BD3-28C1-4AEC-9106-8B748639FD16} + {900342D7-516A-4469-B1AD-59A66E49A25F} = {900342D7-516A-4469-B1AD-59A66E49A25F} + {6DAC66D9-E703-4624-BE03-49112AB5AA62} = {6DAC66D9-E703-4624-BE03-49112AB5AA62} + {0E9791DB-593A-465F-98BC-681011311617} = {0E9791DB-593A-465F-98BC-681011311617} {0E9791DB-593A-465F-98BC-681011311618} = {0E9791DB-593A-465F-98BC-681011311618} + {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} = {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} + {16BFE6F0-22EF-40B5-B831-7E937119EF10} = {16BFE6F0-22EF-40B5-B831-7E937119EF10} + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} = {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythoncore", "pythoncore.vcxproj", "{CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26}" @@ -19,6 +54,9 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythoncore", "pythoncore.vc EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw", "pythonw.vcxproj", "{F4229CC3-873C-49AE-9729-DD308ED4CD4A}" + ProjectSection(ProjectDependencies) = postProject + {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} = {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winsound", "winsound.vcxproj", "{28B5D777-DDF2-4B6B-B34F-31D938813856}" EndProject @@ -86,6 +124,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testembed", "_testembed.vc EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testmultiphase", "_testmultiphase.vcxproj", "{16BFE6F0-22EF-40B5-B831-7E937119EF10}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testsinglephase", "_testsinglephase.vcxproj", "{2097F1C1-597C-4167-93E3-656A7D6339B2}" +EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pyshellext", "pyshellext.vcxproj", "{0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testconsole", "_testconsole.vcxproj", "{B244E787-C445-441C-BDF4-5A4F1A3A1E51}" @@ -99,12 +139,18 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "liblzma", "liblzma.vcxproj", "{12728250-16EC-4DC6-94D7-E21DD88947F8}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "python_uwp", "python_uwp.vcxproj", "{9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}" + ProjectSection(ProjectDependencies) = postProject + {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} = {B11D750F-CD1F-4A96-85CE-E69A5C5259F9} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvlauncher", "venvlauncher.vcxproj", 
"{494BAC80-A60C-43A9-99E7-ACB691CE2C4D}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvwlauncher", "venvwlauncher.vcxproj", "{FDB84CBB-2FB6-47C8-A2D6-091E0833239D}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw_uwp", "pythonw_uwp.vcxproj", "{AB603547-1E2A-45B3-9E09-B04596006393}" + ProjectSection(ProjectDependencies) = postProject + {F4229CC3-873C-49AE-9729-DD308ED4CD4A} = {F4229CC3-873C-49AE-9729-DD308ED4CD4A} + EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_uuid", "_uuid.vcxproj", "{CB435430-EBB1-478B-8F4E-C256F6838F55}" EndProject @@ -1168,6 +1214,36 @@ Global {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|Win32.Build.0 = Release|Win32 {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.ActiveCfg = Release|x64 {16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.Build.0 = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM.ActiveCfg = Debug|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM.Build.0 = Debug|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|ARM64.Build.0 = Debug|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|Win32.ActiveCfg = Debug|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|Win32.Build.0 = Debug|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|x64.ActiveCfg = Debug|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Debug|x64.Build.0 = Debug|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGInstrument|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|Win32.Build.0 = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.PGUpdate|x64.Build.0 = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM.ActiveCfg = Release|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM.Build.0 = Release|ARM + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM64.ActiveCfg = Release|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|ARM64.Build.0 = Release|ARM64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|Win32.ActiveCfg = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|Win32.Build.0 = Release|Win32 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|x64.ActiveCfg = Release|x64 + {2097F1C1-597C-4167-93E3-656A7D6339B2}.Release|x64.Build.0 = Release|x64 {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM.ActiveCfg = Debug|ARM {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM.Build.0 = Debug|ARM {0F6EE4A4-C75F-4578-B4B3-2D64F4B9B782}.Debug|ARM64.ActiveCfg = Debug|ARM64 diff --git a/PCbuild/python.props b/PCbuild/python.props index 
c3ab12e7c67250..320d41f4cc20d8 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -61,7 +61,7 @@ $(EXTERNALS_DIR) $([System.IO.Path]::GetFullPath(`$(PySourcePath)externals`)) $(ExternalsDir)\ - $(ExternalsDir)sqlite-3.38.4.0\ + $(ExternalsDir)sqlite-3.39.4.0\ $(ExternalsDir)bzip2-1.0.8\ $(ExternalsDir)xz-5.2.5\ $(ExternalsDir)libffi-3.4.3\ @@ -71,7 +71,7 @@ $(ExternalsDir)openssl-bin-1.1.1q\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ - $(ExternalsDir)\zlib-1.2.12\ + $(ExternalsDir)\zlib-1.2.13\ _d diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index ff17304032cded..f62434370cfdf7 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -155,6 +155,7 @@ + @@ -208,6 +209,8 @@ + + @@ -219,6 +222,7 @@ + @@ -230,12 +234,15 @@ + + + @@ -254,6 +261,7 @@ + @@ -429,7 +437,6 @@ - @@ -513,6 +520,7 @@ + @@ -588,7 +596,7 @@ - + GITVERSION="$(GitVersion)";GITTAG="$(GitTag)";GITBRANCH="$(GitBranch)";%(PreprocessorDefinitions) diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 7d7fe7267c8ff0..f44a1ad8550a38 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -384,6 +384,9 @@ Include + + Include + Include @@ -477,6 +480,9 @@ Include\internal + + Include\internal + Include\internal @@ -528,6 +534,12 @@ Include\internal + + Include\internal + + + Include\internal + Include\internal @@ -558,6 +570,9 @@ Include\internal + + Include\internal + Include\internal @@ -591,6 +606,12 @@ Include\internal + + Include\internal + + + Include\internal + Include\internal @@ -609,6 +630,9 @@ Include\internal + + Include\internal + Include\internal @@ -923,9 +947,6 @@ Objects - - Objects - Objects @@ -1127,6 +1148,9 @@ Python + + Python + Python diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index c536fac94cc680..3ed26a47b066b9 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -1,7 +1,7 @@ Quick Start Guide ----------------- -1. Install Microsoft Visual Studio 2017 with Python workload and +1. Install Microsoft Visual Studio 2017 or later with Python workload and Python native development component. 1a. Optionally install Python 3.6 or later. If not installed, get_externals.bat (via build.bat) will download and use Python via @@ -147,6 +147,7 @@ _testcapi _testconsole _testimportmultiple _testmultiphase +_testsinglephase _tkinter pyexpat select @@ -187,7 +188,7 @@ _ssl again when building. 
_sqlite3 - Wraps SQLite 3.38.4, which is itself built by sqlite3.vcxproj + Wraps SQLite 3.39.4, which is itself built by sqlite3.vcxproj Homepage: https://www.sqlite.org/ _tkinter diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index 3938b66678eee0..aeb7e2e185d9f8 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -13,7 +13,7 @@ <_ASTOutputs Include="$(PySourcePath)Python\Python-ast.c"> -C - <_OpcodeSources Include="$(PySourcePath)Tools\scripts\generate_opcode_h.py;$(PySourcePath)Lib\opcode.py" /> + <_OpcodeSources Include="$(PySourcePath)Tools\build\generate_opcode_h.py;$(PySourcePath)Lib\opcode.py" /> <_OpcodeOutputs Include="$(PySourcePath)Include\opcode.h;$(PySourcePath)Include\internal\pycore_opcode.h;$(PySourcePath)Python\opcode_targets.h" /> <_TokenSources Include="$(PySourcePath)Grammar\Tokens" /> <_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc"> @@ -59,7 +59,7 @@ Inputs="@(_OpcodeSources)" Outputs="@(_OpcodeOutputs)" DependsOnTargets="FindPythonForBuild"> - @@ -69,7 +69,7 @@ Inputs="@(_TokenSources)" Outputs="@(_TokenOutputs)" DependsOnTargets="FindPythonForBuild"> - @@ -85,7 +85,7 @@ - diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index d1be679aff2e7b..27c093332f6725 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -12,6 +12,7 @@ _create_dummy_identifier(Parser *p) void * _PyPegen_dummy_name(Parser *p, ...) { + // XXX This leaks memory from the initial arena. static void *cache = NULL; if (cache != NULL) { diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 6bd2e66c804d1f..3e307610b635f4 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -675,8 +675,7 @@ def visitField(self, field, name, sum=None, prod=None, depth=0): self.emit("if (%s == NULL) goto failed;" % field.name, depth+1) self.emit("for (i = 0; i < len; i++) {", depth+1) self.emit("%s val;" % ctype, depth+2) - self.emit("PyObject *tmp2 = PyList_GET_ITEM(tmp, i);", depth+2) - self.emit("Py_INCREF(tmp2);", depth+2) + self.emit("PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i));", depth+2) with self.recursive_call(name, depth+2): self.emit("res = obj2ast_%s(state, tmp2, &val, arena);" % field.type, depth+2, reflow=False) @@ -995,10 +994,11 @@ def visitModule(self, mod): static PyObject* ast2obj_object(struct ast_state *Py_UNUSED(state), void *o) { - if (!o) - o = Py_None; - Py_INCREF((PyObject*)o); - return (PyObject*)o; + PyObject *op = (PyObject*)o; + if (!op) { + op = Py_None; + } + return Py_NewRef(op); } #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object @@ -1020,9 +1020,11 @@ def visitModule(self, mod): *out = NULL; return -1; } - Py_INCREF(obj); + *out = Py_NewRef(obj); + } + else { + *out = NULL; } - *out = obj; return 0; } @@ -1032,8 +1034,7 @@ def visitModule(self, mod): *out = NULL; return -1; } - Py_INCREF(obj); - *out = obj; + *out = Py_NewRef(obj); return 0; } @@ -1301,8 +1302,7 @@ def simpleSum(self, sum, name): self.emit("switch(o) {", 1) for t in sum.types: self.emit("case %s:" % t.name, 2) - self.emit("Py_INCREF(state->%s_singleton);" % t.name, 3) - self.emit("return state->%s_singleton;" % t.name, 3) + self.emit("return Py_NewRef(state->%s_singleton);" % t.name, 3) self.emit("}", 1) self.emit("Py_UNREACHABLE();", 1); self.emit("}", 0) @@ -1484,6 +1484,10 @@ def generate_ast_fini(module_state, f): for s in module_state: f.write(" Py_CLEAR(state->" + s + ');\n') f.write(textwrap.dedent(""" + if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { + 
Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); + } + #if !defined(NDEBUG) state->initialized = -1; #else diff --git a/Parser/parser.c b/Parser/parser.c index bd3204af3e9388..d0bc25927bece6 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -17,28 +17,28 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 634}, - {"as", 632}, - {"in", 643}, + {"if", 641}, + {"as", 639}, + {"in", 650}, {"or", 574}, {"is", 582}, {NULL, -1}, }, (KeywordToken[]) { {"del", 603}, - {"def", 644}, - {"for", 642}, - {"try", 618}, + {"def", 651}, + {"for", 649}, + {"try", 623}, {"and", 575}, {"not", 581}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 572}, + {"from", 607}, {"pass", 504}, - {"with", 612}, - {"elif", 636}, - {"else", 637}, + {"with", 614}, + {"elif", 643}, + {"else", 644}, {"None", 601}, {"True", 600}, {NULL, -1}, @@ -47,22 +47,22 @@ static KeywordToken *reserved_keywords[] = { {"raise", 522}, {"yield", 573}, {"break", 508}, - {"class", 646}, - {"while", 639}, + {"class", 653}, + {"while", 646}, {"False", 602}, {NULL, -1}, }, (KeywordToken[]) { {"return", 519}, - {"import", 531}, + {"import", 606}, {"assert", 526}, {"global", 523}, - {"except", 629}, + {"except", 636}, {"lambda", 586}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 625}, + {"finally", 632}, {NULL, -1}, }, (KeywordToken[]) { @@ -284,277 +284,281 @@ static char *soft_keywords[] = { #define invalid_with_item_type 1204 #define invalid_for_target_type 1205 #define invalid_group_type 1206 -#define invalid_import_from_targets_type 1207 -#define invalid_with_stmt_type 1208 -#define invalid_with_stmt_indent_type 1209 -#define invalid_try_stmt_type 1210 -#define invalid_except_stmt_type 1211 -#define invalid_finally_stmt_type 1212 -#define invalid_except_stmt_indent_type 1213 -#define invalid_except_star_stmt_indent_type 1214 -#define invalid_match_stmt_type 1215 -#define invalid_case_block_type 1216 -#define invalid_as_pattern_type 1217 -#define invalid_class_pattern_type 1218 -#define invalid_class_argument_pattern_type 1219 -#define invalid_if_stmt_type 1220 -#define invalid_elif_stmt_type 1221 -#define invalid_else_stmt_type 1222 -#define invalid_while_stmt_type 1223 -#define invalid_for_stmt_type 1224 -#define invalid_def_raw_type 1225 -#define invalid_class_def_raw_type 1226 -#define invalid_double_starred_kvpairs_type 1227 -#define invalid_kvpair_type 1228 -#define _loop0_1_type 1229 -#define _loop0_2_type 1230 -#define _loop1_3_type 1231 -#define _loop0_5_type 1232 -#define _gather_4_type 1233 -#define _tmp_6_type 1234 -#define _tmp_7_type 1235 -#define _tmp_8_type 1236 -#define _tmp_9_type 1237 -#define _tmp_10_type 1238 -#define _tmp_11_type 1239 -#define _tmp_12_type 1240 -#define _tmp_13_type 1241 -#define _loop1_14_type 1242 -#define _tmp_15_type 1243 -#define _tmp_16_type 1244 -#define _tmp_17_type 1245 -#define _loop0_19_type 1246 -#define _gather_18_type 1247 -#define _loop0_21_type 1248 -#define _gather_20_type 1249 -#define _tmp_22_type 1250 -#define _tmp_23_type 1251 -#define _loop0_24_type 1252 -#define _loop1_25_type 1253 -#define _loop0_27_type 1254 -#define _gather_26_type 1255 -#define _tmp_28_type 1256 -#define _loop0_30_type 1257 -#define _gather_29_type 1258 -#define _tmp_31_type 1259 -#define _loop1_32_type 1260 -#define _tmp_33_type 1261 -#define _tmp_34_type 1262 -#define _tmp_35_type 1263 -#define _loop0_36_type 1264 -#define _loop0_37_type 1265 -#define _loop0_38_type 1266 -#define _loop1_39_type 1267 -#define _loop0_40_type 
1268 -#define _loop1_41_type 1269 -#define _loop1_42_type 1270 -#define _loop1_43_type 1271 -#define _loop0_44_type 1272 -#define _loop1_45_type 1273 -#define _loop0_46_type 1274 -#define _loop1_47_type 1275 -#define _loop0_48_type 1276 -#define _loop0_49_type 1277 -#define _loop1_50_type 1278 -#define _loop0_52_type 1279 -#define _gather_51_type 1280 -#define _loop0_54_type 1281 -#define _gather_53_type 1282 -#define _loop0_56_type 1283 -#define _gather_55_type 1284 -#define _loop0_58_type 1285 -#define _gather_57_type 1286 -#define _tmp_59_type 1287 -#define _loop1_60_type 1288 -#define _loop1_61_type 1289 -#define _tmp_62_type 1290 -#define _tmp_63_type 1291 -#define _loop1_64_type 1292 -#define _loop0_66_type 1293 -#define _gather_65_type 1294 -#define _tmp_67_type 1295 -#define _tmp_68_type 1296 -#define _tmp_69_type 1297 -#define _tmp_70_type 1298 -#define _loop0_72_type 1299 -#define _gather_71_type 1300 -#define _loop0_74_type 1301 -#define _gather_73_type 1302 -#define _tmp_75_type 1303 -#define _loop0_77_type 1304 -#define _gather_76_type 1305 -#define _loop0_79_type 1306 -#define _gather_78_type 1307 -#define _loop1_80_type 1308 -#define _loop1_81_type 1309 -#define _loop0_83_type 1310 -#define _gather_82_type 1311 -#define _loop1_84_type 1312 -#define _loop1_85_type 1313 -#define _loop1_86_type 1314 -#define _tmp_87_type 1315 -#define _loop0_89_type 1316 -#define _gather_88_type 1317 -#define _tmp_90_type 1318 -#define _tmp_91_type 1319 -#define _tmp_92_type 1320 -#define _tmp_93_type 1321 -#define _tmp_94_type 1322 -#define _loop0_95_type 1323 -#define _loop0_96_type 1324 -#define _loop0_97_type 1325 -#define _loop1_98_type 1326 -#define _loop0_99_type 1327 -#define _loop1_100_type 1328 -#define _loop1_101_type 1329 -#define _loop1_102_type 1330 -#define _loop0_103_type 1331 -#define _loop1_104_type 1332 -#define _loop0_105_type 1333 -#define _loop1_106_type 1334 -#define _loop0_107_type 1335 -#define _loop1_108_type 1336 -#define _loop1_109_type 1337 -#define _tmp_110_type 1338 -#define _loop0_112_type 1339 -#define _gather_111_type 1340 -#define _loop1_113_type 1341 -#define _loop0_114_type 1342 -#define _loop0_115_type 1343 -#define _tmp_116_type 1344 -#define _loop0_118_type 1345 -#define _gather_117_type 1346 -#define _tmp_119_type 1347 -#define _loop0_121_type 1348 -#define _gather_120_type 1349 -#define _loop0_123_type 1350 -#define _gather_122_type 1351 -#define _loop0_125_type 1352 -#define _gather_124_type 1353 -#define _loop0_127_type 1354 -#define _gather_126_type 1355 -#define _loop0_128_type 1356 -#define _loop0_130_type 1357 -#define _gather_129_type 1358 -#define _loop1_131_type 1359 -#define _tmp_132_type 1360 -#define _loop0_134_type 1361 -#define _gather_133_type 1362 -#define _loop0_136_type 1363 -#define _gather_135_type 1364 -#define _loop0_138_type 1365 -#define _gather_137_type 1366 -#define _loop0_140_type 1367 -#define _gather_139_type 1368 -#define _loop0_142_type 1369 -#define _gather_141_type 1370 -#define _tmp_143_type 1371 -#define _tmp_144_type 1372 -#define _tmp_145_type 1373 -#define _tmp_146_type 1374 -#define _tmp_147_type 1375 -#define _tmp_148_type 1376 -#define _tmp_149_type 1377 -#define _tmp_150_type 1378 -#define _tmp_151_type 1379 -#define _loop0_152_type 1380 -#define _loop0_153_type 1381 -#define _loop0_154_type 1382 -#define _tmp_155_type 1383 -#define _tmp_156_type 1384 -#define _tmp_157_type 1385 -#define _tmp_158_type 1386 -#define _tmp_159_type 1387 -#define _loop0_160_type 1388 -#define _loop0_161_type 1389 -#define 
_loop0_162_type 1390 -#define _loop1_163_type 1391 -#define _tmp_164_type 1392 -#define _loop0_165_type 1393 -#define _tmp_166_type 1394 -#define _loop0_167_type 1395 -#define _loop1_168_type 1396 -#define _tmp_169_type 1397 -#define _tmp_170_type 1398 -#define _tmp_171_type 1399 -#define _loop0_172_type 1400 -#define _tmp_173_type 1401 -#define _tmp_174_type 1402 -#define _loop1_175_type 1403 -#define _tmp_176_type 1404 -#define _loop0_177_type 1405 -#define _loop0_178_type 1406 -#define _loop0_179_type 1407 -#define _loop0_181_type 1408 -#define _gather_180_type 1409 -#define _tmp_182_type 1410 -#define _loop0_183_type 1411 -#define _tmp_184_type 1412 -#define _loop0_185_type 1413 -#define _loop1_186_type 1414 -#define _loop1_187_type 1415 -#define _tmp_188_type 1416 -#define _tmp_189_type 1417 -#define _loop0_190_type 1418 -#define _tmp_191_type 1419 -#define _tmp_192_type 1420 -#define _tmp_193_type 1421 -#define _loop0_195_type 1422 -#define _gather_194_type 1423 -#define _loop0_197_type 1424 -#define _gather_196_type 1425 -#define _loop0_199_type 1426 -#define _gather_198_type 1427 -#define _loop0_201_type 1428 -#define _gather_200_type 1429 -#define _tmp_202_type 1430 -#define _loop0_203_type 1431 -#define _tmp_204_type 1432 -#define _loop0_205_type 1433 -#define _tmp_206_type 1434 -#define _tmp_207_type 1435 -#define _tmp_208_type 1436 -#define _tmp_209_type 1437 -#define _tmp_210_type 1438 -#define _tmp_211_type 1439 -#define _tmp_212_type 1440 -#define _tmp_213_type 1441 -#define _tmp_214_type 1442 -#define _loop0_216_type 1443 -#define _gather_215_type 1444 -#define _tmp_217_type 1445 -#define _tmp_218_type 1446 -#define _tmp_219_type 1447 -#define _tmp_220_type 1448 -#define _tmp_221_type 1449 -#define _tmp_222_type 1450 -#define _tmp_223_type 1451 -#define _tmp_224_type 1452 -#define _tmp_225_type 1453 -#define _tmp_226_type 1454 -#define _tmp_227_type 1455 -#define _tmp_228_type 1456 -#define _tmp_229_type 1457 -#define _tmp_230_type 1458 -#define _tmp_231_type 1459 -#define _tmp_232_type 1460 -#define _tmp_233_type 1461 -#define _tmp_234_type 1462 -#define _tmp_235_type 1463 -#define _tmp_236_type 1464 -#define _tmp_237_type 1465 -#define _tmp_238_type 1466 -#define _tmp_239_type 1467 -#define _tmp_240_type 1468 -#define _tmp_241_type 1469 -#define _tmp_242_type 1470 -#define _tmp_243_type 1471 -#define _tmp_244_type 1472 -#define _tmp_245_type 1473 -#define _tmp_246_type 1474 -#define _tmp_247_type 1475 -#define _loop1_248_type 1476 -#define _loop1_249_type 1477 +#define invalid_import_type 1207 +#define invalid_import_from_targets_type 1208 +#define invalid_with_stmt_type 1209 +#define invalid_with_stmt_indent_type 1210 +#define invalid_try_stmt_type 1211 +#define invalid_except_stmt_type 1212 +#define invalid_finally_stmt_type 1213 +#define invalid_except_stmt_indent_type 1214 +#define invalid_except_star_stmt_indent_type 1215 +#define invalid_match_stmt_type 1216 +#define invalid_case_block_type 1217 +#define invalid_as_pattern_type 1218 +#define invalid_class_pattern_type 1219 +#define invalid_class_argument_pattern_type 1220 +#define invalid_if_stmt_type 1221 +#define invalid_elif_stmt_type 1222 +#define invalid_else_stmt_type 1223 +#define invalid_while_stmt_type 1224 +#define invalid_for_stmt_type 1225 +#define invalid_def_raw_type 1226 +#define invalid_class_def_raw_type 1227 +#define invalid_double_starred_kvpairs_type 1228 +#define invalid_kvpair_type 1229 +#define invalid_starred_expression_type 1230 +#define _loop0_1_type 1231 +#define _loop0_2_type 1232 
+#define _loop1_3_type 1233 +#define _loop0_5_type 1234 +#define _gather_4_type 1235 +#define _tmp_6_type 1236 +#define _tmp_7_type 1237 +#define _tmp_8_type 1238 +#define _tmp_9_type 1239 +#define _tmp_10_type 1240 +#define _tmp_11_type 1241 +#define _tmp_12_type 1242 +#define _tmp_13_type 1243 +#define _loop1_14_type 1244 +#define _tmp_15_type 1245 +#define _tmp_16_type 1246 +#define _tmp_17_type 1247 +#define _loop0_19_type 1248 +#define _gather_18_type 1249 +#define _loop0_21_type 1250 +#define _gather_20_type 1251 +#define _tmp_22_type 1252 +#define _tmp_23_type 1253 +#define _loop0_24_type 1254 +#define _loop1_25_type 1255 +#define _loop0_27_type 1256 +#define _gather_26_type 1257 +#define _tmp_28_type 1258 +#define _loop0_30_type 1259 +#define _gather_29_type 1260 +#define _tmp_31_type 1261 +#define _loop1_32_type 1262 +#define _tmp_33_type 1263 +#define _tmp_34_type 1264 +#define _tmp_35_type 1265 +#define _loop0_36_type 1266 +#define _loop0_37_type 1267 +#define _loop0_38_type 1268 +#define _loop1_39_type 1269 +#define _loop0_40_type 1270 +#define _loop1_41_type 1271 +#define _loop1_42_type 1272 +#define _loop1_43_type 1273 +#define _loop0_44_type 1274 +#define _loop1_45_type 1275 +#define _loop0_46_type 1276 +#define _loop1_47_type 1277 +#define _loop0_48_type 1278 +#define _loop0_49_type 1279 +#define _loop1_50_type 1280 +#define _loop0_52_type 1281 +#define _gather_51_type 1282 +#define _loop0_54_type 1283 +#define _gather_53_type 1284 +#define _loop0_56_type 1285 +#define _gather_55_type 1286 +#define _loop0_58_type 1287 +#define _gather_57_type 1288 +#define _tmp_59_type 1289 +#define _loop1_60_type 1290 +#define _loop1_61_type 1291 +#define _tmp_62_type 1292 +#define _tmp_63_type 1293 +#define _loop1_64_type 1294 +#define _loop0_66_type 1295 +#define _gather_65_type 1296 +#define _tmp_67_type 1297 +#define _tmp_68_type 1298 +#define _tmp_69_type 1299 +#define _tmp_70_type 1300 +#define _loop0_72_type 1301 +#define _gather_71_type 1302 +#define _loop0_74_type 1303 +#define _gather_73_type 1304 +#define _tmp_75_type 1305 +#define _loop0_77_type 1306 +#define _gather_76_type 1307 +#define _loop0_79_type 1308 +#define _gather_78_type 1309 +#define _loop1_80_type 1310 +#define _loop1_81_type 1311 +#define _loop0_83_type 1312 +#define _gather_82_type 1313 +#define _loop1_84_type 1314 +#define _loop1_85_type 1315 +#define _loop1_86_type 1316 +#define _tmp_87_type 1317 +#define _loop0_89_type 1318 +#define _gather_88_type 1319 +#define _tmp_90_type 1320 +#define _tmp_91_type 1321 +#define _tmp_92_type 1322 +#define _tmp_93_type 1323 +#define _tmp_94_type 1324 +#define _loop0_95_type 1325 +#define _loop0_96_type 1326 +#define _loop0_97_type 1327 +#define _loop1_98_type 1328 +#define _loop0_99_type 1329 +#define _loop1_100_type 1330 +#define _loop1_101_type 1331 +#define _loop1_102_type 1332 +#define _loop0_103_type 1333 +#define _loop1_104_type 1334 +#define _loop0_105_type 1335 +#define _loop1_106_type 1336 +#define _loop0_107_type 1337 +#define _loop1_108_type 1338 +#define _loop1_109_type 1339 +#define _tmp_110_type 1340 +#define _loop0_112_type 1341 +#define _gather_111_type 1342 +#define _loop1_113_type 1343 +#define _loop0_114_type 1344 +#define _loop0_115_type 1345 +#define _tmp_116_type 1346 +#define _loop0_118_type 1347 +#define _gather_117_type 1348 +#define _tmp_119_type 1349 +#define _loop0_121_type 1350 +#define _gather_120_type 1351 +#define _loop0_123_type 1352 +#define _gather_122_type 1353 +#define _loop0_125_type 1354 +#define _gather_124_type 1355 +#define 
_loop0_127_type 1356 +#define _gather_126_type 1357 +#define _loop0_128_type 1358 +#define _loop0_130_type 1359 +#define _gather_129_type 1360 +#define _loop1_131_type 1361 +#define _tmp_132_type 1362 +#define _loop0_134_type 1363 +#define _gather_133_type 1364 +#define _loop0_136_type 1365 +#define _gather_135_type 1366 +#define _loop0_138_type 1367 +#define _gather_137_type 1368 +#define _loop0_140_type 1369 +#define _gather_139_type 1370 +#define _loop0_142_type 1371 +#define _gather_141_type 1372 +#define _tmp_143_type 1373 +#define _tmp_144_type 1374 +#define _tmp_145_type 1375 +#define _tmp_146_type 1376 +#define _tmp_147_type 1377 +#define _tmp_148_type 1378 +#define _tmp_149_type 1379 +#define _tmp_150_type 1380 +#define _tmp_151_type 1381 +#define _tmp_152_type 1382 +#define _tmp_153_type 1383 +#define _loop0_154_type 1384 +#define _loop0_155_type 1385 +#define _loop0_156_type 1386 +#define _tmp_157_type 1387 +#define _tmp_158_type 1388 +#define _tmp_159_type 1389 +#define _tmp_160_type 1390 +#define _tmp_161_type 1391 +#define _loop0_162_type 1392 +#define _loop0_163_type 1393 +#define _loop0_164_type 1394 +#define _loop1_165_type 1395 +#define _tmp_166_type 1396 +#define _loop0_167_type 1397 +#define _tmp_168_type 1398 +#define _loop0_169_type 1399 +#define _loop1_170_type 1400 +#define _tmp_171_type 1401 +#define _tmp_172_type 1402 +#define _tmp_173_type 1403 +#define _loop0_174_type 1404 +#define _tmp_175_type 1405 +#define _tmp_176_type 1406 +#define _loop1_177_type 1407 +#define _tmp_178_type 1408 +#define _loop0_179_type 1409 +#define _loop0_180_type 1410 +#define _loop0_181_type 1411 +#define _loop0_183_type 1412 +#define _gather_182_type 1413 +#define _tmp_184_type 1414 +#define _loop0_185_type 1415 +#define _tmp_186_type 1416 +#define _loop0_187_type 1417 +#define _loop1_188_type 1418 +#define _loop1_189_type 1419 +#define _tmp_190_type 1420 +#define _tmp_191_type 1421 +#define _loop0_192_type 1422 +#define _tmp_193_type 1423 +#define _tmp_194_type 1424 +#define _tmp_195_type 1425 +#define _loop0_197_type 1426 +#define _gather_196_type 1427 +#define _loop0_199_type 1428 +#define _gather_198_type 1429 +#define _loop0_201_type 1430 +#define _gather_200_type 1431 +#define _loop0_203_type 1432 +#define _gather_202_type 1433 +#define _tmp_204_type 1434 +#define _loop0_205_type 1435 +#define _loop1_206_type 1436 +#define _tmp_207_type 1437 +#define _loop0_208_type 1438 +#define _loop1_209_type 1439 +#define _tmp_210_type 1440 +#define _tmp_211_type 1441 +#define _tmp_212_type 1442 +#define _tmp_213_type 1443 +#define _tmp_214_type 1444 +#define _tmp_215_type 1445 +#define _tmp_216_type 1446 +#define _tmp_217_type 1447 +#define _tmp_218_type 1448 +#define _tmp_219_type 1449 +#define _loop0_221_type 1450 +#define _gather_220_type 1451 +#define _tmp_222_type 1452 +#define _tmp_223_type 1453 +#define _tmp_224_type 1454 +#define _tmp_225_type 1455 +#define _tmp_226_type 1456 +#define _tmp_227_type 1457 +#define _tmp_228_type 1458 +#define _tmp_229_type 1459 +#define _tmp_230_type 1460 +#define _tmp_231_type 1461 +#define _tmp_232_type 1462 +#define _tmp_233_type 1463 +#define _tmp_234_type 1464 +#define _tmp_235_type 1465 +#define _tmp_236_type 1466 +#define _tmp_237_type 1467 +#define _tmp_238_type 1468 +#define _tmp_239_type 1469 +#define _tmp_240_type 1470 +#define _tmp_241_type 1471 +#define _tmp_242_type 1472 +#define _tmp_243_type 1473 +#define _tmp_244_type 1474 +#define _tmp_245_type 1475 +#define _tmp_246_type 1476 +#define _tmp_247_type 1477 +#define _tmp_248_type 1478 
+#define _tmp_249_type 1479 +#define _tmp_250_type 1480 +#define _tmp_251_type 1481 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -763,6 +767,7 @@ static void *invalid_double_type_comments_rule(Parser *p); static void *invalid_with_item_rule(Parser *p); static void *invalid_for_target_rule(Parser *p); static void *invalid_group_rule(Parser *p); +static void *invalid_import_rule(Parser *p); static void *invalid_import_from_targets_rule(Parser *p); static void *invalid_with_stmt_rule(Parser *p); static void *invalid_with_stmt_indent_rule(Parser *p); @@ -785,6 +790,7 @@ static void *invalid_def_raw_rule(Parser *p); static void *invalid_class_def_raw_rule(Parser *p); static void *invalid_double_starred_kvpairs_rule(Parser *p); static void *invalid_kvpair_rule(Parser *p); +static void *invalid_starred_expression_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop1_3_rule(Parser *p); @@ -936,76 +942,76 @@ static void *_tmp_148_rule(Parser *p); static void *_tmp_149_rule(Parser *p); static void *_tmp_150_rule(Parser *p); static void *_tmp_151_rule(Parser *p); -static asdl_seq *_loop0_152_rule(Parser *p); -static asdl_seq *_loop0_153_rule(Parser *p); +static void *_tmp_152_rule(Parser *p); +static void *_tmp_153_rule(Parser *p); static asdl_seq *_loop0_154_rule(Parser *p); -static void *_tmp_155_rule(Parser *p); -static void *_tmp_156_rule(Parser *p); +static asdl_seq *_loop0_155_rule(Parser *p); +static asdl_seq *_loop0_156_rule(Parser *p); static void *_tmp_157_rule(Parser *p); static void *_tmp_158_rule(Parser *p); static void *_tmp_159_rule(Parser *p); -static asdl_seq *_loop0_160_rule(Parser *p); -static asdl_seq *_loop0_161_rule(Parser *p); +static void *_tmp_160_rule(Parser *p); +static void *_tmp_161_rule(Parser *p); static asdl_seq *_loop0_162_rule(Parser *p); -static asdl_seq *_loop1_163_rule(Parser *p); -static void *_tmp_164_rule(Parser *p); -static asdl_seq *_loop0_165_rule(Parser *p); +static asdl_seq *_loop0_163_rule(Parser *p); +static asdl_seq *_loop0_164_rule(Parser *p); +static asdl_seq *_loop1_165_rule(Parser *p); static void *_tmp_166_rule(Parser *p); static asdl_seq *_loop0_167_rule(Parser *p); -static asdl_seq *_loop1_168_rule(Parser *p); -static void *_tmp_169_rule(Parser *p); -static void *_tmp_170_rule(Parser *p); +static void *_tmp_168_rule(Parser *p); +static asdl_seq *_loop0_169_rule(Parser *p); +static asdl_seq *_loop1_170_rule(Parser *p); static void *_tmp_171_rule(Parser *p); -static asdl_seq *_loop0_172_rule(Parser *p); +static void *_tmp_172_rule(Parser *p); static void *_tmp_173_rule(Parser *p); -static void *_tmp_174_rule(Parser *p); -static asdl_seq *_loop1_175_rule(Parser *p); +static asdl_seq *_loop0_174_rule(Parser *p); +static void *_tmp_175_rule(Parser *p); static void *_tmp_176_rule(Parser *p); -static asdl_seq *_loop0_177_rule(Parser *p); -static asdl_seq *_loop0_178_rule(Parser *p); +static asdl_seq *_loop1_177_rule(Parser *p); +static void *_tmp_178_rule(Parser *p); static asdl_seq *_loop0_179_rule(Parser *p); +static asdl_seq *_loop0_180_rule(Parser *p); static asdl_seq *_loop0_181_rule(Parser *p); -static asdl_seq *_gather_180_rule(Parser *p); -static void *_tmp_182_rule(Parser *p); static asdl_seq *_loop0_183_rule(Parser *p); +static asdl_seq *_gather_182_rule(Parser *p); static void *_tmp_184_rule(Parser *p); static asdl_seq *_loop0_185_rule(Parser *p); -static asdl_seq *_loop1_186_rule(Parser *p); -static asdl_seq *_loop1_187_rule(Parser 
*p); -static void *_tmp_188_rule(Parser *p); -static void *_tmp_189_rule(Parser *p); -static asdl_seq *_loop0_190_rule(Parser *p); +static void *_tmp_186_rule(Parser *p); +static asdl_seq *_loop0_187_rule(Parser *p); +static asdl_seq *_loop1_188_rule(Parser *p); +static asdl_seq *_loop1_189_rule(Parser *p); +static void *_tmp_190_rule(Parser *p); static void *_tmp_191_rule(Parser *p); -static void *_tmp_192_rule(Parser *p); +static asdl_seq *_loop0_192_rule(Parser *p); static void *_tmp_193_rule(Parser *p); -static asdl_seq *_loop0_195_rule(Parser *p); -static asdl_seq *_gather_194_rule(Parser *p); +static void *_tmp_194_rule(Parser *p); +static void *_tmp_195_rule(Parser *p); static asdl_seq *_loop0_197_rule(Parser *p); static asdl_seq *_gather_196_rule(Parser *p); static asdl_seq *_loop0_199_rule(Parser *p); static asdl_seq *_gather_198_rule(Parser *p); static asdl_seq *_loop0_201_rule(Parser *p); static asdl_seq *_gather_200_rule(Parser *p); -static void *_tmp_202_rule(Parser *p); static asdl_seq *_loop0_203_rule(Parser *p); +static asdl_seq *_gather_202_rule(Parser *p); static void *_tmp_204_rule(Parser *p); static asdl_seq *_loop0_205_rule(Parser *p); -static void *_tmp_206_rule(Parser *p); +static asdl_seq *_loop1_206_rule(Parser *p); static void *_tmp_207_rule(Parser *p); -static void *_tmp_208_rule(Parser *p); -static void *_tmp_209_rule(Parser *p); +static asdl_seq *_loop0_208_rule(Parser *p); +static asdl_seq *_loop1_209_rule(Parser *p); static void *_tmp_210_rule(Parser *p); static void *_tmp_211_rule(Parser *p); static void *_tmp_212_rule(Parser *p); static void *_tmp_213_rule(Parser *p); static void *_tmp_214_rule(Parser *p); -static asdl_seq *_loop0_216_rule(Parser *p); -static asdl_seq *_gather_215_rule(Parser *p); +static void *_tmp_215_rule(Parser *p); +static void *_tmp_216_rule(Parser *p); static void *_tmp_217_rule(Parser *p); static void *_tmp_218_rule(Parser *p); static void *_tmp_219_rule(Parser *p); -static void *_tmp_220_rule(Parser *p); -static void *_tmp_221_rule(Parser *p); +static asdl_seq *_loop0_221_rule(Parser *p); +static asdl_seq *_gather_220_rule(Parser *p); static void *_tmp_222_rule(Parser *p); static void *_tmp_223_rule(Parser *p); static void *_tmp_224_rule(Parser *p); @@ -1032,8 +1038,10 @@ static void *_tmp_244_rule(Parser *p); static void *_tmp_245_rule(Parser *p); static void *_tmp_246_rule(Parser *p); static void *_tmp_247_rule(Parser *p); -static asdl_seq *_loop1_248_rule(Parser *p); -static asdl_seq *_loop1_249_rule(Parser *p); +static void *_tmp_248_rule(Parser *p); +static void *_tmp_249_rule(Parser *p); +static void *_tmp_250_rule(Parser *p); +static void *_tmp_251_rule(Parser *p); // file: statements? 
$ @@ -2019,7 +2027,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 634) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 641) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2103,7 +2111,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 618) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 623) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2124,7 +2132,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 639) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 646) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -3301,7 +3309,7 @@ assert_stmt_rule(Parser *p) return _res; } -// import_stmt: import_name | import_from +// import_stmt: invalid_import | import_name | import_from static stmt_ty import_stmt_rule(Parser *p) { @@ -3315,6 +3323,25 @@ import_stmt_rule(Parser *p) } stmt_ty _res = NULL; int _mark = p->mark; + if (p->call_invalid_rules) { // invalid_import + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_import")); + void *invalid_import_var; + if ( + (invalid_import_var = invalid_import_rule(p)) // invalid_import + ) + { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_import")); + _res = invalid_import_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_import")); + } { // import_name if (p->error_indicator) { p->level--; @@ -3391,7 +3418,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword = _PyPegen_expect_token(p, 606)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3461,13 +3488,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (a = _loop0_24_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3505,11 +3532,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (a = _loop1_25_rule(p)) // (('.' 
| '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -4266,7 +4293,7 @@ class_def_raw_rule(Parser *p) void *b; asdl_stmt_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4432,7 +4459,7 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4492,7 +4519,7 @@ function_def_raw_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -5844,7 +5871,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5889,7 +5916,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5985,7 +6012,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6030,7 +6057,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6112,7 +6139,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword = _PyPegen_expect_token(p, 644)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6192,7 +6219,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='while' + (_keyword = _PyPegen_expect_token(p, 646)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6293,11 +6320,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -6357,11 +6384,11 @@ for_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -6490,7 +6517,7 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + 
(_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6539,7 +6566,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+ && @@ -6590,7 +6617,7 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6642,7 +6669,7 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+ && @@ -6729,7 +6756,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -6855,7 +6882,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6899,7 +6926,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6947,7 +6974,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7046,7 +7073,7 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (e = expression_rule(p)) // expression && @@ -7089,7 +7116,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7201,7 +7228,7 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7304,7 +7331,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7616,7 +7643,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7814,7 +7841,7 @@ as_pattern_rule(Parser *p) if ( (pattern = 
or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -10642,11 +10669,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' && (c = expression_rule(p)) // expression ) @@ -10753,7 +10780,7 @@ yield_expr_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 573)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='from' && (a = expression_rule(p)) // expression ) @@ -12203,7 +12230,7 @@ notin_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 581)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12250,7 +12277,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword = _PyPegen_expect_token(p, 650)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -16035,11 +16062,11 @@ for_if_clause_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -16078,11 +16105,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (_cut_var = 1) && @@ -16760,7 +16787,7 @@ kwargs_rule(Parser *p) return _res; } -// starred_expression: '*' expression +// starred_expression: invalid_starred_expression | '*' expression static expr_ty starred_expression_rule(Parser *p) { @@ -16783,6 +16810,25 @@ starred_expression_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro + if (p->call_invalid_rules) { // invalid_starred_expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_starred_expression")); + void *invalid_starred_expression_var; + if ( + (invalid_starred_expression_var = invalid_starred_expression_rule(p)) // invalid_starred_expression + ) + { + D(fprintf(stderr, "%*c+ starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_starred_expression")); + _res = invalid_starred_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_starred_expression")); + } { // '*' expression if (p->error_indicator) { p->level--; @@ -18918,6 +18964,7 @@ func_type_comment_rule(Parser *p) // | args ',' '*' // | expression for_if_clauses ',' [args | expression for_if_clauses] // | NAME '=' expression for_if_clauses +// | [(args ',')] NAME '=' &(',' | ')') // | args for_if_clauses // | args ',' expression for_if_clauses // | args ',' args @@ -19031,6 +19078,39 @@ invalid_arguments_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_arguments[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression for_if_clauses")); } + { // [(args ',')] NAME '=' &(',' | ')') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + Token * b; + if ( + (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [(args ',')] + && + (a = _PyPegen_name_token(p)) // NAME + && + (b = _PyPegen_expect_token(p, 22)) // token='=' + && + _PyPegen_lookahead(1, _tmp_146_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "expected argument value expression" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); + } { // args for_if_clauses if (p->error_indicator) { p->level--; @@ -19131,6 +19211,7 @@ invalid_arguments_rule(Parser *p) // | ('True' | 'False' | 'None') '=' // | NAME '=' expression for_if_clauses // | !(NAME '=') expression '=' +// | '**' expression '=' expression static void * invalid_kwarg_rule(Parser *p) { @@ -19153,7 +19234,7 @@ invalid_kwarg_rule(Parser *p) Token* a; Token * b; if ( - (a = (Token*)_tmp_145_rule(p)) // 'True' | 'False' | 'None' + (a = (Token*)_tmp_147_rule(p)) // 'True' | 'False' | 'None' && (b = _PyPegen_expect_token(p, 22)) // token='=' ) @@ -19213,7 +19294,7 @@ invalid_kwarg_rule(Parser *p) expr_ty a; Token * b; if ( - _PyPegen_lookahead(0, _tmp_146_rule, p) + _PyPegen_lookahead(0, _tmp_148_rule, p) && (a = expression_rule(p)) // expression && @@ -19233,6 +19314,39 @@ invalid_kwarg_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "!(NAME '=') expression '='")); } + { // '**' expression '=' expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_kwarg[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression '=' expression")); + Token * _literal; + Token * a; + expr_ty b; + expr_ty expression_var; + if ( + (a = _PyPegen_expect_token(p, 35)) // token='**' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ invalid_kwarg[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression '=' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot assign to keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression '=' expression")); + } _res = NULL; done: p->level--; @@ -19284,11 +19398,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' && (c = expression_rule(p)) // expression ) @@ -19438,7 +19552,7 @@ invalid_expression_rule(Parser *p) expr_ty a; expr_ty b; if ( - _PyPegen_lookahead(0, _tmp_147_rule, p) + _PyPegen_lookahead(0, _tmp_149_rule, p) && (a = disjunction_rule(p)) // disjunction && @@ -19470,11 +19584,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (b = disjunction_rule(p)) // disjunction && - _PyPegen_lookahead(0, _tmp_148_rule, p) + _PyPegen_lookahead(0, _tmp_150_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')")); @@ -19563,7 +19677,7 @@ invalid_named_expression_rule(Parser *p) && (b = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_149_rule, p) + _PyPegen_lookahead(0, _tmp_151_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')")); @@ -19589,7 +19703,7 @@ invalid_named_expression_rule(Parser *p) Token * b; expr_ty bitwise_or_var; if ( - _PyPegen_lookahead(0, _tmp_150_rule, p) + _PyPegen_lookahead(0, _tmp_152_rule, p) && (a = bitwise_or_rule(p)) // bitwise_or && @@ -19597,7 +19711,7 @@ invalid_named_expression_rule(Parser *p) && (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_151_rule, p) + _PyPegen_lookahead(0, _tmp_153_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')")); @@ -19678,7 +19792,7 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); 
Token * _literal; Token * _literal_1; - asdl_seq * _loop0_152_var; + asdl_seq * _loop0_154_var; expr_ty a; expr_ty expression_var; if ( @@ -19686,7 +19800,7 @@ invalid_assignment_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_loop0_152_var = _loop0_152_rule(p)) // star_named_expressions* + (_loop0_154_var = _loop0_154_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -19743,10 +19857,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_153_var; + asdl_seq * _loop0_155_var; expr_ty a; if ( - (_loop0_153_var = _loop0_153_rule(p)) // ((star_targets '='))* + (_loop0_155_var = _loop0_155_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -19773,10 +19887,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_154_var; + asdl_seq * _loop0_156_var; expr_ty a; if ( - (_loop0_154_var = _loop0_154_rule(p)) // ((star_targets '='))* + (_loop0_156_var = _loop0_156_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -19802,7 +19916,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_155_var; + void *_tmp_157_var; expr_ty a; AugOperator* augassign_var; if ( @@ -19810,7 +19924,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_155_var = _tmp_155_rule(p)) // yield_expr | star_expressions + (_tmp_157_var = _tmp_157_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -20036,11 +20150,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_156_var; + void *_tmp_158_var; expr_ty a; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_156_var = _tmp_156_rule(p)) // '[' | '(' | '{' + (_tmp_158_var = _tmp_158_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -20067,12 +20181,12 @@ invalid_comprehension_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses")); Token * _literal; - void *_tmp_157_var; + void *_tmp_159_var; expr_ty a; asdl_expr_seq* b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_157_var = _tmp_157_rule(p)) // '[' | '{' + (_tmp_159_var = _tmp_159_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20102,12 +20216,12 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses")); - void *_tmp_158_var; + void *_tmp_160_var; expr_ty a; Token * b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_158_var = _tmp_158_rule(p)) 
// '[' | '{' + (_tmp_160_var = _tmp_160_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20244,13 +20358,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'")); - asdl_seq * _loop0_160_var; - void *_tmp_159_var; + asdl_seq * _loop0_162_var; + void *_tmp_161_var; Token * a; if ( - (_tmp_159_var = _tmp_159_rule(p)) // slash_no_default | slash_with_default + (_tmp_161_var = _tmp_161_rule(p)) // slash_no_default | slash_with_default && - (_loop0_160_var = _loop0_160_rule(p)) // param_maybe_default* + (_loop0_162_var = _loop0_162_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20274,7 +20388,7 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default")); - asdl_seq * _loop0_161_var; + asdl_seq * _loop0_163_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20282,7 +20396,7 @@ invalid_parameters_rule(Parser *p) if ( (_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default? && - (_loop0_161_var = _loop0_161_rule(p)) // param_no_default* + (_loop0_163_var = _loop0_163_rule(p)) // param_no_default* && (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper && @@ -20308,18 +20422,18 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'")); - asdl_seq * _loop0_162_var; - asdl_seq * _loop1_163_var; + asdl_seq * _loop0_164_var; + asdl_seq * _loop1_165_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_162_var = _loop0_162_rule(p)) // param_no_default* + (_loop0_164_var = _loop0_164_rule(p)) // param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_loop1_163_var = _loop1_163_rule(p)) // param_no_default+ + (_loop1_165_var = _loop1_165_rule(p)) // param_no_default+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -20346,22 +20460,22 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_165_var; asdl_seq * _loop0_167_var; + asdl_seq * _loop0_169_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_166_var; + void *_tmp_168_var; Token * a; if ( - (_opt_var = _tmp_164_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] + (_opt_var = _tmp_166_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] && - (_loop0_165_var = _loop0_165_rule(p)) // param_maybe_default* + (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_166_var = _tmp_166_rule(p)) // ',' | param_no_default + (_tmp_168_var = _tmp_168_rule(p)) // ',' | param_no_default && - (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* + (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20386,10 +20500,10 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_168_var; + asdl_seq * _loop1_170_var; Token * a; if ( - (_loop1_168_var = _loop1_168_rule(p)) // param_maybe_default+ + (_loop1_170_var = _loop1_170_rule(p)) // param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -20439,7 +20553,7 @@ invalid_default_rule(Parser *p) if ( (a = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_169_rule, p) + _PyPegen_lookahead(1, _tmp_171_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')")); @@ -20485,12 +20599,12 @@ invalid_star_etc_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - void *_tmp_170_var; + void *_tmp_172_var; Token * a; if ( (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_170_var = _tmp_170_rule(p)) // ')' | ',' (')' | '**') + (_tmp_172_var = _tmp_172_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -20573,20 +20687,20 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_172_var; - void *_tmp_171_var; + asdl_seq * _loop0_174_var; void *_tmp_173_var; + void *_tmp_175_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_171_var = _tmp_171_rule(p)) // param_no_default | ',' + (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ',' && - (_loop0_172_var = _loop0_172_rule(p)) // param_maybe_default* + (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ',' + (_tmp_175_var = _tmp_175_rule(p)) // param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default 
| ',') param_maybe_default* '*' (param_no_default | ',')")); @@ -20702,7 +20816,7 @@ invalid_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_174_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_176_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')")); @@ -20768,13 +20882,13 @@ invalid_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_175_var; + asdl_seq * _loop1_177_var; if ( - (_loop1_175_var = _loop1_175_rule(p)) // param_with_default+ + (_loop1_177_var = _loop1_177_rule(p)) // param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_175_var; + _res = _loop1_177_var; goto done; } p->mark = _mark; @@ -20840,13 +20954,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'")); - asdl_seq * _loop0_177_var; - void *_tmp_176_var; + asdl_seq * _loop0_179_var; + void *_tmp_178_var; Token * a; if ( - (_tmp_176_var = _tmp_176_rule(p)) // lambda_slash_no_default | lambda_slash_with_default + (_tmp_178_var = _tmp_178_rule(p)) // lambda_slash_no_default | lambda_slash_with_default && - (_loop0_177_var = _loop0_177_rule(p)) // lambda_param_maybe_default* + (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20870,7 +20984,7 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default")); - asdl_seq * _loop0_178_var; + asdl_seq * _loop0_180_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20878,7 +20992,7 @@ invalid_lambda_parameters_rule(Parser *p) if ( (_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default? && - (_loop0_178_var = _loop0_178_rule(p)) // lambda_param_no_default* + (_loop0_180_var = _loop0_180_rule(p)) // lambda_param_no_default* && (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper && @@ -20904,18 +21018,18 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'")); - asdl_seq * _gather_180_var; - asdl_seq * _loop0_179_var; + asdl_seq * _gather_182_var; + asdl_seq * _loop0_181_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_no_default* + (_loop0_181_var = _loop0_181_rule(p)) // lambda_param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_180_var = _gather_180_rule(p)) // ','.lambda_param+ + (_gather_182_var = _gather_182_rule(p)) // ','.lambda_param+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -20942,22 +21056,22 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_183_var; asdl_seq * _loop0_185_var; + asdl_seq * _loop0_187_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_184_var; + void *_tmp_186_var; Token * a; if ( - (_opt_var = _tmp_182_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] + (_opt_var = _tmp_184_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] && - (_loop0_183_var = _loop0_183_rule(p)) // lambda_param_maybe_default* + (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_184_var = _tmp_184_rule(p)) // ',' | lambda_param_no_default + (_tmp_186_var = _tmp_186_rule(p)) // ',' | lambda_param_no_default && - (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default* + (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20982,10 +21096,10 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_186_var; + asdl_seq * _loop1_188_var; Token * a; if ( - (_loop1_186_var = _loop1_186_rule(p)) // lambda_param_maybe_default+ + (_loop1_188_var = _loop1_188_rule(p)) // lambda_param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -21057,13 +21171,13 @@ invalid_lambda_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_187_var; + asdl_seq * _loop1_189_var; if ( - (_loop1_187_var = _loop1_187_rule(p)) // lambda_param_with_default+ + (_loop1_189_var = _loop1_189_rule(p)) // lambda_param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_187_var; + _res = _loop1_189_var; goto done; } p->mark = _mark; @@ -21100,11 +21214,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_188_var; + void *_tmp_190_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_188_var = _tmp_188_rule(p)) // ':' | ',' (':' | '**') + (_tmp_190_var = _tmp_190_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -21157,20 +21271,20 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_190_var; - void *_tmp_189_var; + asdl_seq * _loop0_192_var; void *_tmp_191_var; + void *_tmp_193_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - 
(_tmp_189_var = _tmp_189_rule(p)) // lambda_param_no_default | ',' + (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ',' && - (_loop0_190_var = _loop0_190_rule(p)) // lambda_param_maybe_default* + (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ',' + (_tmp_193_var = _tmp_193_rule(p)) // lambda_param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); @@ -21289,7 +21403,7 @@ invalid_lambda_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_192_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_194_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')")); @@ -21393,11 +21507,11 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (a = expression_rule(p)) // expression && - _PyPegen_lookahead(1, _tmp_193_rule, p) + _PyPegen_lookahead(1, _tmp_195_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')")); @@ -21446,7 +21560,7 @@ invalid_for_target_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -21553,6 +21667,59 @@ invalid_group_rule(Parser *p) return _res; } +// invalid_import: 'import' dotted_name 'from' dotted_name +static void * +invalid_import_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'import' dotted_name 'from' dotted_name + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_import[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + Token * _keyword; + Token * a; + expr_ty dotted_name_var; + expr_ty dotted_name_var_1; + if ( + (a = _PyPegen_expect_token(p, 606)) // token='import' + && + (dotted_name_var = dotted_name_rule(p)) // dotted_name + && + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + && + (dotted_name_var_1 = dotted_name_rule(p)) // dotted_name + ) + { + D(fprintf(stderr, "%*c+ invalid_import[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "Did you mean to use 'from ... import ...' instead?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_import[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'import' dotted_name 'from' dotted_name")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // invalid_import_from_targets: import_from_as_names ',' NEWLINE static void * invalid_import_from_targets_rule(Parser *p) @@ -21625,7 +21792,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); - asdl_seq * _gather_194_var; + asdl_seq * _gather_196_var; Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21633,9 +21800,9 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && - (_gather_194_var = _gather_194_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_196_var = _gather_196_rule(p)) // ','.(expression ['as' star_target])+ && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -21659,7 +21826,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); - asdl_seq * _gather_196_var; + asdl_seq * _gather_198_var; Token * _keyword; Token * _literal; Token * _literal_1; @@ -21671,11 +21838,11 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_196_var = _gather_196_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_198_var = _gather_198_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -21725,7 +21892,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); - asdl_seq * _gather_198_var; + asdl_seq * _gather_200_var; Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21734,9 +21901,9 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 612)) // token='with' + (a = _PyPegen_expect_token(p, 614)) // token='with' && - (_gather_198_var = _gather_198_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_200_var = _gather_200_rule(p)) // ','.(expression ['as' star_target])+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -21764,7 +21931,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); - asdl_seq * _gather_200_var; + asdl_seq * _gather_202_var; Token * _literal; Token * _literal_1; Token * _literal_2; @@ -21777,11 +21944,11 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (a = _PyPegen_expect_token(p, 612)) // token='with' + (a = _PyPegen_expect_token(p, 614)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_200_var = _gather_200_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_202_var = _gather_202_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -21816,7 +21983,8 @@ invalid_with_stmt_indent_rule(Parser *p) // invalid_try_stmt: // | 'try' ':' NEWLINE !INDENT // | 'try' ':' block !('except' | 'finally') -// | 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* +// | 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' +// | 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' static void * invalid_try_stmt_rule(Parser *p) { @@ -21840,7 +22008,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 618)) // token='try' + (a = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -21872,13 +22040,13 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - _PyPegen_lookahead(0, _tmp_202_rule, p) + _PyPegen_lookahead(0, _tmp_204_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')")); @@ -21894,31 +22062,44 @@ invalid_try_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'try' ':' block !('except' | 'finally')")); } - { // 'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block* + { // 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); + D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); Token * _keyword; Token * _literal; - asdl_seq * _loop0_203_var; + Token * _literal_1; asdl_seq * _loop0_205_var; - void *_tmp_204_var; + asdl_seq * _loop1_206_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * a; + Token * b; + expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='try' + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_203_var = _loop0_203_rule(p)) // block* + (_loop0_205_var = _loop0_205_rule(p)) // block* && - (_tmp_204_var = _tmp_204_rule(p)) // (except_block+ except_star_block) | (except_star_block+ except_block) + (_loop1_206_var = _loop1_206_rule(p)) // except_block+ && - (_loop0_205_var = _loop0_205_rule(p)) // block* + (a = _PyPegen_expect_token(p, 636)) // token='except' + && + (b = _PyPegen_expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME] + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); - _res = RAISE_SYNTAX_ERROR ( "cannot have both 'except' and 'except*' on the same 'try'" ); + D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot have both 'except' and 'except*' on the same 'try'" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -21928,7 +22109,50 @@ invalid_try_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block* ((except_block+ except_star_block) | (except_star_block+ except_block)) block*")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':'")); + } + { // 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_208_var; + asdl_seq * _loop1_209_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * a; + if ( + (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (_loop0_208_var = _loop0_208_rule(p)) // block* + && + (_loop1_209_var = _loop1_209_rule(p)) // except_star_block+ + && + (a = _PyPegen_expect_token(p, 636)) // token='except' + && + (_opt_var = _tmp_210_rule(p), !p->error_indicator) // [expression ['as' NAME]] + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot have both 'except' and 'except*' on the same 'try'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'")); } _res = NULL; done: @@ -21970,7 +22194,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && @@ -21980,7 +22204,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_opt_var_1 = _tmp_206_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_211_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22012,13 +22236,13 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& (expression_var = expression_rule(p)) // expression && - (_opt_var_1 = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22045,7 +22269,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22070,14 +22294,14 @@ invalid_except_stmt_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); Token * _literal; - void *_tmp_208_var; + void *_tmp_213_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_208_var = _tmp_208_rule(p)) // NEWLINE | ':' + (_tmp_213_var = _tmp_213_rule(p)) // NEWLINE | ':' ) { D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); @@ -22123,7 +22347,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 625)) // token='finally' + (a = _PyPegen_expect_token(p, 632)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22180,11 +22404,11 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_209_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22216,7 +22440,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22273,13 +22497,13 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 629)) // token='except' + (a = _PyPegen_expect_token(p, 636)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_210_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_215_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22515,7 +22739,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -22545,7 +22769,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -22648,7 +22872,7 @@ invalid_class_argument_pattern_rule(Parser *p) asdl_pattern_seq* a; asdl_seq* 
keyword_patterns_var; if ( - (_opt_var = _tmp_211_rule(p), !p->error_indicator) // [positional_patterns ','] + (_opt_var = _tmp_216_rule(p), !p->error_indicator) // [positional_patterns ','] && (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns && @@ -22702,7 +22926,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22733,7 +22957,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 634)) // token='if' + (a = _PyPegen_expect_token(p, 641)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -22789,7 +23013,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22820,7 +23044,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='elif' + (a = _PyPegen_expect_token(p, 643)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22874,7 +23098,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='else' + (a = _PyPegen_expect_token(p, 644)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22928,7 +23152,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='while' + (_keyword = _PyPegen_expect_token(p, 646)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22959,7 +23183,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 639)) // token='while' + (a = _PyPegen_expect_token(p, 646)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23021,11 +23245,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23062,11 +23286,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 642)) // token='for' + (a = _PyPegen_expect_token(p, 649)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 643)) // token='in' + (_keyword = _PyPegen_expect_token(p, 650)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23132,7 +23356,7 @@ invalid_def_raw_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (a = _PyPegen_expect_token(p, 644)) // token='def' + (a = _PyPegen_expect_token(p, 651)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -23142,7 +23366,7 @@ invalid_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (_opt_var_2 = _tmp_212_rule(p), !p->error_indicator) // ['->' expression] + (_opt_var_2 = _tmp_217_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23198,11 +23422,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_213_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_218_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23233,11 +23457,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 646)) // token='class' + (a = _PyPegen_expect_token(p, 653)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23288,11 +23512,11 @@ invalid_double_starred_kvpairs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - asdl_seq * _gather_215_var; + asdl_seq * _gather_220_var; Token * _literal; void *invalid_kvpair_var; if ( - (_gather_215_var = _gather_215_rule(p)) // ','.double_starred_kvpair+ + (_gather_220_var = _gather_220_rule(p)) // ','.double_starred_kvpair+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -23300,7 +23524,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - _res = _PyPegen_dummy_name(p, _gather_215_var, _literal, invalid_kvpair_var); + _res = _PyPegen_dummy_name(p, _gather_220_var, _literal, invalid_kvpair_var); goto done; } p->mark = _mark; @@ -23353,7 +23577,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_217_rule, p) + _PyPegen_lookahead(1, _tmp_222_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23464,7 +23688,7 @@ invalid_kvpair_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_218_rule, p) + _PyPegen_lookahead(1, _tmp_223_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23486,6 +23710,59 @@ invalid_kvpair_rule(Parser *p) return _res; } +// invalid_starred_expression: '*' expression '=' expression +static void * +invalid_starred_expression_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '*' 
expression '=' expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression '=' expression")); + Token * _literal; + Token * a; + expr_ty b; + expr_ty expression_var; + if ( + (a = _PyPegen_expect_token(p, 16)) // token='*' + && + (expression_var = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ invalid_starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression '=' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "cannot assign to iterable argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression '=' expression")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) @@ -23840,7 +24117,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='import' + (_keyword = _PyPegen_expect_token(p, 606)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -23859,7 +24136,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -23898,7 +24175,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='def' + (_keyword = _PyPegen_expect_token(p, 651)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -23975,7 +24252,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='class' + (_keyword = _PyPegen_expect_token(p, 653)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -24033,7 +24310,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='with' + (_keyword = _PyPegen_expect_token(p, 614)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -24091,7 +24368,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='for' + (_keyword = _PyPegen_expect_token(p, 649)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -24320,12 +24597,12 
@@ _loop1_14_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_219_var; + void *_tmp_224_var; while ( - (_tmp_219_var = _tmp_219_rule(p)) // star_targets '=' + (_tmp_224_var = _tmp_224_rule(p)) // star_targets '=' ) { - _res = _tmp_219_var; + _res = _tmp_224_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -24503,7 +24780,7 @@ _tmp_17_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 572)) // token='from' + (_keyword = _PyPegen_expect_token(p, 607)) // token='from' && (z = expression_rule(p)) // expression ) @@ -24902,12 +25179,12 @@ _loop0_24_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_220_var; + void *_tmp_225_var; while ( - (_tmp_220_var = _tmp_220_rule(p)) // '.' | '...' + (_tmp_225_var = _tmp_225_rule(p)) // '.' | '...' ) { - _res = _tmp_220_var; + _res = _tmp_225_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -24971,12 +25248,12 @@ _loop1_25_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_221_var; + void *_tmp_226_var; while ( - (_tmp_221_var = _tmp_221_rule(p)) // '.' | '...' + (_tmp_226_var = _tmp_226_rule(p)) // '.' | '...' ) { - _res = _tmp_221_var; + _res = _tmp_226_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -25158,7 +25435,7 @@ _tmp_28_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -25325,7 +25602,7 @@ _tmp_31_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -25379,12 +25656,12 @@ _loop1_32_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_222_var; + void *_tmp_227_var; while ( - (_tmp_222_var = _tmp_222_rule(p)) // '@' named_expression NEWLINE + (_tmp_227_var = _tmp_227_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_222_var; + _res = _tmp_227_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -27365,7 +27642,7 @@ _tmp_62_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -27412,7 +27689,7 @@ _tmp_63_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -28468,12 +28745,12 @@ _loop1_80_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_223_var; + void *_tmp_228_var; while ( - (_tmp_223_var = 
_tmp_223_rule(p)) // ',' expression + (_tmp_228_var = _tmp_228_rule(p)) // ',' expression ) { - _res = _tmp_223_var; + _res = _tmp_228_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28542,12 +28819,12 @@ _loop1_81_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_224_var; + void *_tmp_229_var; while ( - (_tmp_224_var = _tmp_224_rule(p)) // ',' star_expression + (_tmp_229_var = _tmp_229_rule(p)) // ',' star_expression ) { - _res = _tmp_224_var; + _res = _tmp_229_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28736,12 +29013,12 @@ _loop1_84_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_225_var; + void *_tmp_230_var; while ( - (_tmp_225_var = _tmp_225_rule(p)) // 'or' conjunction + (_tmp_230_var = _tmp_230_rule(p)) // 'or' conjunction ) { - _res = _tmp_225_var; + _res = _tmp_230_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28810,12 +29087,12 @@ _loop1_85_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_226_var; + void *_tmp_231_var; while ( - (_tmp_226_var = _tmp_226_rule(p)) // 'and' inversion + (_tmp_231_var = _tmp_231_rule(p)) // 'and' inversion ) { - _res = _tmp_226_var; + _res = _tmp_231_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -29007,7 +29284,7 @@ _loop0_89_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_227_rule(p)) // slice | starred_expression + (elem = _tmp_232_rule(p)) // slice | starred_expression ) { _res = elem; @@ -29073,7 +29350,7 @@ _gather_88_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_227_rule(p)) // slice | starred_expression + (elem = _tmp_232_rule(p)) // slice | starred_expression && (seq = _loop0_89_rule(p)) // _loop0_89 ) @@ -30777,12 +31054,12 @@ _loop0_114_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_228_var; + void *_tmp_233_var; while ( - (_tmp_228_var = _tmp_228_rule(p)) // 'if' disjunction + (_tmp_233_var = _tmp_233_rule(p)) // 'if' disjunction ) { - _res = _tmp_228_var; + _res = _tmp_233_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30846,12 +31123,12 @@ _loop0_115_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_229_var; + void *_tmp_234_var; while ( - (_tmp_229_var = _tmp_229_rule(p)) // 'if' disjunction + (_tmp_234_var = _tmp_234_rule(p)) // 'if' disjunction ) { - _res = _tmp_229_var; + _res = _tmp_234_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30980,7 +31257,7 @@ _loop0_118_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = 
_tmp_230_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -31047,7 +31324,7 @@ _gather_117_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_230_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && (seq = _loop0_118_rule(p)) // _loop0_118 ) @@ -31623,12 +31900,12 @@ _loop0_128_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_231_var; + void *_tmp_236_var; while ( - (_tmp_231_var = _tmp_231_rule(p)) // ',' star_target + (_tmp_236_var = _tmp_236_rule(p)) // ',' star_target ) { - _res = _tmp_231_var; + _res = _tmp_236_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31812,12 +32089,12 @@ _loop1_131_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_232_var; + void *_tmp_237_var; while ( - (_tmp_232_var = _tmp_232_rule(p)) // ',' star_target + (_tmp_237_var = _tmp_237_rule(p)) // ',' star_target ) { - _res = _tmp_232_var; + _res = _tmp_237_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32600,9 +32877,109 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: 'True' | 'False' | 'None' +// _tmp_145: args ',' static void * _tmp_145_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // args ',' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); + Token * _literal; + expr_ty args_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); + _res = _PyPegen_dummy_name(p, args_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_146: ',' | ')' +static void * +_tmp_146_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); + } + { // ')' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_147: 'True' | 'False' | 'None' +static void * +_tmp_147_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32619,18 +32996,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 600)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'False' @@ -32638,18 +33015,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 602)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } { // 'None' @@ -32657,18 +33034,18 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 601)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); } _res = NULL; @@ -32677,9 +33054,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: NAME '=' +// _tmp_148: NAME '=' static void * -_tmp_146_rule(Parser *p) +_tmp_148_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32696,7 +33073,7 @@ _tmp_146_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); Token * _literal; expr_ty name_var; if ( @@ -32705,12 +33082,12 @@ _tmp_146_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); _res = _PyPegen_dummy_name(p, name_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '='")); } _res = NULL; @@ -32719,9 +33096,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: NAME STRING | SOFT_KEYWORD +// _tmp_149: NAME STRING | SOFT_KEYWORD static void * -_tmp_147_rule(Parser *p) +_tmp_149_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32738,7 +33115,7 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); expr_ty name_var; expr_ty string_var; if ( @@ -32747,12 +33124,12 @@ _tmp_147_rule(Parser *p) (string_var = _PyPegen_string_token(p)) // STRING ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); _res = _PyPegen_dummy_name(p, name_var, string_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING")); } { // SOFT_KEYWORD @@ -32760,18 +33137,18 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); expr_ty soft_keyword_var; if ( (soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); _res = soft_keyword_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "SOFT_KEYWORD")); } _res = NULL; @@ -32780,9 +33157,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: 'else' | ':' +// _tmp_150: 'else' | ':' static void * -_tmp_148_rule(Parser *p) +_tmp_150_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32799,18 +33176,18 @@ _tmp_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='else' + (_keyword = _PyPegen_expect_token(p, 644)) // token='else' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else'")); } { // ':' @@ -32818,18 +33195,18 @@ _tmp_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -32838,9 +33215,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _tmp_149: '=' | ':=' +// _tmp_151: '=' | ':=' static void * -_tmp_149_rule(Parser *p) +_tmp_151_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32857,18 +33234,18 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'='")); } { // ':=' @@ -32876,18 +33253,18 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -32896,9 +33273,9 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: list | tuple | genexp | 'True' | 'None' | 'False' +// _tmp_152: list | tuple | genexp | 'True' | 'None' | 'False' static void * -_tmp_150_rule(Parser *p) +_tmp_152_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32915,18 +33292,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // tuple @@ -32934,18 +33311,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // genexp @@ -32953,18 +33330,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "genexp")); } { // 'True' @@ -32972,18 +33349,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 600)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'None' @@ -32991,18 +33368,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 601)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } { // 'False' @@ -33010,18 +33387,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( (_keyword = _PyPegen_expect_token(p, 602)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } _res = NULL; @@ -33030,9 +33407,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: '=' | ':=' +// _tmp_153: '=' | ':=' static void * -_tmp_151_rule(Parser *p) +_tmp_153_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33049,18 +33426,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? 
"ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -33068,18 +33445,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -33088,9 +33465,9 @@ _tmp_151_rule(Parser *p) return _res; } -// _loop0_152: star_named_expressions +// _loop0_154: star_named_expressions static asdl_seq * -_loop0_152_rule(Parser *p) +_loop0_154_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33117,7 +33494,7 @@ _loop0_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_expr_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -33139,7 +33516,7 @@ _loop0_152_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33152,14 +33529,14 @@ _loop0_152_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_152_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_154_type, _seq); p->level--; return _seq; } -// _loop0_153: (star_targets '=') +// _loop0_155: (star_targets '=') static asdl_seq * -_loop0_153_rule(Parser *p) +_loop0_155_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33186,13 +33563,13 @@ _loop0_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_233_var; + D(fprintf(stderr, "%*c> _loop0_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_238_var; while ( - (_tmp_233_var = _tmp_233_rule(p)) // star_targets '=' + (_tmp_238_var = _tmp_238_rule(p)) // star_targets '=' ) { - _res = _tmp_233_var; + _res = _tmp_238_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33208,7 +33585,7 @@ _loop0_153_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33221,14 +33598,14 @@ _loop0_153_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_153_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_155_type, _seq); p->level--; return _seq; } -// _loop0_154: (star_targets '=') +// _loop0_156: (star_targets '=') static asdl_seq * -_loop0_154_rule(Parser *p) +_loop0_156_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33255,13 +33632,13 @@ _loop0_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_234_var; + D(fprintf(stderr, "%*c> _loop0_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_239_var; while ( - (_tmp_234_var = _tmp_234_rule(p)) // star_targets '=' + (_tmp_239_var = _tmp_239_rule(p)) // star_targets '=' ) { - _res = _tmp_234_var; + _res = _tmp_239_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33277,7 +33654,7 @@ _loop0_154_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_156[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33290,14 +33667,14 @@ _loop0_154_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_154_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_156_type, _seq); p->level--; return _seq; } -// _tmp_155: yield_expr | star_expressions +// _tmp_157: yield_expr | star_expressions static void * -_tmp_155_rule(Parser *p) +_tmp_157_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33314,18 +33691,18 @@ _tmp_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -33333,18 +33710,18 @@ _tmp_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -33353,9 +33730,9 @@ _tmp_155_rule(Parser *p) return _res; } -// _tmp_156: '[' | '(' | '{' +// _tmp_158: '[' | '(' | '{' static void * -_tmp_156_rule(Parser *p) +_tmp_158_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33372,18 +33749,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' @@ -33391,18 +33768,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'('")); } { // '{' @@ -33410,18 +33787,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33430,9 +33807,9 @@ _tmp_156_rule(Parser *p) return _res; } -// _tmp_157: '[' | '{' +// _tmp_159: '[' | '{' static void * -_tmp_157_rule(Parser *p) +_tmp_159_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33449,18 +33826,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -33468,18 +33845,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33488,9 +33865,9 @@ _tmp_157_rule(Parser *p) return _res; } -// _tmp_158: '[' | '{' +// _tmp_160: '[' | '{' static void * -_tmp_158_rule(Parser *p) +_tmp_160_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33507,18 +33884,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -33526,18 +33903,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33546,9 +33923,9 @@ _tmp_158_rule(Parser *p) return _res; } -// _tmp_159: slash_no_default | slash_with_default +// _tmp_161: slash_no_default | slash_with_default static void * -_tmp_159_rule(Parser *p) +_tmp_161_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33565,18 +33942,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -33584,18 +33961,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -33604,9 +33981,9 @@ _tmp_159_rule(Parser *p) return _res; } -// _loop0_160: param_maybe_default +// _loop0_162: param_maybe_default static asdl_seq * -_loop0_160_rule(Parser *p) +_loop0_162_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33633,7 +34010,7 @@ _loop0_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -33655,7 +34032,7 @@ _loop0_160_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33668,14 +34045,14 @@ _loop0_160_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_160_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_162_type, _seq); p->level--; return _seq; } -// _loop0_161: param_no_default +// _loop0_163: param_no_default static asdl_seq * -_loop0_161_rule(Parser *p) +_loop0_163_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33702,7 +34079,7 @@ _loop0_161_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33724,7 +34101,7 @@ _loop0_161_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33737,14 +34114,14 @@ _loop0_161_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_161_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_163_type, _seq); p->level--; return _seq; } -// _loop0_162: param_no_default +// _loop0_164: param_no_default static asdl_seq * -_loop0_162_rule(Parser *p) +_loop0_164_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33771,7 +34148,7 @@ _loop0_162_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33793,7 +34170,7 @@ _loop0_162_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33806,14 +34183,14 @@ _loop0_162_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_162_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_164_type, _seq); p->level--; return _seq; } -// _loop1_163: param_no_default +// _loop1_165: param_no_default static asdl_seq * -_loop1_163_rule(Parser *p) +_loop1_165_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33840,7 +34217,7 @@ _loop1_163_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -33862,7 +34239,7 @@ _loop1_163_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_163[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -33880,14 +34257,14 @@ _loop1_163_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_163_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_165_type, _seq); p->level--; return _seq; } -// _tmp_164: slash_no_default | slash_with_default +// _tmp_166: slash_no_default | slash_with_default static void * -_tmp_164_rule(Parser *p) +_tmp_166_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33904,18 +34281,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -33923,18 +34300,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -33943,9 +34320,9 @@ _tmp_164_rule(Parser *p) return _res; } -// _loop0_165: param_maybe_default +// _loop0_167: param_maybe_default static asdl_seq * -_loop0_165_rule(Parser *p) +_loop0_167_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33972,7 +34349,7 @@ _loop0_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -33994,7 +34371,7 @@ _loop0_165_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34007,14 +34384,14 @@ _loop0_165_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_165_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_167_type, _seq); p->level--; return _seq; } -// _tmp_166: ',' | param_no_default +// _tmp_168: ',' | param_no_default static void * -_tmp_166_rule(Parser *p) +_tmp_168_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34031,18 +34408,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // param_no_default @@ -34050,18 +34427,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } _res = NULL; @@ -34070,9 +34447,9 @@ _tmp_166_rule(Parser *p) return _res; } -// _loop0_167: param_maybe_default +// _loop0_169: param_maybe_default static asdl_seq * -_loop0_167_rule(Parser *p) +_loop0_169_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34099,7 +34476,7 @@ _loop0_167_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34121,7 +34498,7 @@ _loop0_167_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34134,14 +34511,14 @@ _loop0_167_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_167_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_169_type, _seq); p->level--; return _seq; } -// _loop1_168: param_maybe_default +// _loop1_170: param_maybe_default static asdl_seq * -_loop1_168_rule(Parser *p) +_loop1_170_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34168,7 +34545,7 @@ _loop1_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34190,7 +34567,7 @@ _loop1_168_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -34208,14 +34585,14 @@ _loop1_168_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_168_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_170_type, _seq); p->level--; return _seq; } -// _tmp_169: ')' | ',' +// _tmp_171: ')' | ',' static void * -_tmp_169_rule(Parser *p) +_tmp_171_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34232,18 +34609,18 @@ _tmp_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' @@ -34251,18 +34628,18 @@ _tmp_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -34271,9 +34648,9 @@ _tmp_169_rule(Parser *p) return _res; } -// _tmp_170: ')' | ',' (')' | '**') +// _tmp_172: ')' | ',' (')' | '**') static void * -_tmp_170_rule(Parser *p) +_tmp_172_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34290,18 +34667,18 @@ _tmp_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -34309,21 +34686,21 @@ _tmp_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_235_var; + void *_tmp_240_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_235_var = _tmp_235_rule(p)) // ')' | '**' + (_tmp_240_var = _tmp_240_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_235_var); + D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_240_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -34332,9 +34709,9 @@ _tmp_170_rule(Parser *p) return _res; } -// _tmp_171: param_no_default | ',' +// _tmp_173: param_no_default | ',' static void * -_tmp_171_rule(Parser *p) +_tmp_173_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34351,18 +34728,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34370,18 +34747,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34390,9 +34767,9 @@ _tmp_171_rule(Parser *p) return _res; } -// _loop0_172: param_maybe_default +// _loop0_174: param_maybe_default static asdl_seq * -_loop0_172_rule(Parser *p) +_loop0_174_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34419,7 +34796,7 @@ _loop0_172_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34441,7 +34818,7 @@ _loop0_172_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34454,14 +34831,14 @@ _loop0_172_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_172_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_174_type, _seq); p->level--; return _seq; } -// _tmp_173: param_no_default | ',' +// _tmp_175: param_no_default | ',' static void * -_tmp_173_rule(Parser *p) +_tmp_175_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34478,18 +34855,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34497,18 +34874,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34517,9 +34894,9 @@ _tmp_173_rule(Parser *p) return _res; } -// _tmp_174: '*' | '**' | '/' +// _tmp_176: '*' | '**' | '/' static void * -_tmp_174_rule(Parser *p) +_tmp_176_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34536,18 +34913,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -34555,18 +34932,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -34574,18 +34951,18 @@ _tmp_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -34594,9 +34971,9 @@ _tmp_174_rule(Parser *p) return _res; } -// _loop1_175: param_with_default +// _loop1_177: param_with_default static asdl_seq * -_loop1_175_rule(Parser *p) +_loop1_177_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34623,7 +35000,7 @@ _loop1_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -34645,7 +35022,7 @@ _loop1_175_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -34663,14 +35040,14 @@ _loop1_175_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_175_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_177_type, _seq); p->level--; return _seq; } -// _tmp_176: lambda_slash_no_default | lambda_slash_with_default +// _tmp_178: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_176_rule(Parser *p) +_tmp_178_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34687,18 +35064,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -34706,18 +35083,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -34726,9 +35103,9 @@ _tmp_176_rule(Parser *p) return _res; } -// _loop0_177: lambda_param_maybe_default +// _loop0_179: lambda_param_maybe_default static asdl_seq * -_loop0_177_rule(Parser *p) +_loop0_179_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34755,7 +35132,7 @@ _loop0_177_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -34777,7 +35154,7 @@ _loop0_177_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_177[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34790,14 +35167,14 @@ _loop0_177_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_177_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_179_type, _seq); p->level--; return _seq; } -// _loop0_178: lambda_param_no_default +// _loop0_180: lambda_param_no_default static asdl_seq * -_loop0_178_rule(Parser *p) +_loop0_180_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34824,7 +35201,7 @@ _loop0_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -34846,7 +35223,7 @@ _loop0_178_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34859,14 +35236,14 @@ _loop0_178_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_178_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_180_type, _seq); p->level--; return _seq; } -// _loop0_179: lambda_param_no_default +// _loop0_181: lambda_param_no_default static asdl_seq * -_loop0_179_rule(Parser *p) +_loop0_181_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34893,7 +35270,7 @@ _loop0_179_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -34915,7 +35292,7 @@ _loop0_179_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34928,14 +35305,14 @@ _loop0_179_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_179_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_181_type, _seq); p->level--; return _seq; } -// _loop0_181: ',' lambda_param +// _loop0_183: ',' lambda_param static asdl_seq * -_loop0_181_rule(Parser *p) +_loop0_183_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34962,7 +35339,7 @@ _loop0_181_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); + D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); Token * _literal; arg_ty elem; while ( @@ -34993,7 +35370,7 @@ _loop0_181_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' lambda_param")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35006,14 +35383,14 @@ _loop0_181_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_181_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_183_type, _seq); p->level--; return _seq; } -// _gather_180: lambda_param _loop0_181 +// _gather_182: lambda_param _loop0_183 static asdl_seq * -_gather_180_rule(Parser *p) +_gather_182_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35025,27 +35402,27 @@ _gather_180_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // lambda_param _loop0_181 + { // lambda_param _loop0_183 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c> _gather_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); arg_ty elem; asdl_seq * seq; if ( (elem = lambda_param_rule(p)) // lambda_param && - (seq = _loop0_181_rule(p)) // _loop0_181 + (seq = _loop0_183_rule(p)) // _loop0_183 ) { - D(fprintf(stderr, "%*c+ _gather_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c+ _gather_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_180[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_181")); + D(fprintf(stderr, "%*c%s _gather_182[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_183")); } _res = NULL; done: @@ -35053,9 +35430,9 @@ _gather_180_rule(Parser *p) return _res; } -// _tmp_182: lambda_slash_no_default | lambda_slash_with_default +// _tmp_184: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_182_rule(Parser *p) +_tmp_184_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35072,18 +35449,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -35091,18 +35468,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -35111,9 +35488,9 @@ _tmp_182_rule(Parser *p) return _res; } -// _loop0_183: lambda_param_maybe_default +// _loop0_185: lambda_param_maybe_default static asdl_seq * -_loop0_183_rule(Parser *p) +_loop0_185_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35140,7 +35517,7 @@ _loop0_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35162,7 +35539,7 @@ _loop0_183_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35175,14 +35552,14 @@ _loop0_183_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_183_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_185_type, _seq); p->level--; return _seq; } -// _tmp_184: ',' | lambda_param_no_default +// _tmp_186: ',' | lambda_param_no_default static void * -_tmp_184_rule(Parser *p) +_tmp_186_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35199,18 +35576,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } { // lambda_param_no_default @@ -35218,18 +35595,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } _res = NULL; @@ -35238,9 +35615,9 @@ _tmp_184_rule(Parser *p) return _res; } -// _loop0_185: lambda_param_maybe_default +// _loop0_187: lambda_param_maybe_default static asdl_seq * -_loop0_185_rule(Parser *p) +_loop0_187_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35267,7 +35644,7 @@ _loop0_185_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35289,7 +35666,7 @@ _loop0_185_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35302,14 +35679,14 @@ _loop0_185_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_185_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_187_type, _seq); p->level--; return _seq; } -// _loop1_186: lambda_param_maybe_default +// _loop1_188: lambda_param_maybe_default static asdl_seq * -_loop1_186_rule(Parser *p) +_loop1_188_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35336,7 +35713,7 @@ _loop1_186_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35358,7 +35735,7 @@ _loop1_186_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_186[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_188[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -35376,14 +35753,14 @@ _loop1_186_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_186_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_188_type, _seq); p->level--; return _seq; } -// _loop1_187: lambda_param_with_default +// _loop1_189: lambda_param_with_default static asdl_seq * -_loop1_187_rule(Parser *p) +_loop1_189_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35410,7 +35787,7 @@ _loop1_187_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -35432,7 +35809,7 @@ _loop1_187_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_187[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_189[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -35450,14 +35827,14 @@ _loop1_187_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_187_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_189_type, _seq); p->level--; return _seq; } -// _tmp_188: ':' | ',' (':' | '**') +// _tmp_190: ':' | ',' (':' | '**') static void * -_tmp_188_rule(Parser *p) +_tmp_190_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35474,18 +35851,18 @@ _tmp_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_188[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -35493,21 +35870,21 @@ _tmp_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_236_var; + void *_tmp_241_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_236_var = _tmp_236_rule(p)) // ':' | '**' + (_tmp_241_var = _tmp_241_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_188[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_236_var); + D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_241_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -35516,9 +35893,9 @@ _tmp_188_rule(Parser *p) return _res; } -// _tmp_189: lambda_param_no_default | ',' +// _tmp_191: lambda_param_no_default | ',' static void * -_tmp_189_rule(Parser *p) +_tmp_191_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35535,18 +35912,18 @@ _tmp_189_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35554,18 +35931,18 @@ _tmp_189_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -35574,9 +35951,9 @@ _tmp_189_rule(Parser *p) return _res; } -// _loop0_190: lambda_param_maybe_default +// _loop0_192: lambda_param_maybe_default static asdl_seq * -_loop0_190_rule(Parser *p) +_loop0_192_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35603,7 +35980,7 @@ _loop0_190_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35625,7 +36002,7 @@ _loop0_190_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35638,14 +36015,14 @@ _loop0_190_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_190_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_192_type, _seq); p->level--; return _seq; } -// _tmp_191: lambda_param_no_default | ',' +// _tmp_193: lambda_param_no_default | ',' static void * -_tmp_191_rule(Parser *p) +_tmp_193_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35662,18 +36039,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35681,18 +36058,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -35701,9 +36078,9 @@ _tmp_191_rule(Parser *p) return _res; } -// _tmp_192: '*' | '**' | '/' +// _tmp_194: '*' | '**' | '/' static void * -_tmp_192_rule(Parser *p) +_tmp_194_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35720,18 +36097,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -35739,18 +36116,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -35758,18 +36135,18 @@ _tmp_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -35778,9 +36155,9 @@ _tmp_192_rule(Parser *p) return _res; } -// _tmp_193: ',' | ')' | ':' +// _tmp_195: ',' | ')' | ':' static void * -_tmp_193_rule(Parser *p) +_tmp_195_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35797,18 +36174,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -35816,18 +36193,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ':' @@ -35835,18 +36212,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } _res = NULL; @@ -35855,9 +36232,9 @@ _tmp_193_rule(Parser *p) return _res; } -// _loop0_195: ',' (expression ['as' star_target]) +// _loop0_197: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_195_rule(Parser *p) +_loop0_197_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35884,13 +36261,13 @@ _loop0_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_237_rule(p)) // expression ['as' star_target] + (elem = _tmp_242_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -35915,7 +36292,7 @@ _loop0_195_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35928,14 +36305,14 @@ _loop0_195_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_195_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_197_type, _seq); p->level--; return _seq; } -// _gather_194: (expression ['as' star_target]) _loop0_195 +// _gather_196: (expression ['as' star_target]) _loop0_197 static asdl_seq * -_gather_194_rule(Parser *p) +_gather_196_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35947,27 +36324,27 @@ _gather_194_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_195 + { // (expression ['as' star_target]) _loop0_197 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_237_rule(p)) // expression ['as' star_target] + (elem = _tmp_242_rule(p)) // expression ['as' star_target] && - (seq = _loop0_195_rule(p)) // _loop0_195 + (seq = _loop0_197_rule(p)) // _loop0_197 ) { - D(fprintf(stderr, "%*c+ _gather_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_194[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_195")); + D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); } _res = NULL; done: @@ -35975,9 +36352,9 @@ _gather_194_rule(Parser *p) return _res; } -// _loop0_197: ',' (expressions ['as' star_target]) +// _loop0_199: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_197_rule(Parser *p) +_loop0_199_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36004,13 +36381,13 @@ _loop0_197_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_238_rule(p)) // expressions ['as' star_target] + (elem = _tmp_243_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36035,7 +36412,7 @@ _loop0_197_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36048,14 +36425,14 @@ _loop0_197_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_197_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_199_type, _seq); p->level--; return _seq; } -// _gather_196: (expressions ['as' star_target]) _loop0_197 +// _gather_198: (expressions ['as' star_target]) _loop0_199 static asdl_seq * -_gather_196_rule(Parser *p) +_gather_198_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36067,27 +36444,27 @@ _gather_196_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_197 + { // (expressions ['as' star_target]) _loop0_199 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_238_rule(p)) // expressions ['as' star_target] + (elem = _tmp_243_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_197_rule(p)) // _loop0_197 + (seq = _loop0_199_rule(p)) // _loop0_199 ) { - D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); } _res = NULL; done: @@ -36095,9 +36472,9 @@ _gather_196_rule(Parser *p) return _res; } -// _loop0_199: ',' (expression ['as' star_target]) +// _loop0_201: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_199_rule(Parser *p) +_loop0_201_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36124,13 +36501,13 @@ _loop0_199_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_239_rule(p)) // expression ['as' star_target] + (elem = _tmp_244_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -36155,7 +36532,7 @@ _loop0_199_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36168,14 +36545,14 @@ _loop0_199_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_199_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_201_type, _seq); p->level--; return _seq; } -// _gather_198: (expression ['as' star_target]) _loop0_199 +// _gather_200: (expression ['as' star_target]) _loop0_201 static asdl_seq * -_gather_198_rule(Parser *p) +_gather_200_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36187,27 +36564,27 @@ _gather_198_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_199 + { // (expression ['as' star_target]) _loop0_201 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_239_rule(p)) // expression ['as' star_target] + (elem = _tmp_244_rule(p)) // expression ['as' star_target] && - (seq = _loop0_199_rule(p)) // _loop0_199 + (seq = _loop0_201_rule(p)) // _loop0_201 ) { - D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); } _res = NULL; done: @@ -36215,9 +36592,9 @@ _gather_198_rule(Parser *p) return _res; } -// _loop0_201: ',' (expressions ['as' star_target]) +// _loop0_203: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_201_rule(Parser *p) +_loop0_203_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36244,13 +36621,13 @@ _loop0_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_240_rule(p)) // expressions ['as' star_target] + (elem = _tmp_245_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36275,7 +36652,7 @@ _loop0_201_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36288,14 +36665,14 @@ _loop0_201_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_201_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_203_type, _seq); p->level--; return _seq; } -// _gather_200: (expressions ['as' star_target]) _loop0_201 +// _gather_202: (expressions ['as' star_target]) _loop0_203 static asdl_seq * -_gather_200_rule(Parser *p) +_gather_202_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36307,27 +36684,27 @@ _gather_200_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_201 + { // (expressions ['as' star_target]) _loop0_203 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c> _gather_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_240_rule(p)) // expressions ['as' star_target] + (elem = _tmp_245_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_201_rule(p)) // _loop0_201 + (seq = _loop0_203_rule(p)) // _loop0_203 ) { - D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c+ _gather_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c%s _gather_202[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); } _res = NULL; done: @@ -36335,9 +36712,9 @@ _gather_200_rule(Parser *p) return _res; } -// _tmp_202: 'except' | 'finally' +// _tmp_204: 'except' | 'finally' static void * -_tmp_202_rule(Parser *p) +_tmp_204_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36354,18 +36731,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 629)) // token='except' + (_keyword = _PyPegen_expect_token(p, 636)) // token='except' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'")); } { // 'finally' @@ -36373,18 +36750,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'")); } _res = NULL; @@ -36393,9 +36770,9 @@ _tmp_202_rule(Parser *p) return _res; } -// _loop0_203: block +// _loop0_205: block static asdl_seq * -_loop0_203_rule(Parser *p) +_loop0_205_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36422,7 +36799,7 @@ _loop0_203_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36444,7 +36821,7 @@ _loop0_203_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36457,14 +36834,14 @@ _loop0_203_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_203_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_205_type, _seq); p->level--; return _seq; } -// _tmp_204: (except_block+ except_star_block) | (except_star_block+ except_block) -static void * -_tmp_204_rule(Parser *p) +// _loop1_206: except_block +static asdl_seq * +_loop1_206_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36474,45 +36851,103 @@ _tmp_204_rule(Parser *p) p->level--; return NULL; } - void * _res = NULL; + void *_res = NULL; int _mark = p->mark; - { // (except_block+ except_star_block) + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // except_block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(except_block+ except_star_block)")); - void *_tmp_241_var; - if ( - (_tmp_241_var = _tmp_241_rule(p)) // except_block+ except_star_block + D(fprintf(stderr, "%*c> _loop1_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + excepthandler_ty except_block_var; + while ( + (except_block_var = except_block_rule(p)) // except_block ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(except_block+ except_star_block)")); - _res = _tmp_241_var; - goto done; + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(except_block+ except_star_block)")); + D(fprintf(stderr, "%*c%s _loop1_206[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; } - { // (except_star_block+ except_block) + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_206_type, _seq); + p->level--; + return _seq; +} + +// _tmp_207: 'as' NAME +static void * +_tmp_207_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(except_star_block+ except_block)")); - void *_tmp_242_var; + D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty name_var; if ( - (_tmp_242_var = _tmp_242_rule(p)) // except_star_block+ except_block + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + && + (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(except_star_block+ except_block)")); - _res = _tmp_242_var; + D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(except_star_block+ except_block)")); + D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: @@ -36520,9 +36955,9 @@ _tmp_204_rule(Parser *p) return _res; } -// _loop0_205: block +// _loop0_208: block static asdl_seq * -_loop0_205_rule(Parser *p) +_loop0_208_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36549,7 +36984,7 @@ _loop0_205_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36571,7 +37006,7 @@ _loop0_205_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36584,14 +37019,131 @@ _loop0_205_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_205_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_208_type, _seq); p->level--; return _seq; } -// _tmp_206: 'as' NAME +// _loop1_209: except_star_block +static asdl_seq * +_loop1_209_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // except_star_block + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); + excepthandler_ty except_star_block_var; + while ( + (except_star_block_var = except_star_block_rule(p)) // except_star_block + ) + { + _res = except_star_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_209[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_209_type, _seq); + p->level--; + return _seq; +} + +// _tmp_210: expression ['as' NAME] static void * -_tmp_206_rule(Parser *p) +_tmp_210_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // expression ['as' NAME] + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' NAME] + ) + { + D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + _res = _PyPegen_dummy_name(p, expression_var, _opt_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' NAME]")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_211: 'as' NAME +static void * +_tmp_211_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36608,21 +37160,21 @@ _tmp_206_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_206[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_206[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36631,9 +37183,9 @@ _tmp_206_rule(Parser *p) return _res; } -// _tmp_207: 'as' NAME +// _tmp_212: 'as' NAME static void * -_tmp_207_rule(Parser *p) +_tmp_212_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36650,21 +37202,21 @@ _tmp_207_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36673,9 +37225,9 @@ _tmp_207_rule(Parser *p) return _res; } -// _tmp_208: NEWLINE | ':' +// _tmp_213: NEWLINE | ':' static void * -_tmp_208_rule(Parser *p) +_tmp_213_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36692,18 +37244,18 @@ _tmp_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_208[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } { // ':' @@ -36711,18 +37263,18 @@ _tmp_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_208[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -36731,9 +37283,9 @@ _tmp_208_rule(Parser *p) return _res; } -// _tmp_209: 'as' NAME +// _tmp_214: 'as' NAME static void * -_tmp_209_rule(Parser *p) +_tmp_214_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36750,21 +37302,21 @@ _tmp_209_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36773,9 +37325,9 @@ _tmp_209_rule(Parser *p) return _res; } -// _tmp_210: 'as' NAME +// _tmp_215: 'as' NAME static void * -_tmp_210_rule(Parser *p) +_tmp_215_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36792,21 +37344,21 @@ _tmp_210_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36815,9 +37367,9 @@ _tmp_210_rule(Parser *p) return _res; } -// _tmp_211: positional_patterns ',' +// _tmp_216: positional_patterns ',' static void * -_tmp_211_rule(Parser *p) +_tmp_216_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36834,7 +37386,7 @@ _tmp_211_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); Token * _literal; asdl_pattern_seq* positional_patterns_var; if ( @@ -36843,12 +37395,12 @@ _tmp_211_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); _res = _PyPegen_dummy_name(p, positional_patterns_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','")); } _res = NULL; @@ -36857,9 +37409,9 @@ _tmp_211_rule(Parser *p) return _res; } -// _tmp_212: '->' expression +// _tmp_217: '->' expression static void * -_tmp_212_rule(Parser *p) +_tmp_217_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36876,7 +37428,7 @@ _tmp_212_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty expression_var; if ( @@ -36885,12 +37437,12 @@ _tmp_212_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = _PyPegen_dummy_name(p, _literal, expression_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -36899,9 +37451,9 @@ _tmp_212_rule(Parser *p) return _res; } -// _tmp_213: '(' arguments? ')' +// _tmp_218: '(' arguments? ')' static void * -_tmp_213_rule(Parser *p) +_tmp_218_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36918,7 +37470,7 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -36931,12 +37483,12 @@ _tmp_213_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? 
')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -36945,9 +37497,9 @@ _tmp_213_rule(Parser *p) return _res; } -// _tmp_214: '(' arguments? ')' +// _tmp_219: '(' arguments? ')' static void * -_tmp_214_rule(Parser *p) +_tmp_219_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36964,7 +37516,7 @@ _tmp_214_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -36977,12 +37529,12 @@ _tmp_214_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -36991,9 +37543,9 @@ _tmp_214_rule(Parser *p) return _res; } -// _loop0_216: ',' double_starred_kvpair +// _loop0_221: ',' double_starred_kvpair static asdl_seq * -_loop0_216_rule(Parser *p) +_loop0_221_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37020,7 +37572,7 @@ _loop0_216_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c> _loop0_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -37051,7 +37603,7 @@ _loop0_216_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_216[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_221[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37064,14 +37616,14 @@ _loop0_216_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_216_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_221_type, _seq); p->level--; return _seq; } -// _gather_215: double_starred_kvpair _loop0_216 +// _gather_220: double_starred_kvpair _loop0_221 static asdl_seq * -_gather_215_rule(Parser *p) +_gather_220_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37083,27 +37635,27 @@ _gather_215_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_216 + { // double_starred_kvpair _loop0_221 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c> _gather_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); KeyValuePair* elem; asdl_seq * seq; if ( (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_216_rule(p)) // _loop0_216 + (seq = _loop0_221_rule(p)) // _loop0_221 ) { - D(fprintf(stderr, "%*c+ _gather_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c+ _gather_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_215[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_216")); + D(fprintf(stderr, "%*c%s _gather_220[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_221")); } _res = NULL; done: @@ -37111,9 +37663,9 @@ _gather_215_rule(Parser *p) return _res; } -// _tmp_217: '}' | ',' +// _tmp_222: '}' | ',' static void * -_tmp_217_rule(Parser *p) +_tmp_222_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37130,18 +37682,18 @@ _tmp_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'}'")); } { // ',' @@ -37149,18 +37701,18 @@ _tmp_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -37169,9 +37721,9 @@ _tmp_217_rule(Parser *p) return _res; } -// _tmp_218: '}' | ',' +// _tmp_223: '}' | ',' static void * -_tmp_218_rule(Parser *p) +_tmp_223_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37188,18 +37740,18 @@ _tmp_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } { // ',' @@ -37207,18 +37759,18 @@ _tmp_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -37227,9 +37779,9 @@ _tmp_218_rule(Parser *p) return _res; } -// _tmp_219: star_targets '=' +// _tmp_224: star_targets '=' static void * -_tmp_219_rule(Parser *p) +_tmp_224_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37246,7 +37798,7 @@ _tmp_219_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -37255,7 +37807,7 @@ _tmp_219_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37265,7 +37817,7 @@ _tmp_219_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -37274,9 +37826,9 @@ _tmp_219_rule(Parser *p) return _res; } -// _tmp_220: '.' | '...' +// _tmp_225: '.' | '...' static void * -_tmp_220_rule(Parser *p) +_tmp_225_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37293,18 +37845,18 @@ _tmp_220_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37312,18 +37864,18 @@ _tmp_220_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37332,9 +37884,9 @@ _tmp_220_rule(Parser *p) return _res; } -// _tmp_221: '.' | '...' +// _tmp_226: '.' | '...' 
static void * -_tmp_221_rule(Parser *p) +_tmp_226_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37351,18 +37903,18 @@ _tmp_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37370,18 +37922,18 @@ _tmp_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37390,9 +37942,9 @@ _tmp_221_rule(Parser *p) return _res; } -// _tmp_222: '@' named_expression NEWLINE +// _tmp_227: '@' named_expression NEWLINE static void * -_tmp_222_rule(Parser *p) +_tmp_227_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37409,7 +37961,7 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -37421,7 +37973,7 @@ _tmp_222_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37431,7 +37983,7 @@ _tmp_222_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -37440,9 +37992,9 @@ _tmp_222_rule(Parser *p) return _res; } -// _tmp_223: ',' expression +// _tmp_228: ',' expression static void * -_tmp_223_rule(Parser *p) +_tmp_228_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37459,7 +38011,7 @@ _tmp_223_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -37468,7 +38020,7 @@ _tmp_223_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37478,7 +38030,7 @@ _tmp_223_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -37487,9 +38039,9 @@ _tmp_223_rule(Parser *p) return _res; } -// _tmp_224: ',' star_expression +// _tmp_229: ',' star_expression static void * -_tmp_224_rule(Parser *p) +_tmp_229_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37506,7 +38058,7 @@ _tmp_224_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -37515,7 +38067,7 @@ _tmp_224_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37525,7 +38077,7 @@ _tmp_224_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -37534,9 +38086,9 @@ _tmp_224_rule(Parser *p) return _res; } -// _tmp_225: 'or' conjunction +// _tmp_230: 'or' conjunction static void * -_tmp_225_rule(Parser *p) +_tmp_230_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37553,7 +38105,7 @@ _tmp_225_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -37562,7 +38114,7 @@ _tmp_225_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37572,7 +38124,7 @@ _tmp_225_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -37581,9 +38133,9 @@ _tmp_225_rule(Parser *p) return _res; } -// _tmp_226: 'and' inversion +// _tmp_231: 'and' inversion static void * -_tmp_226_rule(Parser *p) +_tmp_231_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37600,7 +38152,7 @@ _tmp_226_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -37609,7 +38161,7 @@ _tmp_226_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37619,7 +38171,7 @@ _tmp_226_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -37628,9 +38180,9 @@ _tmp_226_rule(Parser *p) return _res; } -// _tmp_227: slice | starred_expression +// _tmp_232: slice | starred_expression static void * -_tmp_227_rule(Parser *p) +_tmp_232_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37647,18 +38199,18 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); expr_ty slice_var; if ( (slice_var = slice_rule(p)) // slice ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); _res = slice_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice")); } { // starred_expression @@ -37666,18 +38218,18 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } _res = NULL; @@ -37686,9 +38238,9 @@ _tmp_227_rule(Parser *p) return _res; } -// _tmp_228: 'if' disjunction +// _tmp_233: 'if' disjunction static void * -_tmp_228_rule(Parser *p) +_tmp_233_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37705,16 +38257,16 @@ _tmp_228_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37724,7 +38276,7 @@ _tmp_228_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -37733,9 +38285,9 @@ _tmp_228_rule(Parser *p) return _res; } -// _tmp_229: 'if' disjunction +// _tmp_234: 'if' disjunction static void * -_tmp_229_rule(Parser *p) +_tmp_234_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37752,16 +38304,16 @@ _tmp_229_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='if' + (_keyword = _PyPegen_expect_token(p, 641)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37771,7 +38323,7 @@ _tmp_229_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -37780,9 +38332,9 @@ _tmp_229_rule(Parser *p) return _res; } -// _tmp_230: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_235: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_230_rule(Parser *p) +_tmp_235_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37799,18 +38351,18 @@ _tmp_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -37818,20 +38370,20 @@ _tmp_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_243_var; + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_247_var; if ( - (_tmp_243_var = _tmp_243_rule(p)) // assignment_expression | expression !':=' + (_tmp_247_var = _tmp_247_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_243_var; + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_247_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -37840,9 +38392,9 @@ _tmp_230_rule(Parser *p) return _res; } -// _tmp_231: ',' star_target +// _tmp_236: ',' star_target static void * -_tmp_231_rule(Parser *p) +_tmp_236_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37859,7 +38411,7 @@ _tmp_231_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -37868,7 +38420,7 @@ _tmp_231_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37878,7 +38430,7 @@ _tmp_231_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -37887,9 +38439,9 @@ _tmp_231_rule(Parser *p) return _res; } -// _tmp_232: ',' star_target +// _tmp_237: ',' star_target static void * -_tmp_232_rule(Parser *p) +_tmp_237_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37906,7 +38458,7 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -37915,7 +38467,7 @@ _tmp_232_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37925,7 +38477,7 @@ _tmp_232_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -37934,9 +38486,9 @@ _tmp_232_rule(Parser *p) return _res; } -// _tmp_233: star_targets '=' +// _tmp_238: star_targets '=' static void * -_tmp_233_rule(Parser *p) +_tmp_238_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37953,7 +38505,7 @@ _tmp_233_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -37962,12 +38514,12 @@ _tmp_233_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -37976,9 +38528,9 @@ _tmp_233_rule(Parser *p) return _res; } -// _tmp_234: star_targets '=' +// _tmp_239: star_targets '=' static void * -_tmp_234_rule(Parser *p) +_tmp_239_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37995,7 +38547,7 @@ _tmp_234_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -38004,12 +38556,12 @@ _tmp_234_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -38018,9 +38570,9 @@ _tmp_234_rule(Parser *p) return _res; } -// _tmp_235: ')' | '**' +// _tmp_240: ')' | '**' static void * -_tmp_235_rule(Parser *p) +_tmp_240_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38037,18 +38589,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -38056,18 +38608,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38076,9 +38628,9 @@ _tmp_235_rule(Parser *p) return _res; } -// _tmp_236: ':' | '**' +// _tmp_241: ':' | '**' static void * -_tmp_236_rule(Parser *p) +_tmp_241_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38095,18 +38647,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -38114,18 +38666,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38134,9 +38686,9 @@ _tmp_236_rule(Parser *p) return _res; } -// _tmp_237: expression ['as' star_target] +// _tmp_242: expression ['as' star_target] static void * -_tmp_237_rule(Parser *p) +_tmp_242_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38153,22 +38705,22 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_244_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_248_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38177,9 +38729,9 @@ _tmp_237_rule(Parser *p) return _res; } -// _tmp_238: expressions ['as' star_target] +// _tmp_243: expressions ['as' star_target] static void * -_tmp_238_rule(Parser *p) +_tmp_243_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38196,22 +38748,22 @@ _tmp_238_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_245_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_249_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38220,9 +38772,9 @@ _tmp_238_rule(Parser *p) return _res; } -// _tmp_239: expression ['as' star_target] +// _tmp_244: expression ['as' star_target] static void * -_tmp_239_rule(Parser *p) +_tmp_244_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38239,22 +38791,22 @@ _tmp_239_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_250_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38263,9 +38815,9 @@ _tmp_239_rule(Parser *p) return _res; } -// _tmp_240: expressions ['as' star_target] +// _tmp_245: expressions ['as' star_target] static void * -_tmp_240_rule(Parser *p) +_tmp_245_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38282,22 +38834,22 @@ _tmp_240_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_247_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_251_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38306,51 +38858,9 @@ _tmp_240_rule(Parser *p) return _res; } -// _tmp_241: except_block+ except_star_block -static void * -_tmp_241_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // except_block+ except_star_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block+ except_star_block")); - asdl_seq * _loop1_248_var; - excepthandler_ty except_star_block_var; - if ( - (_loop1_248_var = _loop1_248_rule(p)) // except_block+ - && - (except_star_block_var = except_star_block_rule(p)) // except_star_block - ) - { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "except_block+ except_star_block")); - _res = _PyPegen_dummy_name(p, _loop1_248_var, except_star_block_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block+ except_star_block")); - } - _res = NULL; - done: - p->level--; - return _res; -} - -// _tmp_242: except_star_block+ except_block +// _tmp_246: 'as' NAME static void * -_tmp_242_rule(Parser *p) +_tmp_246_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38362,27 +38872,27 @@ _tmp_242_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // except_star_block+ except_block + { // 'as' NAME if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block+ except_block")); - asdl_seq * _loop1_249_var; - excepthandler_ty except_block_var; + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty name_var; if ( - (_loop1_249_var = _loop1_249_rule(p)) // except_star_block+ + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && - (except_block_var = except_block_rule(p)) // except_block + (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "except_star_block+ except_block")); - _res = _PyPegen_dummy_name(p, _loop1_249_var, except_block_var); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block+ except_block")); + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; done: @@ -38390,9 +38900,9 @@ _tmp_242_rule(Parser *p) return _res; } -// _tmp_243: assignment_expression | expression !':=' +// _tmp_247: assignment_expression | expression !':=' static void * -_tmp_243_rule(Parser *p) +_tmp_247_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38409,18 +38919,18 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -38428,7 +38938,7 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -38436,12 +38946,12 @@ _tmp_243_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -38450,9 +38960,9 @@ _tmp_243_rule(Parser *p) return _res; } -// _tmp_244: 'as' star_target +// _tmp_248: 'as' star_target static void * -_tmp_244_rule(Parser *p) +_tmp_248_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38469,21 +38979,21 @@ _tmp_244_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38492,9 +39002,9 @@ _tmp_244_rule(Parser *p) return _res; } -// _tmp_245: 'as' star_target +// _tmp_249: 'as' star_target static void * -_tmp_245_rule(Parser *p) +_tmp_249_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38511,21 +39021,21 @@ _tmp_245_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38534,9 +39044,9 @@ _tmp_245_rule(Parser *p) return _res; } -// _tmp_246: 'as' star_target +// _tmp_250: 'as' star_target static void * -_tmp_246_rule(Parser *p) +_tmp_250_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38553,21 +39063,21 @@ _tmp_246_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38576,9 +39086,9 @@ _tmp_246_rule(Parser *p) return _res; } -// _tmp_247: 'as' star_target +// _tmp_251: 'as' star_target static void * -_tmp_247_rule(Parser *p) +_tmp_251_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38595,21 +39105,21 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='as' + (_keyword = _PyPegen_expect_token(p, 639)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38618,154 +39128,6 @@ _tmp_247_rule(Parser *p) return _res; } -// _loop1_248: except_block -static asdl_seq * -_loop1_248_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // except_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); - excepthandler_ty except_block_var; - while ( - (except_block_var = except_block_rule(p)) // except_block - ) - { - _res = except_block_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_248[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_248_type, _seq); - p->level--; - return _seq; -} - -// _loop1_249: except_star_block -static asdl_seq * -_loop1_249_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // except_star_block - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); - excepthandler_ty except_star_block_var; - while ( - (except_star_block_var = except_star_block_rule(p)) // except_star_block - ) - { - _res = except_star_block_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_249[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_star_block")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_249_type, _seq); - p->level--; - return _seq; -} - void * _PyPegen_parse(Parser *p) { diff --git a/Parser/pegen.c b/Parser/pegen.c index a5d123da51296c..d34a86e9c883de 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -123,16 +123,18 @@ growable_comment_array_deallocate(growable_comment_array *arr) { } static int -_get_keyword_or_name_type(Parser *p, const char *name, int name_len) +_get_keyword_or_name_type(Parser *p, struct token *new_token) { + int name_len = new_token->end_col_offset - new_token->col_offset; assert(name_len > 0); + if (name_len >= p->n_keyword_lists || p->keywords[name_len] == NULL || p->keywords[name_len]->type == -1) { return NAME; } for (KeywordToken *k = p->keywords[name_len]; k != NULL && k->type != -1; k++) { - if (strncmp(k->str, name, name_len) == 0) { + if (strncmp(k->str, new_token->start, name_len) == 0) { return k->type; } } @@ -140,33 +142,26 @@ _get_keyword_or_name_type(Parser *p, const char *name, int name_len) } static int -initialize_token(Parser *p, Token *token, const char *start, const char *end, int token_type) { - assert(token != NULL); +initialize_token(Parser *p, Token *parser_token, struct token *new_token, int token_type) { + assert(parser_token != NULL); - token->type = (token_type == NAME) ? _get_keyword_or_name_type(p, start, (int)(end - start)) : token_type; - token->bytes = PyBytes_FromStringAndSize(start, end - start); - if (token->bytes == NULL) { + parser_token->type = (token_type == NAME) ? _get_keyword_or_name_type(p, new_token) : token_type; + parser_token->bytes = PyBytes_FromStringAndSize(new_token->start, new_token->end - new_token->start); + if (parser_token->bytes == NULL) { return -1; } - - if (_PyArena_AddPyObject(p->arena, token->bytes) < 0) { - Py_DECREF(token->bytes); + if (_PyArena_AddPyObject(p->arena, parser_token->bytes) < 0) { + Py_DECREF(parser_token->bytes); return -1; } - token->level = p->tok->level; - - const char *line_start = token_type == STRING ? p->tok->multi_line_start : p->tok->line_start; - int lineno = token_type == STRING ? p->tok->first_lineno : p->tok->lineno; - int end_lineno = p->tok->lineno; - - int col_offset = (start != NULL && start >= line_start) ? (int)(start - line_start) : -1; - int end_col_offset = (end != NULL && end >= p->tok->line_start) ? (int)(end - p->tok->line_start) : -1; - - token->lineno = lineno; - token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + col_offset : col_offset; - token->end_lineno = end_lineno; - token->end_col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + end_col_offset : end_col_offset; + parser_token->level = new_token->level; + parser_token->lineno = new_token->lineno; + parser_token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token->col_offset + : new_token->col_offset; + parser_token->end_lineno = new_token->end_lineno; + parser_token->end_col_offset = p->tok->lineno == p->starting_lineno ? 
p->starting_col_offset + new_token->end_col_offset + : new_token->end_col_offset; p->fill += 1; @@ -202,26 +197,25 @@ _resize_tokens_array(Parser *p) { int _PyPegen_fill_token(Parser *p) { - const char *start; - const char *end; - int type = _PyTokenizer_Get(p->tok, &start, &end); + struct token new_token; + int type = _PyTokenizer_Get(p->tok, &new_token); // Record and skip '# type: ignore' comments while (type == TYPE_IGNORE) { - Py_ssize_t len = end - start; + Py_ssize_t len = new_token.end_col_offset - new_token.col_offset; char *tag = PyMem_Malloc(len + 1); if (tag == NULL) { PyErr_NoMemory(); return -1; } - strncpy(tag, start, len); + strncpy(tag, new_token.start, len); tag[len] = '\0'; // Ownership of tag passes to the growable array if (!growable_comment_array_add(&p->type_ignore_comments, p->tok->lineno, tag)) { PyErr_NoMemory(); return -1; } - type = _PyTokenizer_Get(p->tok, &start, &end); + type = _PyTokenizer_Get(p->tok, &new_token); } // If we have reached the end and we are in single input mode we need to insert a newline and reset the parsing @@ -244,7 +238,7 @@ _PyPegen_fill_token(Parser *p) } Token *t = p->tokens[p->fill]; - return initialize_token(p, t, start, end, type); + return initialize_token(p, t, &new_token, type); } #if defined(Py_DEBUG) @@ -891,8 +885,7 @@ _PyPegen_run_parser_from_file_pointer(FILE *fp, int start_rule, PyObject *filena tok->fp_interactive = 1; } // This transfers the ownership to the tokenizer - tok->filename = filename_ob; - Py_INCREF(filename_ob); + tok->filename = Py_NewRef(filename_ob); // From here on we need to clean up even if there's an error mod_ty result = NULL; @@ -931,8 +924,7 @@ _PyPegen_run_parser_from_string(const char *str, int start_rule, PyObject *filen return NULL; } // This transfers the ownership to the tokenizer - tok->filename = filename_ob; - Py_INCREF(filename_ob); + tok->filename = Py_NewRef(filename_ob); // We need to clear up from here on mod_ty result = NULL; diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c index 95bbd43dc32621..7738cbaf9ef39e 100644 --- a/Parser/pegen_errors.c +++ b/Parser/pegen_errors.c @@ -164,11 +164,10 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { Py_ssize_t current_err_line = current_token->lineno; int ret = 0; + struct token new_token; for (;;) { - const char *start; - const char *end; - switch (_PyTokenizer_Get(p->tok, &start, &end)) { + switch (_PyTokenizer_Get(p->tok, &new_token)) { case ERRORTOKEN: if (p->tok->level != 0) { int error_lineno = p->tok->parenlinenostack[p->tok->level-1]; diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 9bc3b082136be5..c096bea7426e5c 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -21,9 +21,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token if (msg == NULL) { return -1; } - if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, p->tok->filename, + PyObject *category; + if (p->feature_version >= 12) { + category = PyExc_SyntaxWarning; + } + else { + category = PyExc_DeprecationWarning; + } + if (PyErr_WarnExplicitObject(category, msg, p->tok->filename, t->lineno, NULL, NULL) < 0) { - if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { + if (PyErr_ExceptionMatches(category)) { /* Replace the DeprecationWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); @@ -410,9 +417,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, PyMem_Free(str); return NULL; } - Py_INCREF(p->tok->filename); - - 
tok->filename = p->tok->filename; + tok->filename = Py_NewRef(p->tok->filename); tok->lineno = t->lineno + lines - 1; Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version, diff --git a/Parser/token.c b/Parser/token.c index fa03fbc450b2bc..6299ad2f563144 100644 --- a/Parser/token.c +++ b/Parser/token.c @@ -1,4 +1,4 @@ -/* Auto-generated by Tools/scripts/generate_token.py */ +/* Auto-generated by Tools/build/generate_token.py */ #include "Python.h" #include "pycore_token.h" diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 3c37fd9c45a49e..ce72e1529024c1 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -36,6 +36,13 @@ /* Don't ever change this -- it would break the portability of Python code */ #define TABSIZE 8 +#define MAKE_TOKEN(token_type) token_setup(tok, token, token_type, p_start, p_end) +#define MAKE_TYPE_COMMENT_TOKEN(token_type, col_offset, end_col_offset) (\ + type_comment_token_setup(tok, token, token_type, col_offset, end_col_offset, p_start, p_end)) +#define ADVANCE_LINENO() \ + tok->lineno++; \ + tok->col_offset = 0; + /* Forward */ static struct tok_state *tok_new(void); static int tok_nextc(struct tok_state *tok); @@ -71,6 +78,8 @@ tok_new(void) tok->pendin = 0; tok->prompt = tok->nextprompt = NULL; tok->lineno = 0; + tok->starting_col_offset = -1; + tok->col_offset = -1; tok->level = 0; tok->altindstack[0] = 0; tok->decoding_state = STATE_INIT; @@ -404,7 +413,11 @@ tok_readline_recode(struct tok_state *tok) { error_ret(tok); goto error; } - if (!tok_reserve_buf(tok, buflen + 1)) { + // Make room for the null terminator *and* potentially + // an extra newline character that we may need to artificially + // add. + size_t buffer_size = buflen + 2; + if (!tok_reserve_buf(tok, buffer_size)) { goto error; } memcpy(tok->inp, buf, buflen); @@ -869,7 +882,7 @@ tok_underflow_string(struct tok_state *tok) { tok->buf = tok->cur; } tok->line_start = tok->cur; - tok->lineno++; + ADVANCE_LINENO(); tok->inp = end; return 1; } @@ -928,7 +941,7 @@ tok_underflow_interactive(struct tok_state *tok) { else if (tok->start != NULL) { Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf; size_t size = strlen(newtok); - tok->lineno++; + ADVANCE_LINENO(); if (!tok_reserve_buf(tok, size + 1)) { PyMem_Free(tok->buf); tok->buf = NULL; @@ -941,7 +954,7 @@ tok_underflow_interactive(struct tok_state *tok) { tok->multi_line_start = tok->buf + cur_multi_line_start; } else { - tok->lineno++; + ADVANCE_LINENO(); PyMem_Free(tok->buf); tok->buf = newtok; tok->cur = tok->buf; @@ -991,12 +1004,13 @@ tok_underflow_file(struct tok_state *tok) { return 0; } if (tok->inp[-1] != '\n') { + assert(tok->inp + 1 < tok->end); /* Last line does not end in \n, fake one */ *tok->inp++ = '\n'; *tok->inp = '\0'; } - tok->lineno++; + ADVANCE_LINENO(); if (tok->decoding_state != STATE_NORMAL) { if (tok->lineno > 2) { tok->decoding_state = STATE_NORMAL; @@ -1054,6 +1068,7 @@ tok_nextc(struct tok_state *tok) int rc; for (;;) { if (tok->cur != tok->inp) { + tok->col_offset++; return Py_CHARMASK(*tok->cur++); /* Fast path */ } if (tok->done != E_OK) { @@ -1102,6 +1117,7 @@ tok_backup(struct tok_state *tok, int c) if ((int)(unsigned char)*tok->cur != c) { Py_FatalError("tok_backup: wrong character"); } + tok->col_offset--; } } @@ -1174,8 +1190,6 @@ syntaxerror_known_range(struct tok_state *tok, return ret; } - - static int indenterror(struct tok_state *tok) { @@ -1391,14 +1405,47 @@ tok_continuation_line(struct tok_state *tok) { } static int -tok_get(struct tok_state 
*tok, const char **p_start, const char **p_end) +type_comment_token_setup(struct tok_state *tok, struct token *token, int type, int col_offset, + int end_col_offset, const char *start, const char *end) +{ + token->level = tok->level; + token->lineno = token->end_lineno = tok->lineno; + token->col_offset = col_offset; + token->end_col_offset = end_col_offset; + token->start = start; + token->end = end; + return type; +} + +static int +token_setup(struct tok_state *tok, struct token *token, int type, const char *start, const char *end) +{ + assert((start == NULL && end == NULL) || (start != NULL && end != NULL)); + token->level = tok->level; + token->lineno = type == STRING ? tok->first_lineno : tok->lineno; + token->end_lineno = tok->lineno; + token->col_offset = token->end_col_offset = -1; + token->start = start; + token->end = end; + + if (start != NULL && end != NULL) { + token->col_offset = tok->starting_col_offset; + token->end_col_offset = tok->col_offset; + } + return type; +} + +static int +tok_get(struct tok_state *tok, struct token *token) { int c; int blankline, nonascii; - *p_start = *p_end = NULL; + const char *p_start = NULL; + const char *p_end = NULL; nextline: tok->start = NULL; + tok->starting_col_offset = -1; blankline = 0; /* Get indentation level */ @@ -1426,7 +1473,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) // the level of indentation of whatever comes next. cont_line_col = cont_line_col ? cont_line_col : col; if ((c = tok_continuation_line(tok)) == -1) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } else { @@ -1461,7 +1508,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (col == tok->indstack[tok->indent]) { /* No change */ if (altcol != tok->altindstack[tok->indent]) { - return indenterror(tok); + return MAKE_TOKEN(indenterror(tok)); } } else if (col > tok->indstack[tok->indent]) { @@ -1469,10 +1516,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (tok->indent+1 >= MAXINDENT) { tok->done = E_TOODEEP; tok->cur = tok->inp; - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } if (altcol <= tok->altindstack[tok->indent]) { - return indenterror(tok); + return MAKE_TOKEN(indenterror(tok)); } tok->pendin++; tok->indstack[++tok->indent] = col; @@ -1488,26 +1535,27 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (col != tok->indstack[tok->indent]) { tok->done = E_DEDENT; tok->cur = tok->inp; - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } if (altcol != tok->altindstack[tok->indent]) { - return indenterror(tok); + return MAKE_TOKEN(indenterror(tok)); } } } } tok->start = tok->cur; + tok->starting_col_offset = tok->col_offset; /* Return pending indents/dedents */ if (tok->pendin != 0) { if (tok->pendin < 0) { tok->pendin++; - return DEDENT; + return MAKE_TOKEN(DEDENT); } else { tok->pendin--; - return INDENT; + return MAKE_TOKEN(INDENT); } } @@ -1545,10 +1593,12 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) /* Set start of current token */ tok->start = tok->cur == NULL ? 
NULL : tok->cur - 1; + tok->starting_col_offset = tok->col_offset - 1; /* Skip comment, unless it's a type comment */ if (c == '#') { const char *prefix, *p, *type_start; + int current_starting_col_offset; while (c != EOF && c != '\n') { c = tok_nextc(tok); @@ -1556,14 +1606,17 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (tok->type_comments) { p = tok->start; + current_starting_col_offset = tok->starting_col_offset; prefix = type_comment_prefix; while (*prefix && p < tok->cur) { if (*prefix == ' ') { while (*p == ' ' || *p == '\t') { p++; + current_starting_col_offset++; } } else if (*prefix == *p) { p++; + current_starting_col_offset++; } else { break; } @@ -1574,7 +1627,9 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) /* This is a type comment if we matched all of type_comment_prefix. */ if (!*prefix) { int is_type_ignore = 1; + // +6 in order to skip the word 'ignore' const char *ignore_end = p + 6; + const int ignore_end_col_offset = current_starting_col_offset + 6; tok_backup(tok, c); /* don't eat the newline or EOF */ type_start = p; @@ -1587,34 +1642,34 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) && ((unsigned char)ignore_end[0] >= 128 || Py_ISALNUM(ignore_end[0])))); if (is_type_ignore) { - *p_start = ignore_end; - *p_end = tok->cur; + p_start = ignore_end; + p_end = tok->cur; /* If this type ignore is the only thing on the line, consume the newline also. */ if (blankline) { tok_nextc(tok); tok->atbol = 1; } - return TYPE_IGNORE; + return MAKE_TYPE_COMMENT_TOKEN(TYPE_IGNORE, ignore_end_col_offset, tok->col_offset); } else { - *p_start = type_start; /* after type_comment_prefix */ - *p_end = tok->cur; - return TYPE_COMMENT; + p_start = type_start; + p_end = tok->cur; + return MAKE_TYPE_COMMENT_TOKEN(TYPE_COMMENT, current_starting_col_offset, tok->col_offset); } } } } if (tok->done == E_INTERACT_STOP) { - return ENDMARKER; + return MAKE_TOKEN(ENDMARKER); } /* Check for EOF and errors now */ if (c == EOF) { if (tok->level) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } - return tok->done == E_EOF ? ENDMARKER : ERRORTOKEN; + return MAKE_TOKEN(tok->done == E_EOF ? ENDMARKER : ERRORTOKEN); } /* Identifier (most frequent token!) */ @@ -1654,11 +1709,11 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } tok_backup(tok, c); if (nonascii && !verify_identifier(tok)) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } - *p_start = tok->start; - *p_end = tok->cur; + p_start = tok->start; + p_end = tok->cur; /* async/await parsing block. */ if (tok->cur - tok->start == 5 && tok->start[0] == 'a') { @@ -1673,10 +1728,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (!tok->async_hacks || tok->async_def) { /* Always recognize the keywords. */ if (memcmp(tok->start, "async", 5) == 0) { - return ASYNC; + return MAKE_TOKEN(ASYNC); } if (memcmp(tok->start, "await", 5) == 0) { - return AWAIT; + return MAKE_TOKEN(AWAIT); } } else if (memcmp(tok->start, "async", 5) == 0) { @@ -1684,13 +1739,11 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) Look ahead one token to see if that is 'def'. 
*/ struct tok_state ahead_tok; - const char *ahead_tok_start = NULL; - const char *ahead_tok_end = NULL; + struct token ahead_token; int ahead_tok_kind; memcpy(&ahead_tok, tok, sizeof(ahead_tok)); - ahead_tok_kind = tok_get(&ahead_tok, &ahead_tok_start, - &ahead_tok_end); + ahead_tok_kind = tok_get(&ahead_tok, &ahead_token); if (ahead_tok_kind == NAME && ahead_tok.cur - ahead_tok.start == 3 @@ -1700,12 +1753,12 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) returning a plain NAME token, return ASYNC. */ tok->async_def_indent = tok->indent; tok->async_def = 1; - return ASYNC; + return MAKE_TOKEN(ASYNC); } } } - return NAME; + return MAKE_TOKEN(NAME); } /* Newline */ @@ -1714,15 +1767,15 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (blankline || tok->level > 0) { goto nextline; } - *p_start = tok->start; - *p_end = tok->cur - 1; /* Leave '\n' out of the string */ + p_start = tok->start; + p_end = tok->cur - 1; /* Leave '\n' out of the string */ tok->cont_line = 0; if (tok->async_def) { /* We're somewhere inside an 'async def' function, and we've encountered a NEWLINE after its signature. */ tok->async_def_nl = 1; } - return NEWLINE; + return MAKE_TOKEN(NEWLINE); } /* Period or number starting with period? */ @@ -1733,9 +1786,9 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } else if (c == '.') { c = tok_nextc(tok); if (c == '.') { - *p_start = tok->start; - *p_end = tok->cur; - return ELLIPSIS; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(ELLIPSIS); } else { tok_backup(tok, c); @@ -1745,9 +1798,9 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) else { tok_backup(tok, c); } - *p_start = tok->start; - *p_end = tok->cur; - return DOT; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(DOT); } /* Number */ @@ -1764,14 +1817,14 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } if (!isxdigit(c)) { tok_backup(tok, c); - return syntaxerror(tok, "invalid hexadecimal literal"); + return MAKE_TOKEN(syntaxerror(tok, "invalid hexadecimal literal")); } do { c = tok_nextc(tok); } while (isxdigit(c)); } while (c == '_'); if (!verify_end_of_number(tok, c, "hexadecimal")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } else if (c == 'o' || c == 'O') { @@ -1783,12 +1836,12 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } if (c < '0' || c >= '8') { if (isdigit(c)) { - return syntaxerror(tok, - "invalid digit '%c' in octal literal", c); + return MAKE_TOKEN(syntaxerror(tok, + "invalid digit '%c' in octal literal", c)); } else { tok_backup(tok, c); - return syntaxerror(tok, "invalid octal literal"); + return MAKE_TOKEN(syntaxerror(tok, "invalid octal literal")); } } do { @@ -1796,11 +1849,11 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } while ('0' <= c && c < '8'); } while (c == '_'); if (isdigit(c)) { - return syntaxerror(tok, - "invalid digit '%c' in octal literal", c); + return MAKE_TOKEN(syntaxerror(tok, + "invalid digit '%c' in octal literal", c)); } if (!verify_end_of_number(tok, c, "octal")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } else if (c == 'b' || c == 'B') { @@ -1812,12 +1865,11 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } if (c != '0' && c != '1') { if (isdigit(c)) { - return syntaxerror(tok, - "invalid digit '%c' in binary literal", c); + return MAKE_TOKEN(syntaxerror(tok, "invalid digit '%c' in 
binary literal", c)); } else { tok_backup(tok, c); - return syntaxerror(tok, "invalid binary literal"); + return MAKE_TOKEN(syntaxerror(tok, "invalid binary literal")); } } do { @@ -1825,11 +1877,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } while (c == '0' || c == '1'); } while (c == '_'); if (isdigit(c)) { - return syntaxerror(tok, - "invalid digit '%c' in binary literal", c); + return MAKE_TOKEN(syntaxerror(tok, "invalid digit '%c' in binary literal", c)); } if (!verify_end_of_number(tok, c, "binary")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } else { @@ -1841,7 +1892,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) c = tok_nextc(tok); if (!isdigit(c)) { tok_backup(tok, c); - return syntaxerror(tok, "invalid decimal literal"); + return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); } } if (c != '0') { @@ -1854,7 +1905,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) nonzero = 1; c = tok_decimal_tail(tok); if (c == 0) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } if (c == '.') { @@ -1870,15 +1921,15 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) else if (nonzero) { /* Old-style octal: now disallowed. */ tok_backup(tok, c); - return syntaxerror_known_range( + return MAKE_TOKEN(syntaxerror_known_range( tok, (int)(tok->start + 1 - tok->line_start), (int)(zeros_end - tok->line_start), "leading zeros in decimal integer " "literals are not permitted; " - "use an 0o prefix for octal integers"); + "use an 0o prefix for octal integers")); } if (!verify_end_of_number(tok, c, "decimal")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } } @@ -1886,7 +1937,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) /* Decimal */ c = tok_decimal_tail(tok); if (c == 0) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } { /* Accept floating point numbers. 
*/ @@ -1897,7 +1948,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (isdigit(c)) { c = tok_decimal_tail(tok); if (c == 0) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } } @@ -1911,21 +1962,21 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) c = tok_nextc(tok); if (!isdigit(c)) { tok_backup(tok, c); - return syntaxerror(tok, "invalid decimal literal"); + return MAKE_TOKEN(syntaxerror(tok, "invalid decimal literal")); } } else if (!isdigit(c)) { tok_backup(tok, c); if (!verify_end_of_number(tok, e, "decimal")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } tok_backup(tok, e); - *p_start = tok->start; - *p_end = tok->cur; - return NUMBER; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(NUMBER); } c = tok_decimal_tail(tok); if (c == 0) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } if (c == 'j' || c == 'J') { @@ -1933,18 +1984,18 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) imaginary: c = tok_nextc(tok); if (!verify_end_of_number(tok, c, "imaginary")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } else if (!verify_end_of_number(tok, c, "decimal")) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } } tok_backup(tok, c); - *p_start = tok->start; - *p_end = tok->cur; - return NUMBER; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(NUMBER); } letter_quote: @@ -1997,7 +2048,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (c != '\n') { tok->done = E_EOFS; } - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } else { syntaxerror(tok, "unterminated string literal (detected at" @@ -2005,7 +2056,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (c != '\n') { tok->done = E_EOLS; } - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } } if (c == quote) { @@ -2019,15 +2070,15 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } } - *p_start = tok->start; - *p_end = tok->cur; - return STRING; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(STRING); } /* Line continuation */ if (c == '\\') { if ((c = tok_continuation_line(tok)) == -1) { - return ERRORTOKEN; + return MAKE_TOKEN(ERRORTOKEN); } tok->cont_line = 1; goto again; /* Read next line */ @@ -2036,19 +2087,19 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) /* Check for two-character token */ { int c2 = tok_nextc(tok); - int token = _PyToken_TwoChars(c, c2); - if (token != OP) { + int current_token = _PyToken_TwoChars(c, c2); + if (current_token != OP) { int c3 = tok_nextc(tok); - int token3 = _PyToken_ThreeChars(c, c2, c3); - if (token3 != OP) { - token = token3; + int current_token3 = _PyToken_ThreeChars(c, c2, c3); + if (current_token3 != OP) { + current_token = current_token3; } else { tok_backup(tok, c3); } - *p_start = tok->start; - *p_end = tok->cur; - return token; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(current_token); } tok_backup(tok, c2); } @@ -2059,7 +2110,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) case '[': case '{': if (tok->level >= MAXLEVEL) { - return syntaxerror(tok, "too many nested parentheses"); + return MAKE_TOKEN(syntaxerror(tok, "too many nested parentheses")); } tok->parenstack[tok->level] = c; tok->parenlinenostack[tok->level] = tok->lineno; @@ -2070,7 +2121,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) case ']': case '}': 
if (!tok->level) { - return syntaxerror(tok, "unmatched '%c'", c); + return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c)); } tok->level--; int opening = tok->parenstack[tok->level]; @@ -2079,16 +2130,16 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) (opening == '{' && c == '}'))) { if (tok->parenlinenostack[tok->level] != tok->lineno) { - return syntaxerror(tok, + return MAKE_TOKEN(syntaxerror(tok, "closing parenthesis '%c' does not match " "opening parenthesis '%c' on line %d", - c, opening, tok->parenlinenostack[tok->level]); + c, opening, tok->parenlinenostack[tok->level])); } else { - return syntaxerror(tok, + return MAKE_TOKEN(syntaxerror(tok, "closing parenthesis '%c' does not match " "opening parenthesis '%c'", - c, opening); + c, opening)); } } break; @@ -2097,20 +2148,19 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) if (!Py_UNICODE_ISPRINTABLE(c)) { char hex[9]; (void)PyOS_snprintf(hex, sizeof(hex), "%04X", c); - return syntaxerror(tok, "invalid non-printable character U+%s", hex); + return MAKE_TOKEN(syntaxerror(tok, "invalid non-printable character U+%s", hex)); } /* Punctuation character */ - *p_start = tok->start; - *p_end = tok->cur; - return _PyToken_OneChar(c); + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(_PyToken_OneChar(c)); } int -_PyTokenizer_Get(struct tok_state *tok, - const char **p_start, const char **p_end) +_PyTokenizer_Get(struct tok_state *tok, struct token *token) { - int result = tok_get(tok, p_start, p_end); + int result = tok_get(tok, token); if (tok->decoding_erred) { result = ERRORTOKEN; tok->done = E_DECODE; @@ -2166,8 +2216,6 @@ _PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) { struct tok_state *tok; FILE *fp; - const char *p_start = NULL; - const char *p_end = NULL; char *encoding = NULL; fp = fdopen_borrow(fd); @@ -2180,8 +2228,7 @@ _PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) return NULL; } if (filename != NULL) { - Py_INCREF(filename); - tok->filename = filename; + tok->filename = Py_NewRef(filename); } else { tok->filename = PyUnicode_FromString(""); @@ -2191,8 +2238,9 @@ _PyTokenizer_FindEncodingFilename(int fd, PyObject *filename) return encoding; } } + struct token token; while (tok->lineno < 2 && tok->done == E_OK) { - _PyTokenizer_Get(tok, &p_start, &p_end); + _PyTokenizer_Get(tok, &token); } fclose(fp); if (tok->encoding) { diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 5ac64a99b7d661..2542d30e1da0ed 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -27,6 +27,12 @@ enum interactive_underflow_t { IUNDERFLOW_STOP, }; +struct token { + int level; + int lineno, col_offset, end_lineno, end_col_offset; + const char *start, *end; +}; + /* Tokenizer state */ struct tok_state { /* Input state; buf <= cur <= inp <= end */ @@ -51,6 +57,8 @@ struct tok_state { int lineno; /* Current line number */ int first_lineno; /* First line of a single line or multi line string expression (cf. 
issue 16806) */ + int starting_col_offset; /* The column offset at the beginning of a token */ + int col_offset; /* Current col offset */ int level; /* () [] {} Parentheses nesting level */ /* Used to allow free continuations inside them */ char parenstack[MAXLEVEL]; @@ -94,7 +102,7 @@ extern struct tok_state *_PyTokenizer_FromUTF8(const char *, int); extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char*, const char *, const char *); extern void _PyTokenizer_Free(struct tok_state *); -extern int _PyTokenizer_Get(struct tok_state *, const char **, const char **); +extern int _PyTokenizer_Get(struct tok_state *, struct token *); #define tok_dump _Py_tok_dump diff --git a/Programs/_bootstrap_python.c b/Programs/_bootstrap_python.c index 6ecbf0c72b5ff4..bbac0c4e1a8a45 100644 --- a/Programs/_bootstrap_python.c +++ b/Programs/_bootstrap_python.c @@ -2,7 +2,7 @@ /* Frozen modules bootstrap * * Limited and restricted Python interpreter to run - * "Tools/scripts/deepfreeze.py" on systems with no or older Python + * "Tools/build/deepfreeze.py" on systems with no or older Python * interpreter. */ diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c index 3d27b79c237c36..9e2169f32e9211 100644 --- a/Programs/_freeze_module.c +++ b/Programs/_freeze_module.c @@ -2,7 +2,7 @@ modules into frozen modules (like Lib/importlib/_bootstrap.py into Python/importlib.h). - This is used directly by Tools/scripts/freeze_modules.py, and indirectly by "make regen-frozen". + This is used directly by Tools/build/freeze_modules.py, and indirectly by "make regen-frozen". See Python/frozen.c for more info. @@ -194,6 +194,7 @@ write_frozen(const char *outpath, const char *inpath, const char *name, if (ferror(outfile)) { fprintf(stderr, "error when writing to '%s'\n", outpath); + fclose(outfile); return -1; } fclose(outfile); diff --git a/Programs/_testembed.c b/Programs/_testembed.c index d635c5a4abe34a..a6ce3f7b200550 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1,7 +1,6 @@ #ifndef Py_BUILD_CORE_MODULE # define Py_BUILD_CORE_MODULE #endif -#define NEEDS_PY_IDENTIFIER /* Always enable assertion (even in release mode) */ #undef NDEBUG @@ -22,7 +21,7 @@ char **main_argv; /********************************************************* * Embedded interpreter tests that need a custom exe * - * Executed via 'EmbeddingTests' in Lib/test/test_capi.py + * Executed via Lib/test/test_embed.py *********************************************************/ // Use to display the usage @@ -73,7 +72,7 @@ static void init_from_config_clear(PyConfig *config) } -static void _testembed_Py_Initialize(void) +static void _testembed_Py_InitializeFromConfig(void) { PyConfig config; _PyConfig_InitCompatConfig(&config); @@ -81,6 +80,12 @@ static void _testembed_Py_Initialize(void) init_from_config_clear(&config); } +static void _testembed_Py_Initialize(void) +{ + Py_SetProgramName(PROGRAM_NAME); + Py_Initialize(); +} + /***************************************************** * Test repeated initialisation and subinterpreters @@ -110,7 +115,7 @@ static int test_repeated_init_and_subinterpreters(void) for (int i=1; i <= INIT_LOOPS; i++) { printf("--- Pass %d ---\n", i); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); mainstate = PyThreadState_Get(); PyEval_ReleaseThread(mainstate); @@ -168,7 +173,7 @@ static int test_repeated_init_exec(void) fprintf(stderr, "--- Loop #%d ---\n", i); fflush(stderr); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); int err = 
PyRun_SimpleString(code); Py_Finalize(); if (err) { @@ -178,6 +183,23 @@ static int test_repeated_init_exec(void) return 0; } +/**************************************************************************** + * Test the Py_Initialize(Ex) convenience/compatibility wrappers + ***************************************************************************/ +// This is here to help ensure there are no wrapper resource leaks (gh-96853) +static int test_repeated_simple_init(void) +{ + for (int i=1; i <= INIT_LOOPS; i++) { + fprintf(stderr, "--- Loop #%d ---\n", i); + fflush(stderr); + + _testembed_Py_Initialize(); + Py_Finalize(); + printf("Finalized\n"); // Give test_embed some output to check + } + return 0; +} + /***************************************************** * Test forcing a particular IO encoding @@ -199,7 +221,7 @@ static void check_stdio_details(const char *encoding, const char * errors) fflush(stdout); /* Force the given IO encoding */ Py_SetStandardStreamEncoding(encoding, errors); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); PyRun_SimpleString( "import sys;" "print('stdin: {0.encoding}:{0.errors}'.format(sys.stdin));" @@ -308,7 +330,7 @@ static int test_pre_initialization_sys_options(void) dynamic_xoption = NULL; _Py_EMBED_PREINIT_CHECK("Initializing interpreter\n"); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); _Py_EMBED_PREINIT_CHECK("Check sys module contents\n"); PyRun_SimpleString("import sys; " "print('sys.warnoptions:', sys.warnoptions); " @@ -352,7 +374,7 @@ static int test_bpo20891(void) return 1; } - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); unsigned long thrd = PyThread_start_new_thread(bpo20891_thread, &lock); if (thrd == PYTHREAD_INVALID_THREAD_ID) { @@ -375,7 +397,7 @@ static int test_bpo20891(void) static int test_initialize_twice(void) { - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); /* bpo-33932: Calling Py_Initialize() twice should do nothing * (and not crash!). 
*/ @@ -393,7 +415,7 @@ static int test_initialize_pymain(void) L"print(f'Py_Main() after Py_Initialize: " L"sys.argv={sys.argv}')"), L"arg2"}; - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); /* bpo-34008: Calling Py_Main() after Py_Initialize() must not crash */ Py_Main(Py_ARRAY_LENGTH(argv), argv); @@ -416,7 +438,7 @@ dump_config(void) static int test_init_initialize_config(void) { - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); dump_config(); Py_Finalize(); return 0; @@ -681,8 +703,6 @@ static int test_init_from_config(void) config.safe_path = 1; - config._isolated_interpreter = 1; - putenv("PYTHONINTMAXSTRDIGITS=6666"); config.int_max_str_digits = 31337; @@ -769,7 +789,7 @@ static int test_init_compat_env(void) /* Test initialization from environment variables */ Py_IgnoreEnvironmentFlag = 0; set_all_env_vars(); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); dump_config(); Py_Finalize(); return 0; @@ -805,7 +825,7 @@ static int test_init_env_dev_mode(void) /* Test initialization from environment variables */ Py_IgnoreEnvironmentFlag = 0; set_all_env_vars_dev_mode(); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); dump_config(); Py_Finalize(); return 0; @@ -818,7 +838,7 @@ static int test_init_env_dev_mode_alloc(void) Py_IgnoreEnvironmentFlag = 0; set_all_env_vars_dev_mode(); putenv("PYTHONMALLOC=malloc"); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); dump_config(); Py_Finalize(); return 0; @@ -1158,7 +1178,7 @@ static int test_open_code_hook(void) } Py_IgnoreEnvironmentFlag = 0; - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); result = 0; PyObject *r = PyFile_OpenCode("$$test-filename"); @@ -1222,7 +1242,7 @@ static int _test_audit(Py_ssize_t setValue) Py_IgnoreEnvironmentFlag = 0; PySys_AddAuditHook(_audit_hook, &sawSet); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); if (PySys_Audit("_testembed.raise", NULL) == 0) { printf("No error raised"); @@ -1278,7 +1298,7 @@ static int test_audit_subinterpreter(void) { Py_IgnoreEnvironmentFlag = 0; PySys_AddAuditHook(_audit_subinterpreter_hook, NULL); - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); Py_NewInterpreter(); Py_NewInterpreter(); @@ -1870,16 +1890,23 @@ static int test_unicode_id_init(void) { // bpo-42882: Test that _PyUnicode_FromId() works // when Python is initialized multiples times. - _Py_IDENTIFIER(test_unicode_id_init); + + // This is equivalent to `_Py_IDENTIFIER(test_unicode_id_init)` + // but since `_Py_IDENTIFIER` is disabled when `Py_BUILD_CORE` + // is defined, it is manually expanded here. + static _Py_Identifier PyId_test_unicode_id_init = { + .string = "test_unicode_id_init", + .index = -1, + }; // Initialize Python once without using the identifier - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); Py_Finalize(); // Now initialize Python multiple times and use the identifier. // The first _PyUnicode_FromId() call initializes the identifier index. 
for (int i=0; i<3; i++) { - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); PyObject *str1, *str2; @@ -1901,6 +1928,18 @@ static int test_unicode_id_init(void) } +static int test_init_main_interpreter_settings(void) +{ + _testembed_Py_Initialize(); + (void) PyRun_SimpleStringFlags( + "import _testinternalcapi, json; " + "print(json.dumps(_testinternalcapi.get_interp_settings(0)))", + 0); + Py_Finalize(); + return 0; +} + + #ifndef MS_WINDOWS #include "test_frozenmain.h" // M_test_frozenmain @@ -2011,7 +2050,7 @@ unwrap_allocator(PyMemAllocatorEx *allocator) static int test_get_incomplete_frame(void) { - _testembed_Py_Initialize(); + _testembed_Py_InitializeFromConfig(); PyMemAllocatorEx allocator; wrap_allocator(&allocator); // Force an allocation with an incomplete (generator) frame: @@ -2043,6 +2082,7 @@ struct TestCase static struct TestCase TestCases[] = { // Python initialization {"test_repeated_init_exec", test_repeated_init_exec}, + {"test_repeated_simple_init", test_repeated_simple_init}, {"test_forced_io_encoding", test_forced_io_encoding}, {"test_repeated_init_and_subinterpreters", test_repeated_init_and_subinterpreters}, {"test_repeated_init_and_inittab", test_repeated_init_and_inittab}, @@ -2087,6 +2127,7 @@ static struct TestCase TestCases[] = { {"test_run_main_loop", test_run_main_loop}, {"test_get_argc_argv", test_get_argc_argv}, {"test_init_use_frozen_modules", test_init_use_frozen_modules}, + {"test_init_main_interpreter_settings", test_init_main_interpreter_settings}, // Audit {"test_open_code_hook", test_open_code_hook}, diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index 677ce180f95fdd..96be3ce3c25c3f 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -1,7 +1,7 @@ // Auto-generated by Programs/freeze_test_frozenmain.py unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0, - 0,0,0,0,0,243,182,0,0,0,151,0,100,0,100,1, + 0,0,0,0,0,243,184,0,0,0,151,0,100,0,100,1, 108,0,90,0,100,0,100,1,108,1,90,1,2,0,101,2, 100,2,171,1,0,0,0,0,0,0,0,0,1,0,2,0, 101,2,100,3,101,0,106,6,0,0,0,0,0,0,0,0, @@ -12,31 +12,31 @@ unsigned char M_test_frozenmain[] = { 0,0,0,0,90,5,100,5,68,0,93,23,0,0,90,6, 2,0,101,2,100,6,101,6,155,0,100,7,101,5,101,6, 25,0,0,0,0,0,0,0,0,0,155,0,157,4,171,1, - 0,0,0,0,0,0,0,0,1,0,140,25,100,1,83,0, - 41,8,233,0,0,0,0,78,122,18,70,114,111,122,101,110, - 32,72,101,108,108,111,32,87,111,114,108,100,122,8,115,121, - 115,46,97,114,103,118,218,6,99,111,110,102,105,103,41,5, - 218,12,112,114,111,103,114,97,109,95,110,97,109,101,218,10, - 101,120,101,99,117,116,97,98,108,101,218,15,117,115,101,95, - 101,110,118,105,114,111,110,109,101,110,116,218,17,99,111,110, - 102,105,103,117,114,101,95,99,95,115,116,100,105,111,218,14, - 98,117,102,102,101,114,101,100,95,115,116,100,105,111,122,7, - 99,111,110,102,105,103,32,122,2,58,32,41,7,218,3,115, - 121,115,218,17,95,116,101,115,116,105,110,116,101,114,110,97, - 108,99,97,112,105,218,5,112,114,105,110,116,218,4,97,114, - 103,118,218,11,103,101,116,95,99,111,110,102,105,103,115,114, - 3,0,0,0,218,3,107,101,121,169,0,243,0,0,0,0, - 250,18,116,101,115,116,95,102,114,111,122,101,110,109,97,105, - 110,46,112,121,250,8,60,109,111,100,117,108,101,62,114,18, - 0,0,0,1,0,0,0,115,149,0,0,0,240,3,1,1, - 1,240,8,0,1,11,128,10,128,10,128,10,216,0,24,208, - 0,24,208,0,24,208,0,24,224,0,5,128,5,208,6,26, - 212,0,27,208,0,27,216,0,5,128,5,128,106,144,35,151, - 40,145,40,212,0,27,208,0,27,216,9,38,208,9,26,215, - 9,38,209,9,38,212,9,40,168,24,212,9,50,128,6,240, 
- 2,6,12,2,240,0,7,1,42,241,0,7,1,42,128,67, - 240,14,0,5,10,128,69,208,10,40,144,67,208,10,40,208, - 10,40,152,54,160,35,156,59,208,10,40,208,10,40,212,4, - 41,208,4,41,208,4,41,240,15,7,1,42,240,0,7,1, - 42,114,16,0,0,0, + 0,0,0,0,0,0,0,0,1,0,140,25,4,0,100,1, + 83,0,41,8,233,0,0,0,0,78,122,18,70,114,111,122, + 101,110,32,72,101,108,108,111,32,87,111,114,108,100,122,8, + 115,121,115,46,97,114,103,118,218,6,99,111,110,102,105,103, + 41,5,218,12,112,114,111,103,114,97,109,95,110,97,109,101, + 218,10,101,120,101,99,117,116,97,98,108,101,218,15,117,115, + 101,95,101,110,118,105,114,111,110,109,101,110,116,218,17,99, + 111,110,102,105,103,117,114,101,95,99,95,115,116,100,105,111, + 218,14,98,117,102,102,101,114,101,100,95,115,116,100,105,111, + 122,7,99,111,110,102,105,103,32,122,2,58,32,41,7,218, + 3,115,121,115,218,17,95,116,101,115,116,105,110,116,101,114, + 110,97,108,99,97,112,105,218,5,112,114,105,110,116,218,4, + 97,114,103,118,218,11,103,101,116,95,99,111,110,102,105,103, + 115,114,3,0,0,0,218,3,107,101,121,169,0,243,0,0, + 0,0,250,18,116,101,115,116,95,102,114,111,122,101,110,109, + 97,105,110,46,112,121,250,8,60,109,111,100,117,108,101,62, + 114,18,0,0,0,1,0,0,0,115,154,0,0,0,240,3, + 1,1,1,240,8,0,1,11,128,10,128,10,128,10,216,0, + 24,208,0,24,208,0,24,208,0,24,224,0,5,128,5,208, + 6,26,212,0,27,208,0,27,216,0,5,128,5,128,106,144, + 35,151,40,145,40,212,0,27,208,0,27,216,9,38,208,9, + 26,215,9,38,209,9,38,212,9,40,168,24,212,9,50,128, + 6,240,2,6,12,2,240,0,7,1,42,241,0,7,1,42, + 128,67,240,14,0,5,10,128,69,208,10,40,144,67,208,10, + 40,208,10,40,152,54,160,35,156,59,208,10,40,208,10,40, + 212,4,41,208,4,41,208,4,41,240,15,7,1,42,240,0, + 7,1,42,240,0,7,1,42,114,16,0,0,0, }; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 2571e28bc1690d..d113c47b95392e 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -263,6 +263,10 @@ void _PyAST_Fini(PyInterpreterState *interp) Py_CLEAR(state->vararg); Py_CLEAR(state->withitem_type); + if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { + Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); + } + #if !defined(NDEBUG) state->initialized = -1; #else @@ -993,10 +997,11 @@ static PyObject* ast2obj_list(struct ast_state *state, asdl_seq *seq, PyObject* static PyObject* ast2obj_object(struct ast_state *Py_UNUSED(state), void *o) { - if (!o) - o = Py_None; - Py_INCREF((PyObject*)o); - return (PyObject*)o; + PyObject *op = (PyObject*)o; + if (!op) { + op = Py_None; + } + return Py_NewRef(op); } #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object @@ -1018,9 +1023,11 @@ static int obj2ast_object(struct ast_state *Py_UNUSED(state), PyObject* obj, PyO *out = NULL; return -1; } - Py_INCREF(obj); + *out = Py_NewRef(obj); + } + else { + *out = NULL; } - *out = obj; return 0; } @@ -1030,8 +1037,7 @@ static int obj2ast_constant(struct ast_state *Py_UNUSED(state), PyObject* obj, P *out = NULL; return -1; } - Py_INCREF(obj); - *out = obj; + *out = Py_NewRef(obj); return 0; } @@ -4732,14 +4738,11 @@ PyObject* ast2obj_expr_context(struct ast_state *state, expr_context_ty o) { switch(o) { case Load: - Py_INCREF(state->Load_singleton); - return state->Load_singleton; + return Py_NewRef(state->Load_singleton); case Store: - Py_INCREF(state->Store_singleton); - return state->Store_singleton; + return Py_NewRef(state->Store_singleton); case Del: - Py_INCREF(state->Del_singleton); - return state->Del_singleton; + return Py_NewRef(state->Del_singleton); } Py_UNREACHABLE(); } @@ -4747,11 +4750,9 @@ PyObject* 
ast2obj_boolop(struct ast_state *state, boolop_ty o) { switch(o) { case And: - Py_INCREF(state->And_singleton); - return state->And_singleton; + return Py_NewRef(state->And_singleton); case Or: - Py_INCREF(state->Or_singleton); - return state->Or_singleton; + return Py_NewRef(state->Or_singleton); } Py_UNREACHABLE(); } @@ -4759,44 +4760,31 @@ PyObject* ast2obj_operator(struct ast_state *state, operator_ty o) { switch(o) { case Add: - Py_INCREF(state->Add_singleton); - return state->Add_singleton; + return Py_NewRef(state->Add_singleton); case Sub: - Py_INCREF(state->Sub_singleton); - return state->Sub_singleton; + return Py_NewRef(state->Sub_singleton); case Mult: - Py_INCREF(state->Mult_singleton); - return state->Mult_singleton; + return Py_NewRef(state->Mult_singleton); case MatMult: - Py_INCREF(state->MatMult_singleton); - return state->MatMult_singleton; + return Py_NewRef(state->MatMult_singleton); case Div: - Py_INCREF(state->Div_singleton); - return state->Div_singleton; + return Py_NewRef(state->Div_singleton); case Mod: - Py_INCREF(state->Mod_singleton); - return state->Mod_singleton; + return Py_NewRef(state->Mod_singleton); case Pow: - Py_INCREF(state->Pow_singleton); - return state->Pow_singleton; + return Py_NewRef(state->Pow_singleton); case LShift: - Py_INCREF(state->LShift_singleton); - return state->LShift_singleton; + return Py_NewRef(state->LShift_singleton); case RShift: - Py_INCREF(state->RShift_singleton); - return state->RShift_singleton; + return Py_NewRef(state->RShift_singleton); case BitOr: - Py_INCREF(state->BitOr_singleton); - return state->BitOr_singleton; + return Py_NewRef(state->BitOr_singleton); case BitXor: - Py_INCREF(state->BitXor_singleton); - return state->BitXor_singleton; + return Py_NewRef(state->BitXor_singleton); case BitAnd: - Py_INCREF(state->BitAnd_singleton); - return state->BitAnd_singleton; + return Py_NewRef(state->BitAnd_singleton); case FloorDiv: - Py_INCREF(state->FloorDiv_singleton); - return state->FloorDiv_singleton; + return Py_NewRef(state->FloorDiv_singleton); } Py_UNREACHABLE(); } @@ -4804,17 +4792,13 @@ PyObject* ast2obj_unaryop(struct ast_state *state, unaryop_ty o) { switch(o) { case Invert: - Py_INCREF(state->Invert_singleton); - return state->Invert_singleton; + return Py_NewRef(state->Invert_singleton); case Not: - Py_INCREF(state->Not_singleton); - return state->Not_singleton; + return Py_NewRef(state->Not_singleton); case UAdd: - Py_INCREF(state->UAdd_singleton); - return state->UAdd_singleton; + return Py_NewRef(state->UAdd_singleton); case USub: - Py_INCREF(state->USub_singleton); - return state->USub_singleton; + return Py_NewRef(state->USub_singleton); } Py_UNREACHABLE(); } @@ -4822,35 +4806,25 @@ PyObject* ast2obj_cmpop(struct ast_state *state, cmpop_ty o) { switch(o) { case Eq: - Py_INCREF(state->Eq_singleton); - return state->Eq_singleton; + return Py_NewRef(state->Eq_singleton); case NotEq: - Py_INCREF(state->NotEq_singleton); - return state->NotEq_singleton; + return Py_NewRef(state->NotEq_singleton); case Lt: - Py_INCREF(state->Lt_singleton); - return state->Lt_singleton; + return Py_NewRef(state->Lt_singleton); case LtE: - Py_INCREF(state->LtE_singleton); - return state->LtE_singleton; + return Py_NewRef(state->LtE_singleton); case Gt: - Py_INCREF(state->Gt_singleton); - return state->Gt_singleton; + return Py_NewRef(state->Gt_singleton); case GtE: - Py_INCREF(state->GtE_singleton); - return state->GtE_singleton; + return Py_NewRef(state->GtE_singleton); case Is: - Py_INCREF(state->Is_singleton); - return 
state->Is_singleton; + return Py_NewRef(state->Is_singleton); case IsNot: - Py_INCREF(state->IsNot_singleton); - return state->IsNot_singleton; + return Py_NewRef(state->IsNot_singleton); case In: - Py_INCREF(state->In_singleton); - return state->In_singleton; + return Py_NewRef(state->In_singleton); case NotIn: - Py_INCREF(state->NotIn_singleton); - return state->NotIn_singleton; + return Py_NewRef(state->NotIn_singleton); } Py_UNREACHABLE(); } @@ -5511,8 +5485,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Module' node")) { goto failed; } @@ -5548,8 +5521,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (type_ignores == NULL) goto failed; for (i = 0; i < len; i++) { type_ignore_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Module' node")) { goto failed; } @@ -5597,8 +5569,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Interactive' node")) { goto failed; } @@ -5676,8 +5647,7 @@ obj2ast_mod(struct ast_state *state, PyObject* obj, mod_ty* out, PyArena* arena) if (argtypes == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionType' node")) { goto failed; } @@ -5873,8 +5843,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionDef' node")) { goto failed; } @@ -5910,8 +5879,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'FunctionDef' node")) { goto failed; } @@ -6034,8 +6002,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFunctionDef' node")) { goto failed; } @@ -6071,8 +6038,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFunctionDef' node")) { goto failed; } @@ -6178,8 +6144,7 @@ obj2ast_stmt(struct ast_state *state, 
PyObject* obj, stmt_ty* out, PyArena* if (bases == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6215,8 +6180,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (keywords == NULL) goto failed; for (i = 0; i < len; i++) { keyword_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6252,8 +6216,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6289,8 +6252,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (decorator_list == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ClassDef' node")) { goto failed; } @@ -6370,8 +6332,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (targets == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Delete' node")) { goto failed; } @@ -6422,8 +6383,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (targets == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Assign' node")) { goto failed; } @@ -6694,8 +6654,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'For' node")) { goto failed; } @@ -6731,8 +6690,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'For' node")) { goto failed; } @@ -6836,8 +6794,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFor' node")) { goto failed; } @@ -6873,8 +6830,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = 
Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncFor' node")) { goto failed; } @@ -6960,8 +6916,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'While' node")) { goto failed; } @@ -6997,8 +6952,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'While' node")) { goto failed; } @@ -7066,8 +7020,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'If' node")) { goto failed; } @@ -7103,8 +7056,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'If' node")) { goto failed; } @@ -7155,8 +7107,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (items == NULL) goto failed; for (i = 0; i < len; i++) { withitem_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'With' node")) { goto failed; } @@ -7192,8 +7143,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'With' node")) { goto failed; } @@ -7261,8 +7211,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (items == NULL) goto failed; for (i = 0; i < len; i++) { withitem_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncWith' node")) { goto failed; } @@ -7298,8 +7247,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'AsyncWith' node")) { goto failed; } @@ -7383,8 +7331,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (cases == NULL) goto failed; for (i = 0; i < len; i++) { match_case_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Match' node")) { goto failed; } @@ -7484,8 +7431,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == 
NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7521,8 +7467,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (handlers == NULL) goto failed; for (i = 0; i < len; i++) { excepthandler_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7558,8 +7503,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7595,8 +7539,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (finalbody == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Try' node")) { goto failed; } @@ -7648,8 +7591,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7685,8 +7627,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (handlers == NULL) goto failed; for (i = 0; i < len; i++) { excepthandler_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7722,8 +7663,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (orelse == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7759,8 +7699,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (finalbody == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'TryStar' node")) { goto failed; } @@ -7857,8 +7796,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { alias_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Import' node")) { goto failed; } @@ -7926,8 +7864,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { alias_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" 
while traversing 'ImportFrom' node")) { goto failed; } @@ -7993,8 +7930,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Global' node")) { goto failed; } @@ -8043,8 +7979,7 @@ obj2ast_stmt(struct ast_state *state, PyObject* obj, stmt_ty* out, PyArena* if (names == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Nonlocal' node")) { goto failed; } @@ -8269,8 +8204,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'BoolOp' node")) { goto failed; } @@ -8596,8 +8530,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (keys == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Dict' node")) { goto failed; } @@ -8633,8 +8566,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Dict' node")) { goto failed; } @@ -8683,8 +8615,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Set' node")) { goto failed; } @@ -8751,8 +8682,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'ListComp' node")) { goto failed; } @@ -8819,8 +8749,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'SetComp' node")) { goto failed; } @@ -8905,8 +8834,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'DictComp' node")) { goto failed; } @@ -8973,8 +8901,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (generators == NULL) goto 
failed; for (i = 0; i < len; i++) { comprehension_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'GeneratorExp' node")) { goto failed; } @@ -9132,8 +9059,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (ops == NULL) goto failed; for (i = 0; i < len; i++) { cmpop_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Compare' node")) { goto failed; } @@ -9169,8 +9095,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (comparators == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Compare' node")) { goto failed; } @@ -9238,8 +9163,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (args == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Call' node")) { goto failed; } @@ -9275,8 +9199,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (keywords == NULL) goto failed; for (i = 0; i < len; i++) { keyword_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Call' node")) { goto failed; } @@ -9392,8 +9315,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (values == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'JoinedStr' node")) { goto failed; } @@ -9719,8 +9641,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'List' node")) { goto failed; } @@ -9787,8 +9708,7 @@ obj2ast_expr(struct ast_state *state, PyObject* obj, expr_ty* out, PyArena* if (elts == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'Tuple' node")) { goto failed; } @@ -10274,8 +10194,7 @@ obj2ast_comprehension(struct ast_state *state, PyObject* obj, comprehension_ty* if (ifs == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'comprehension' node")) { goto failed; } @@ -10464,8 +10383,7 @@ obj2ast_excepthandler(struct ast_state *state, PyObject* obj, excepthandler_ty* if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" 
while traversing 'ExceptHandler' node")) { goto failed; } @@ -10526,8 +10444,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (posonlyargs == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10563,8 +10480,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (args == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10617,8 +10533,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (kwonlyargs == NULL) goto failed; for (i = 0; i < len; i++) { arg_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10654,8 +10569,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (kw_defaults == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -10708,8 +10622,7 @@ obj2ast_arguments(struct ast_state *state, PyObject* obj, arguments_ty* out, if (defaults == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'arguments' node")) { goto failed; } @@ -11228,8 +11141,7 @@ obj2ast_match_case(struct ast_state *state, PyObject* obj, match_case_ty* out, if (body == NULL) goto failed; for (i = 0; i < len; i++) { stmt_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'match_case' node")) { goto failed; } @@ -11425,8 +11337,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchSequence' node")) { goto failed; } @@ -11477,8 +11388,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (keys == NULL) goto failed; for (i = 0; i < len; i++) { expr_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchMapping' node")) { goto failed; } @@ -11514,8 +11424,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchMapping' node")) { goto failed; } @@ -11601,8 +11510,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* 
out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11638,8 +11546,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (kwd_attrs == NULL) goto failed; for (i = 0; i < len; i++) { identifier val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11675,8 +11582,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (kwd_patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchClass' node")) { goto failed; } @@ -11804,8 +11710,7 @@ obj2ast_pattern(struct ast_state *state, PyObject* obj, pattern_ty* out, if (patterns == NULL) goto failed; for (i = 0; i < len; i++) { pattern_ty val; - PyObject *tmp2 = PyList_GET_ITEM(tmp, i); - Py_INCREF(tmp2); + PyObject *tmp2 = Py_NewRef(PyList_GET_ITEM(tmp, i)); if (_Py_EnterRecursiveCall(" while traversing 'MatchOr' node")) { goto failed; } diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index c5124a6942e7f2..8daa9877254e2e 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -60,9 +60,8 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source) static PyObject * tokenizeriter_next(tokenizeriterobject *it) { - const char *start; - const char *end; - int type = _PyTokenizer_Get(it->tok, &start, &end); + struct token token; + int type = _PyTokenizer_Get(it->tok, &token); if (type == ERRORTOKEN && PyErr_Occurred()) { return NULL; } @@ -71,11 +70,11 @@ tokenizeriter_next(tokenizeriterobject *it) return NULL; } PyObject *str = NULL; - if (start == NULL || end == NULL) { + if (token.start == NULL || token.end == NULL) { str = PyUnicode_FromString(""); } else { - str = PyUnicode_FromStringAndSize(start, end - start); + str = PyUnicode_FromStringAndSize(token.start, token.end - token.start); } if (str == NULL) { return NULL; @@ -92,11 +91,11 @@ tokenizeriter_next(tokenizeriterobject *it) int end_lineno = it->tok->lineno; int col_offset = -1; int end_col_offset = -1; - if (start != NULL && start >= line_start) { - col_offset = (int)(start - line_start); + if (token.start != NULL && token.start >= line_start) { + col_offset = (int)(token.start - line_start); } - if (end != NULL && end >= it->tok->line_start) { - end_col_offset = (int)(end - it->tok->line_start); + if (token.end != NULL && token.end >= it->tok->line_start) { + end_col_offset = (int)(token.end - it->tok->line_start); } return Py_BuildValue("(NiiiiiN)", str, type, lineno, end_lineno, col_offset, end_col_offset, line); diff --git a/Python/_warnings.c b/Python/_warnings.c index 1b9e107ea30b13..046c37eb49bac0 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -382,8 +382,7 @@ get_filter(PyInterpreterState *interp, PyObject *category, action = get_default_action(interp); if (action != NULL) { - Py_INCREF(Py_None); - *item = Py_None; + *item = Py_NewRef(Py_None); return action; } @@ -468,8 +467,7 @@ normalize_module(PyObject *filename) module = PyUnicode_Substring(filename, 0, len-3); } else { - module = filename; - 
Py_INCREF(module); + module = Py_NewRef(filename); } return module; } @@ -751,8 +749,7 @@ warn_explicit(PyThreadState *tstate, PyObject *category, PyObject *message, goto cleanup; return_none: - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); cleanup: Py_XDECREF(item); @@ -804,8 +801,7 @@ next_external_frame(PyFrameObject *frame) { do { PyFrameObject *back = PyFrame_GetBack(frame); - Py_DECREF(frame); - frame = back; + Py_SETREF(frame, back); } while (frame != NULL && is_internal_frame(frame)); return frame; @@ -831,8 +827,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno, if (stack_level <= 0 || is_internal_frame(f)) { while (--stack_level > 0 && f != NULL) { PyFrameObject *back = PyFrame_GetBack(f); - Py_DECREF(f); - f = back; + Py_SETREF(f, back); } } else { @@ -848,8 +843,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno, } else { globals = f->f_frame->f_globals; - *filename = f->f_frame->f_code->co_filename; - Py_INCREF(*filename); + *filename = Py_NewRef(f->f_frame->f_code->co_filename); *lineno = PyFrame_GetLineNumber(f); Py_DECREF(f); } @@ -977,6 +971,7 @@ warnings_warn_impl(PyObject *module, PyObject *message, PyObject *category, static PyObject * get_source_line(PyInterpreterState *interp, PyObject *module_globals, int lineno) { + PyObject *external; PyObject *loader; PyObject *module_name; PyObject *get_source; @@ -984,12 +979,18 @@ get_source_line(PyInterpreterState *interp, PyObject *module_globals, int lineno PyObject *source_list; PyObject *source_line; - /* Check/get the requisite pieces needed for the loader. */ - loader = _PyDict_GetItemWithError(module_globals, &_Py_ID(__loader__)); + /* stolen from import.c */ + external = PyObject_GetAttrString(interp->importlib, "_bootstrap_external"); + if (external == NULL) { + return NULL; + } + + loader = PyObject_CallMethod(external, "_bless_my_loader", "O", module_globals, NULL); + Py_DECREF(external); if (loader == NULL) { return NULL; } - Py_INCREF(loader); + module_name = _PyDict_GetItemWithError(module_globals, &_Py_ID(__name__)); if (!module_name) { Py_DECREF(loader); @@ -1079,8 +1080,14 @@ warnings_warn_explicit_impl(PyObject *module, PyObject *message, return returned; } +/*[clinic input] +_filters_mutated as warnings_filters_mutated + +[clinic start generated code]*/ + static PyObject * -warnings_filters_mutated(PyObject *self, PyObject *Py_UNUSED(args)) +warnings_filters_mutated_impl(PyObject *module) +/*[clinic end generated code: output=8ce517abd12b88f4 input=35ecbf08ee2491b2]*/ { PyInterpreterState *interp = get_current_interp(); if (interp == NULL) { @@ -1337,8 +1344,7 @@ _PyErr_WarnUnawaitedCoroutine(PyObject *coro) static PyMethodDef warnings_functions[] = { WARNINGS_WARN_METHODDEF WARNINGS_WARN_EXPLICIT_METHODDEF - {"_filters_mutated", _PyCFunction_CAST(warnings_filters_mutated), METH_NOARGS, - NULL}, + WARNINGS_FILTERS_MUTATED_METHODDEF /* XXX(brett.cannon): add showwarning? */ /* XXX(brett.cannon): Reasonable to add formatwarning? */ {NULL, NULL} /* sentinel */ diff --git a/Python/adaptive.md b/Python/adaptive.md index e8161bcdd5b9c6..d978c089b237e0 100644 --- a/Python/adaptive.md +++ b/Python/adaptive.md @@ -11,7 +11,7 @@ A family of instructions has the following fundamental properties: generated by the bytecode compiler. * It has a single adaptive instruction that records an execution count and, at regular intervals, attempts to specialize itself. If not specializing, - it executes the non-adaptive instruction. 
+ it executes the base implementation. * It has at least one specialized form of the instruction that is tailored for a particular value or set of values at runtime. * All members of the family must have the same number of inline cache entries, @@ -22,19 +22,18 @@ A family of instructions has the following fundamental properties: The current implementation also requires the following, although these are not fundamental and may change: -* All families uses one or more inline cache entries, +* All families use one or more inline cache entries, the first entry is always the counter. -* All instruction names should start with the name of the non-adaptive +* All instruction names should start with the name of the adaptive instruction. -* The adaptive instruction should end in `_ADAPTIVE`. * Specialized forms should have names describing their specialization. ## Example family -The `LOAD_GLOBAL` instruction (in Python/ceval.c) already has an adaptive +The `LOAD_GLOBAL` instruction (in Python/bytecodes.c) already has an adaptive family that serves as a relatively simple example. -The `LOAD_GLOBAL_ADAPTIVE` instruction performs adaptive specialization, +The `LOAD_GLOBAL` instruction performs adaptive specialization, calling `_Py_Specialize_LoadGlobal()` when the counter reaches zero. There are two specialized instructions in the family, `LOAD_GLOBAL_MODULE` @@ -138,5 +137,5 @@ to eliminate the branches. Finally, take care that stats are gather correctly. After the last `DEOPT_IF` has passed, a hit should be recorded with `STAT_INC(BASE_INSTRUCTION, hit)`. -After a optimization has been deferred in the `ADAPTIVE` form, +After an optimization has been deferred in the adaptive instruction, that should be recorded with `STAT_INC(BASE_INSTRUCTION, deferred)`. diff --git a/Objects/asm_trampoline.S b/Python/asm_trampoline.S similarity index 100% rename from Objects/asm_trampoline.S rename to Python/asm_trampoline.S diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 426c5341b56fed..1a0b2a05b1c713 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -533,8 +533,7 @@ make_const_tuple(asdl_expr_seq *elts) for (int i = 0; i < asdl_seq_LEN(elts); i++) { expr_ty e = (expr_ty)asdl_seq_GET(elts, i); PyObject *v = e->v.Constant.value; - Py_INCREF(v); - PyTuple_SET_ITEM(newval, i, v); + PyTuple_SET_ITEM(newval, i, Py_NewRef(v)); } return newval; } diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 6565b6b33ebd52..79b2e2f15ba243 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -13,7 +13,7 @@ _Py_DECLARE_STR(open_br, "{"); _Py_DECLARE_STR(dbl_open_br, "{{"); _Py_DECLARE_STR(close_br, "}"); _Py_DECLARE_STR(dbl_close_br, "}}"); -static PyObject *_str_replace_inf; +#define _str_replace_inf _Py_CACHED_OBJECT(str_replace_inf) /* Forward declarations for recursion via helper functions. 
*/ static PyObject * diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 2809b03d4a936a..b3b7e8d6c50530 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -63,8 +63,7 @@ update_bases(PyObject *bases, PyObject *const *args, Py_ssize_t nargs) } for (j = 0; j < i; j++) { base = args[j]; - PyList_SET_ITEM(new_bases, j, base); - Py_INCREF(base); + PyList_SET_ITEM(new_bases, j, Py_NewRef(base)); } } j = PyList_GET_SIZE(new_bases); @@ -169,9 +168,7 @@ builtin___build_class__(PyObject *self, PyObject *const *args, Py_ssize_t nargs, goto error; } if (winner != meta) { - Py_DECREF(meta); - meta = winner; - Py_INCREF(meta); + Py_SETREF(meta, Py_NewRef(winner)); } } /* else: meta is not a class, so we cannot do the metaclass @@ -221,8 +218,7 @@ builtin___build_class__(PyObject *self, PyObject *const *args, Py_ssize_t nargs, "__class__ set to %.200R defining %.200R as %.200R"; PyErr_Format(PyExc_TypeError, msg, cell_cls, name, cls); } - Py_DECREF(cls); - cls = NULL; + Py_SETREF(cls, NULL); goto error; } } @@ -804,8 +800,7 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename, goto error; if (is_ast) { if (flags & PyCF_ONLY_AST) { - Py_INCREF(source); - result = source; + result = Py_NewRef(source); } else { PyArena *arena; @@ -1128,8 +1123,7 @@ builtin_getattr(PyObject *self, PyObject *const *args, Py_ssize_t nargs) if (nargs > 2) { if (_PyObject_LookupAttr(v, name, &result) == 0) { PyObject *dflt = args[2]; - Py_INCREF(dflt); - return dflt; + return Py_NewRef(dflt); } } else { @@ -1162,8 +1156,7 @@ builtin_globals_impl(PyObject *module) PyObject *d; d = PyEval_GetGlobals(); - Py_XINCREF(d); - return d; + return Py_XNewRef(d); } @@ -1390,12 +1383,10 @@ map_reduce(mapobject *lz, PyObject *Py_UNUSED(ignored)) Py_ssize_t i; if (args == NULL) return NULL; - Py_INCREF(lz->func); - PyTuple_SET_ITEM(args, 0, lz->func); + PyTuple_SET_ITEM(args, 0, Py_NewRef(lz->func)); for (i = 0; i<numargs; i++) { PyObject *it = PyTuple_GET_ITEM(lz->iters, i); - Py_INCREF(it); - PyTuple_SET_ITEM(args, i+1, it); + PyTuple_SET_ITEM(args, i+1, Py_NewRef(it)); } return Py_BuildValue("ON", Py_TYPE(lz), args); @@ -1486,8 +1477,7 @@ builtin_next(PyObject *self, PyObject *const *args, Py_ssize_t nargs) return NULL; PyErr_Clear(); } - Py_INCREF(def); - return def; + return Py_NewRef(def); } else if (PyErr_Occurred()) { return NULL; } else { @@ -1723,8 +1713,7 @@ builtin_locals_impl(PyObject *module) PyObject *d; d = PyEval_GetLocals(); - Py_XINCREF(d); - return d; + return Py_XNewRef(d); } @@ -1785,8 +1774,7 @@ min_max(PyObject *args, PyObject *kwds, int op) } /* no key function; the value is the item */ else { - val = item; - Py_INCREF(val); + val = Py_NewRef(item); } /* maximum value and item are unset; set them */ @@ -1816,8 +1804,7 @@ min_max(PyObject *args, PyObject *kwds, int op) if (maxval == NULL) { assert(maxitem == NULL); if (defaultval != NULL) { - Py_INCREF(defaultval); - maxitem = defaultval; + maxitem = Py_NewRef(defaultval); } else { PyErr_Format(PyExc_ValueError, "%s() arg is an empty sequence", name); @@ -2413,8 +2400,7 @@ builtin_vars(PyObject *self, PyObject *args) if (!PyArg_UnpackTuple(args, "vars", 0, 1, &v)) return NULL; if (v == NULL) { - d = PyEval_GetLocals(); - Py_XINCREF(d); + d = Py_XNewRef(PyEval_GetLocals()); } else { if (_PyObject_LookupAttr(v, &_Py_ID(__dict__), &d) == 0) { @@ -2496,8 +2482,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) long i_result = PyLong_AsLongAndOverflow(result, &overflow); /* If this already overflowed, don't even enter the loop.
*/ if (overflow == 0) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } while(result == NULL) { item = PyIter_Next(iter); @@ -2547,8 +2532,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) if (PyFloat_CheckExact(result)) { double f_result = PyFloat_AS_DOUBLE(result); - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); while(result == NULL) { item = PyIter_Next(iter); if (item == NULL) { @@ -2595,8 +2579,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) if (item == NULL) { /* error, or end-of-sequence */ if (PyErr_Occurred()) { - Py_DECREF(result); - result = NULL; + Py_SETREF(result, NULL); } break; } @@ -2737,8 +2720,7 @@ zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } for (i=0 ; i < tuplesize ; i++) { - Py_INCREF(Py_None); - PyTuple_SET_ITEM(result, i, Py_None); + PyTuple_SET_ITEM(result, i, Py_NewRef(Py_None)); } /* create zipobject structure */ diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index 3a2a7318086f77..587063ef1ab29a 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_initconfig.h" #include "pycore_fileutils.h" // _Py_fstat_noraise() +#include "pycore_runtime.h" // _PyRuntime #ifdef MS_WINDOWS # include <windows.h> @@ -263,11 +264,7 @@ py_getentropy(char *buffer, Py_ssize_t size, int raise) #endif /* defined(HAVE_GETENTROPY) && !(defined(__sun) && defined(__SVR4)) */ -static struct { - int fd; - dev_t st_dev; - ino_t st_ino; -} urandom_cache = { -1 }; +#define urandom_cache (_PyRuntime.pyhash_state.urandom_cache) /* Read random bytes from the /dev/urandom device: @@ -402,6 +399,9 @@ dev_urandom_close(void) urandom_cache.fd = -1; } } + +#undef urandom_cache + #endif /* !MS_WINDOWS */ diff --git a/Python/bytecodes.c b/Python/bytecodes.c new file mode 100644 index 00000000000000..a1f910da8ed54a --- /dev/null +++ b/Python/bytecodes.c @@ -0,0 +1,3727 @@ +// This file contains instruction definitions. +// It is read by Tools/cases_generator/generate_cases.py +// to generate Python/generated_cases.c.h. +// Note that there is some dummy C code at the top and bottom of the file +// to fool text editors like VS Code into believing this is valid C code. +// The actual instruction definitions start at // BEGIN BYTECODES //. +// See Tools/cases_generator/README.md for more information.
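The inst()/super()/family() notation used below declares each instruction together with its stack effect. For orientation, here is a rough, hand-written sketch, not the generator's actual output (which lives in Python/generated_cases.c.h and differs in detail), of how a declaration such as inst(UNARY_NEGATIVE, (value -- res)) corresponds to an ordinary switch case over the evaluation stack; PEEK/POKE are the macros defined further down, while DISPATCH() and the error label stand in for the interpreter's usual dispatch and error-handling plumbing.

case UNARY_NEGATIVE: {
    PyObject *value = PEEK(1);           // the single input named in "(value -- res)"
    PyObject *res = PyNumber_Negative(value);
    Py_DECREF(value);
    if (res == NULL) {                   // roughly what ERROR_IF(res == NULL, error) becomes
        goto error;
    }
    POKE(1, res);                        // the single output; net stack depth is unchanged
    DISPATCH();
}

Superinstructions such as LOAD_FAST__LOAD_FAST, declared with super() below, are produced the same way by concatenating the bodies of their member instructions.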
+ +#include "Python.h" +#include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_call.h" // _PyObject_FastCallDictTstate() +#include "pycore_ceval.h" // _PyEval_SignalAsyncExc() +#include "pycore_code.h" +#include "pycore_function.h" +#include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_moduleobject.h" // PyModuleObject +#include "pycore_opcode.h" // EXTRA_CASES +#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pymem.h" // _PyMem_IsPtrFreed() +#include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_range.h" // _PyRangeIterObject +#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs +#include "pycore_sysmodule.h" // _PySys_Audit() +#include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS + +#include "pycore_dict.h" +#include "dictobject.h" +#include "pycore_frame.h" +#include "opcode.h" +#include "pydtrace.h" +#include "setobject.h" +#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX + +void _PyFloat_ExactDealloc(PyObject *); +void _PyUnicode_ExactDealloc(PyObject *); + +/* Stack effect macros + * These will be mostly replaced by stack effect descriptions, + * but the tooling need to recognize them. + */ +#define SET_TOP(v) (stack_pointer[-1] = (v)) +#define SET_SECOND(v) (stack_pointer[-2] = (v)) +#define PEEK(n) (stack_pointer[-(n)]) +#define POKE(n, v) (stack_pointer[-(n)] = (v)) +#define PUSH(val) (*(stack_pointer++) = (val)) +#define POP() (*(--stack_pointer)) +#define TOP() PEEK(1) +#define SECOND() PEEK(2) +#define STACK_GROW(n) (stack_pointer += (n)) +#define STACK_SHRINK(n) (stack_pointer -= (n)) +#define EMPTY() 1 +#define STACK_LEVEL() 2 + +/* Local variable macros */ +#define GETLOCAL(i) (frame->localsplus[i]) +#define SETLOCAL(i, val) \ +do { \ + PyObject *_tmp = frame->localsplus[i]; \ + frame->localsplus[i] = (val); \ + Py_XDECREF(_tmp); \ +} while (0) + +/* Flow control macros */ +#define DEOPT_IF(cond, instname) ((void)0) +#define ERROR_IF(cond, labelname) ((void)0) +#define JUMPBY(offset) ((void)0) +#define GO_TO_INSTRUCTION(instname) ((void)0) +#define DISPATCH_SAME_OPARG() ((void)0) + +#define inst(name, ...) case name: +#define op(name, ...) /* NAME is ignored */ +#define macro(name) static int MACRO_##name +#define super(name) static int SUPER_##name +#define family(name, ...) static int family_##name + +#define NAME_ERROR_MSG \ + "name '%.200s' is not defined" + +// Dummy variables for stack effects. +static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub; +static PyObject *container, *start, *stop, *v, *lhs, *rhs; +static PyObject *list, *tuple, *dict; +static PyObject *exit_func, *lasti, *val; + +static PyObject * +dummy_func( + PyThreadState *tstate, + _PyInterpreterFrame *frame, + unsigned char opcode, + unsigned int oparg, + _Py_atomic_int * const eval_breaker, + _PyCFrame cframe, + PyObject *names, + PyObject *consts, + _Py_CODEUNIT *next_instr, + PyObject **stack_pointer, + PyObject *kwnames, + int throwflag, + binaryfunc binary_ops[] +) +{ + _PyInterpreterFrame entry_frame; + + switch (opcode) { + +// BEGIN BYTECODES // + inst(NOP, (--)) { + } + + inst(RESUME, (--)) { + assert(tstate->cframe == &cframe); + assert(frame == cframe.current_frame); + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + } + + inst(LOAD_CLOSURE, (-- value)) { + /* We keep LOAD_CLOSURE so that the bytecode stays more readable. 
*/ + value = GETLOCAL(oparg); + ERROR_IF(value == NULL, unbound_local_error); + Py_INCREF(value); + } + + inst(LOAD_FAST_CHECK, (-- value)) { + value = GETLOCAL(oparg); + ERROR_IF(value == NULL, unbound_local_error); + Py_INCREF(value); + } + + inst(LOAD_FAST, (-- value)) { + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + } + + inst(LOAD_CONST, (-- value)) { + value = GETITEM(consts, oparg); + Py_INCREF(value); + } + + inst(STORE_FAST, (value --)) { + SETLOCAL(oparg, value); + } + + super(LOAD_FAST__LOAD_FAST) = LOAD_FAST + LOAD_FAST; + super(LOAD_FAST__LOAD_CONST) = LOAD_FAST + LOAD_CONST; + super(STORE_FAST__LOAD_FAST) = STORE_FAST + LOAD_FAST; + super(STORE_FAST__STORE_FAST) = STORE_FAST + STORE_FAST; + super(LOAD_CONST__LOAD_FAST) = LOAD_CONST + LOAD_FAST; + + inst(POP_TOP, (value --)) { + Py_DECREF(value); + } + + inst(PUSH_NULL, (-- res)) { + res = NULL; + } + + macro(END_FOR) = POP_TOP + POP_TOP; + + inst(UNARY_POSITIVE, (value -- res)) { + res = PyNumber_Positive(value); + Py_DECREF(value); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_NEGATIVE, (value -- res)) { + res = PyNumber_Negative(value); + Py_DECREF(value); + ERROR_IF(res == NULL, error); + } + + inst(UNARY_NOT, (value -- res)) { + int err = PyObject_IsTrue(value); + Py_DECREF(value); + ERROR_IF(err < 0, error); + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + } + + inst(UNARY_INVERT, (value -- res)) { + res = PyNumber_Invert(value); + Py_DECREF(value); + ERROR_IF(res == NULL, error); + } + + family(binary_op, INLINE_CACHE_ENTRIES_BINARY_OP) = { + BINARY_OP, + BINARY_OP_ADD_FLOAT, + BINARY_OP_ADD_INT, + BINARY_OP_ADD_UNICODE, + // BINARY_OP_INPLACE_ADD_UNICODE, // This is an odd duck. + BINARY_OP_MULTIPLY_FLOAT, + BINARY_OP_MULTIPLY_INT, + BINARY_OP_SUBTRACT_FLOAT, + BINARY_OP_SUBTRACT_INT, + }; + + + inst(BINARY_OP_MULTIPLY_INT, (left, right, unused/1 -- prod)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(prod == NULL, error); + } + + inst(BINARY_OP_MULTIPLY_FLOAT, (left, right, unused/1 -- prod)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dprod = ((PyFloatObject *)left)->ob_fval * + ((PyFloatObject *)right)->ob_fval; + prod = PyFloat_FromDouble(dprod); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(prod == NULL, error); + } + + inst(BINARY_OP_SUBTRACT_INT, (left, right, unused/1 -- sub)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(sub == NULL, error); + } + + inst(BINARY_OP_SUBTRACT_FLOAT, (left, right, unused/1 -- sub)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsub = ((PyFloatObject 
*)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; + sub = PyFloat_FromDouble(dsub); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(sub == NULL, error); + } + + inst(BINARY_OP_ADD_UNICODE, (left, right, unused/1 -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + res = PyUnicode_Concat(left, right); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + ERROR_IF(res == NULL, error); + } + + // This is a subtle one. It's a super-instruction for + // BINARY_OP_ADD_UNICODE followed by STORE_FAST + // where the store goes into the left argument. + // So the inputs are the same as for all BINARY_OP + // specializations, but there is no output. + // At the end we just skip over the STORE_FAST. + inst(BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; + assert(_Py_OPCODE(true_next) == STORE_FAST || + _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); + PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); + DEOPT_IF(*target_local != left, BINARY_OP); + STAT_INC(BINARY_OP, hit); + /* Handle `left = left + right` or `left += right` for str. + * + * When possible, extend `left` in place rather than + * allocating a new PyUnicodeObject. This attempts to avoid + * quadratic behavior when one neglects to use str.join(). + * + * If `left` has only two references remaining (one from + * the stack, one in the locals), DECREFing `left` leaves + * only the locals reference, so PyUnicode_Append knows + * that the string is safe to mutate. + */ + assert(Py_REFCNT(left) >= 2); + _Py_DECREF_NO_DEALLOC(left); + PyUnicode_Append(target_local, right); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + ERROR_IF(*target_local == NULL, error); + // The STORE_FAST is already done. 
+ JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); + } + + inst(BINARY_OP_ADD_FLOAT, (left, right, unused/1 -- sum)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsum = ((PyFloatObject *)left)->ob_fval + + ((PyFloatObject *)right)->ob_fval; + sum = PyFloat_FromDouble(dsum); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + ERROR_IF(sum == NULL, error); + } + + inst(BINARY_OP_ADD_INT, (left, right, unused/1 -- sum)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + ERROR_IF(sum == NULL, error); + } + + family(binary_subscr, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = { + BINARY_SUBSCR, + BINARY_SUBSCR_DICT, + BINARY_SUBSCR_GETITEM, + BINARY_SUBSCR_LIST_INT, + BINARY_SUBSCR_TUPLE_INT, + }; + + inst(BINARY_SUBSCR, (container, sub, unused/4 -- res)) { + _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_BinarySubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + res = PyObject_GetItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + ERROR_IF(res == NULL, error); + } + + inst(BINARY_SLICE, (container, start, stop -- res)) { + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + // Can't use ERROR_IF() here, because we haven't + // DECREF'ed container yet, and we still own slice. 
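/* Illustrative sketch, not part of this patch: the adaptive pattern that
 * BINARY_SUBSCR above (and the other specializable instructions in this
 * file) follows.  When the counter in the inline cache hits zero, the
 * instruction tries to rewrite itself into a specialized form and
 * re-dispatches; otherwise it decrements the counter and runs the
 * generic path.  The type and helper below are hypothetical. */
#include <stdint.h>

struct adaptive_cache { uint16_t counter; };

enum adaptive_step { RUN_GENERIC, TRY_SPECIALIZE };

static enum adaptive_step
adaptive_dispatch(struct adaptive_cache *cache)
{
    if (cache->counter == 0) {
        return TRY_SPECIALIZE;   /* rewrite the opcode, then re-dispatch */
    }
    cache->counter--;            /* stay on the generic implementation */
    return RUN_GENERIC;
}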
+ if (slice == NULL) { + res = NULL; + } + else { + res = PyObject_GetItem(container, slice); + Py_DECREF(slice); + } + Py_DECREF(container); + ERROR_IF(res == NULL, error); + } + + inst(STORE_SLICE, (v, container, start, stop -- )) { + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + int err; + if (slice == NULL) { + err = 1; + } + else { + err = PyObject_SetItem(container, slice, v); + Py_DECREF(slice); + } + Py_DECREF(v); + Py_DECREF(container); + ERROR_IF(err, error); + } + + inst(BINARY_SUBSCR_LIST_INT, (list, sub, unused/4 -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyList_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyList_GET_ITEM(list, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + } + + inst(BINARY_SUBSCR_TUPLE_INT, (tuple, sub, unused/4 -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyTuple_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyTuple_GET_ITEM(tuple, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(tuple); + } + + inst(BINARY_SUBSCR_DICT, (dict, sub, unused/4 -- res)) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyDict_GetItemWithError(dict, sub); + if (res == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetKeyError(sub); + } + Py_DECREF(dict); + Py_DECREF(sub); + ERROR_IF(1, error); + } + Py_INCREF(res); // Do this before DECREF'ing dict, sub + Py_DECREF(dict); + Py_DECREF(sub); + } + + inst(BINARY_SUBSCR_GETITEM, (container, sub, unused/1, type_version/2, func_version/1 -- unused)) { + PyTypeObject *tp = Py_TYPE(container); + DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); + assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); + PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + assert(PyFunction_Check(cached)); + PyFunctionObject *getitem = (PyFunctionObject *)cached; + DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + PyCodeObject *code = (PyCodeObject *)getitem->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); + STACK_SHRINK(2); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0 -- ) + inst(LIST_APPEND) { + PyObject 
*v = POP(); + PyObject *list = PEEK(oparg); + if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) + goto error; + PREDICT(JUMP_BACKWARD); + } + + // stack effect: (__0 -- ) + inst(SET_ADD) { + PyObject *v = POP(); + PyObject *set = PEEK(oparg); + int err; + err = PySet_Add(set, v); + Py_DECREF(v); + if (err != 0) + goto error; + PREDICT(JUMP_BACKWARD); + } + + inst(STORE_SUBSCR, (v, container, sub -- )) { + _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_StoreSubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* container[sub] = v */ + int err = PyObject_SetItem(container, sub, v); + Py_DECREF(v); + Py_DECREF(container); + Py_DECREF(sub); + ERROR_IF(err != 0, error); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + } + + // stack effect: (__0, __1, __2 -- ) + inst(STORE_SUBSCR_LIST_INT) { + assert(cframe.use_tracing == 0); + PyObject *sub = TOP(); + PyObject *list = SECOND(); + PyObject *value = THIRD(); + DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); + + // Ensure nonnegative, zero-or-one-digit ints. + DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + // Ensure index < len(list) + DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + + PyObject *old_value = PyList_GET_ITEM(list, index); + PyList_SET_ITEM(list, index, value); + STACK_SHRINK(3); + assert(old_value != NULL); + Py_DECREF(old_value); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + } + + // stack effect: (__0, __1, __2 -- ) + inst(STORE_SUBSCR_DICT) { + assert(cframe.use_tracing == 0); + PyObject *sub = TOP(); + PyObject *dict = SECOND(); + PyObject *value = THIRD(); + DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); + STACK_SHRINK(3); + STAT_INC(STORE_SUBSCR, hit); + int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); + Py_DECREF(dict); + if (err != 0) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + } + + // stack effect: (__0, __1 -- ) + inst(DELETE_SUBSCR) { + PyObject *sub = TOP(); + PyObject *container = SECOND(); + int err; + STACK_SHRINK(2); + /* del container[sub] */ + err = PyObject_DelItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + if (err != 0) + goto error; + } + + // stack effect: (__0 -- ) + inst(PRINT_EXPR) { + PyObject *value = POP(); + PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); + PyObject *res; + if (hook == NULL) { + _PyErr_SetString(tstate, PyExc_RuntimeError, + "lost sys.displayhook"); + Py_DECREF(value); + goto error; + } + res = PyObject_CallOneArg(hook, value); + Py_DECREF(value); + if (res == NULL) + goto error; + Py_DECREF(res); + } + + // stack effect: (__array[oparg] -- ) + inst(RAISE_VARARGS) { + PyObject *cause = NULL, *exc = NULL; + switch (oparg) { + case 2: + cause = POP(); /* cause */ + /* fall through */ + case 1: + exc = POP(); /* exc */ + /* fall through */ + case 0: + if (do_raise(tstate, exc, cause)) { + goto exception_unwind; + } + break; + default: + _PyErr_SetString(tstate, PyExc_SystemError, + "bad RAISE_VARARGS oparg"); + break; + } + goto error; + } + + // stack effect: (__0 -- ) + inst(INTERPRETER_EXIT) { + assert(frame == &entry_frame); + 
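/* Illustrative note, not part of this patch: why the subscript
 * specializations above deoptimize with `((size_t)Py_SIZE(sub)) > 1`.
 * For a CPython int, ob_size is the signed digit count, so the unsigned
 * comparison accepts exactly ob_size 0 or 1 -- a non-negative value that
 * fits in a single digit -- while any negative ob_size wraps to a huge
 * unsigned number and fails the check. */
#include <stddef.h>

static int
fits_single_nonnegative_digit(ptrdiff_t ob_size)
{
    return (size_t)ob_size <= 1;
}
/* fits_single_nonnegative_digit(-1) is 0: (size_t)-1 is SIZE_MAX. */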
assert(_PyFrame_IsIncomplete(frame)); + PyObject *retval = POP(); + assert(EMPTY()); + /* Restore previous cframe and return. */ + tstate->cframe = cframe.previous; + tstate->cframe->use_tracing = cframe.use_tracing; + assert(tstate->cframe->current_frame == frame->previous); + assert(!_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallTstate(tstate); + return retval; + } + + // stack effect: (__0 -- ) + inst(RETURN_VALUE) { + PyObject *retval = POP(); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + frame = cframe.current_frame = pop_frame(tstate, frame); + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + // stack effect: ( -- ) + inst(GET_AITER) { + unaryfunc getter = NULL; + PyObject *iter = NULL; + PyObject *obj = TOP(); + PyTypeObject *type = Py_TYPE(obj); + + if (type->tp_as_async != NULL) { + getter = type->tp_as_async->am_aiter; + } + + if (getter != NULL) { + iter = (*getter)(obj); + Py_DECREF(obj); + if (iter == NULL) { + SET_TOP(NULL); + goto error; + } + } + else { + SET_TOP(NULL); + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an object with " + "__aiter__ method, got %.100s", + type->tp_name); + Py_DECREF(obj); + goto error; + } + + if (Py_TYPE(iter)->tp_as_async == NULL || + Py_TYPE(iter)->tp_as_async->am_anext == NULL) { + + SET_TOP(NULL); + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' received an object from __aiter__ " + "that does not implement __anext__: %.100s", + Py_TYPE(iter)->tp_name); + Py_DECREF(iter); + goto error; + } + + SET_TOP(iter); + } + + // stack effect: ( -- __0) + inst(GET_ANEXT) { + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyObject *awaitable = NULL; + PyObject *aiter = TOP(); + PyTypeObject *type = Py_TYPE(aiter); + + if (PyAsyncGen_CheckExact(aiter)) { + awaitable = type->tp_as_async->am_anext(aiter); + if (awaitable == NULL) { + goto error; + } + } else { + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + goto error; + } + } + else { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + goto error; + } + + awaitable = _PyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + + Py_DECREF(next_iter); + goto error; + } else { + Py_DECREF(next_iter); + } + } + + PUSH(awaitable); + PREDICT(LOAD_CONST); + } + + // stack effect: ( -- ) + inst(GET_AWAITABLE) { + PyObject *iterable = TOP(); + PyObject *iter = _PyCoro_GetAwaitableIter(iterable); + + if (iter == NULL) { + format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + } + + Py_DECREF(iterable); + + if (iter != NULL && PyCoro_CheckExact(iter)) { + PyObject *yf = _PyGen_yf((PyGenObject*)iter); + if (yf != NULL) { + /* `iter` is a coroutine object that is being + awaited, `yf` is a pointer to the current awaitable + being awaited on. */ + Py_DECREF(yf); + Py_CLEAR(iter); + _PyErr_SetString(tstate, PyExc_RuntimeError, + "coroutine is being awaited already"); + /* The code below jumps to `error` if `iter` is NULL. 
*/ + } + } + + SET_TOP(iter); /* Even if it's NULL */ + + if (iter == NULL) { + goto error; + } + + PREDICT(LOAD_CONST); + } + + // error: SEND stack effect depends on jump flag + inst(SEND) { + assert(frame != &entry_frame); + assert(STACK_LEVEL() >= 2); + PyObject *v = POP(); + PyObject *receiver = TOP(); + PySendResult gen_status; + PyObject *retval; + if (tstate->c_tracefunc == NULL) { + gen_status = PyIter_Send(receiver, v, &retval); + } else { + if (Py_IsNone(v) && PyIter_Check(receiver)) { + retval = Py_TYPE(receiver)->tp_iternext(receiver); + } + else { + retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); + } + if (retval == NULL) { + if (tstate->c_tracefunc != NULL + && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + if (_PyGen_FetchStopIterationValue(&retval) == 0) { + gen_status = PYGEN_RETURN; + } + else { + gen_status = PYGEN_ERROR; + } + } + else { + gen_status = PYGEN_NEXT; + } + } + Py_DECREF(v); + if (gen_status == PYGEN_ERROR) { + assert(retval == NULL); + goto error; + } + if (gen_status == PYGEN_RETURN) { + assert(retval != NULL); + Py_DECREF(receiver); + SET_TOP(retval); + JUMPBY(oparg); + } + else { + assert(gen_status == PYGEN_NEXT); + assert(retval != NULL); + PUSH(retval); + } + } + + // stack effect: ( -- ) + inst(ASYNC_GEN_WRAP) { + PyObject *v = TOP(); + assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); + PyObject *w = _PyAsyncGenValueWrapperNew(v); + if (w == NULL) { + goto error; + } + SET_TOP(w); + Py_DECREF(v); + } + + // stack effect: ( -- ) + inst(YIELD_VALUE) { + // NOTE: It's important that YIELD_VALUE never raises an exception! + // The compiler treats any exception raised here as a failed close() + // or throw() call. 
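/* Illustrative sketch, not part of this patch: the three PySendResult
 * outcomes that SEND above distinguishes, expressed with the public
 * PyIter_Send() API.  The driver function is hypothetical. */
#include <Python.h>

static PyObject *
drive_once(PyObject *receiver, PyObject *value)
{
    PyObject *result = NULL;
    switch (PyIter_Send(receiver, value, &result)) {
    case PYGEN_NEXT:   return result;   /* yielded a value: keep looping */
    case PYGEN_RETURN: return result;   /* finished: StopIteration value */
    case PYGEN_ERROR:  return NULL;     /* an exception is set */
    }
    return NULL;                         /* not reached */
}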
+ assert(oparg == STACK_LEVEL()); + assert(frame != &entry_frame); + PyObject *retval = POP(); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + frame->prev_instr -= frame->yield_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + // stack effect: (__0 -- ) + inst(POP_EXCEPT) { + _PyErr_StackItem *exc_info = tstate->exc_info; + PyObject *value = exc_info->exc_value; + exc_info->exc_value = POP(); + Py_XDECREF(value); + } + + // stack effect: (__0 -- ) + inst(RERAISE) { + if (oparg) { + PyObject *lasti = PEEK(oparg + 1); + if (PyLong_Check(lasti)) { + frame->prev_instr = _PyCode_CODE(frame->f_code) + PyLong_AsLong(lasti); + assert(!_PyErr_Occurred(tstate)); + } + else { + assert(PyLong_Check(lasti)); + _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); + goto error; + } + } + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + + // stack effect: (__0 -- ) + inst(PREP_RERAISE_STAR) { + PyObject *excs = POP(); + assert(PyList_Check(excs)); + PyObject *orig = POP(); + + PyObject *val = _PyExc_PrepReraiseStar(orig, excs); + Py_DECREF(excs); + Py_DECREF(orig); + + if (val == NULL) { + goto error; + } + + PUSH(val); + } + + // stack effect: (__0, __1 -- ) + inst(END_ASYNC_FOR) { + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + if (PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { + Py_DECREF(val); + Py_DECREF(POP()); + } + else { + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + } + + // stack effect: (__0, __1 -- ) + inst(CLEANUP_THROW) { + assert(throwflag); + PyObject *exc_value = TOP(); + assert(exc_value && PyExceptionInstance_Check(exc_value)); + if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { + PyObject *value = ((PyStopIterationObject *)exc_value)->value; + Py_INCREF(value); + Py_DECREF(POP()); // The StopIteration. + Py_DECREF(POP()); // The last sent value. + Py_DECREF(POP()); // The delegated sub-iterator. 
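/* Illustrative sketch, not part of this patch: the StopIteration case of
 * CLEANUP_THROW above.  When the exception thrown into the delegated
 * sub-iterator turns out to be StopIteration, its `value` attribute is
 * what the enclosing `yield from` / `await` should produce.  The helper
 * name is hypothetical. */
#include <Python.h>

static PyObject *
stopiteration_result(PyObject *exc)
{
    if (!PyErr_GivenExceptionMatches(exc, PyExc_StopIteration)) {
        return NULL;    /* caller re-raises the original exception */
    }
    return Py_NewRef(((PyStopIterationObject *)exc)->value);
}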
+ PUSH(value); + } + else { + PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); + PyObject *exc_traceback = PyException_GetTraceback(exc_value); + _PyErr_Restore(tstate, exc_type, Py_NewRef(exc_value), exc_traceback); + goto exception_unwind; + } + } + + inst(STOPITERATION_ERROR) { + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyObject *exc = TOP(); + assert(PyExceptionInstance_Check(exc)); + const char *msg = NULL; + if (PyErr_GivenExceptionMatches(exc, PyExc_StopIteration)) { + msg = "generator raised StopIteration"; + if (frame->f_code->co_flags & CO_ASYNC_GENERATOR) { + msg = "async generator raised StopIteration"; + } + else if (frame->f_code->co_flags & CO_COROUTINE) { + msg = "coroutine raised StopIteration"; + } + } + else if ((frame->f_code->co_flags & CO_ASYNC_GENERATOR) && + PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) + { + /* code in `gen` raised a StopAsyncIteration error: + raise a RuntimeError. + */ + msg = "async generator raised StopAsyncIteration"; + } + if (msg != NULL) { + PyObject *message = _PyUnicode_FromASCII(msg, strlen(msg)); + if (message == NULL) { + goto error; + } + PyObject *error = PyObject_CallOneArg(PyExc_RuntimeError, message); + if (error == NULL) { + Py_DECREF(message); + goto error; + } + assert(PyExceptionInstance_Check(error)); + SET_TOP(error); + PyException_SetCause(error, Py_NewRef(exc)); + // Steal exc reference, rather than Py_NewRef+Py_DECREF + PyException_SetContext(error, exc); + Py_DECREF(message); + } + } + + + // stack effect: ( -- __0) + inst(LOAD_ASSERTION_ERROR) { + PyObject *value = PyExc_AssertionError; + PUSH(Py_NewRef(value)); + } + + // stack effect: ( -- __0) + inst(LOAD_BUILD_CLASS) { + PyObject *bc; + if (PyDict_CheckExact(BUILTINS())) { + bc = _PyDict_GetItemWithError(BUILTINS(), + &_Py_ID(__build_class__)); + if (bc == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + } + goto error; + } + Py_INCREF(bc); + } + else { + bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); + if (bc == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + goto error; + } + } + PUSH(bc); + } + + // stack effect: (__0 -- ) + inst(STORE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *v = POP(); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when storing %R", name); + Py_DECREF(v); + goto error; + } + if (PyDict_CheckExact(ns)) + err = PyDict_SetItem(ns, name, v); + else + err = PyObject_SetItem(ns, name, v); + Py_DECREF(v); + if (err != 0) + goto error; + } + + inst(DELETE_NAME, (--)) { + PyObject *name = GETITEM(names, oparg); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when deleting %R", name); + goto error; + } + err = PyObject_DelItem(ns, name); + // Can't use ERROR_IF here. 
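/* Illustrative sketch, not part of this patch: the exception chaining
 * performed by STOPITERATION_ERROR above, using only public API.  Both
 * setters steal a reference, hence the Py_NewRef() calls; the helper
 * name is hypothetical. */
#include <Python.h>

static PyObject *
chain_runtime_error(PyObject *stop_exc, const char *msg)
{
    PyObject *err = PyObject_CallFunction(PyExc_RuntimeError, "s", msg);
    if (err == NULL) {
        return NULL;
    }
    PyException_SetCause(err, Py_NewRef(stop_exc));    /* __cause__ */
    PyException_SetContext(err, Py_NewRef(stop_exc));  /* __context__ */
    return err;
}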
+ if (err != 0) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); + goto error; + } + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE) { + _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *seq = TOP(); + next_instr--; + _Py_Specialize_UnpackSequence(seq, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(UNPACK_SEQUENCE, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *seq = POP(); + PyObject **top = stack_pointer + oparg; + if (!unpack_iterable(tstate, seq, oparg, -1, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(oparg); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_TWO_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); + PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyTuple_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // stack effect: (__0 -- __array[oparg]) + inst(UNPACK_SEQUENCE_LIST) { + PyObject *seq = TOP(); + DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyList_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + } + + // error: UNPACK_EX has irregular stack effect + inst(UNPACK_EX) { + int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); + PyObject *seq = POP(); + PyObject **top = stack_pointer + totalargs; + if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(totalargs); + Py_DECREF(seq); + } + + // stack effect: (__0, __1 -- ) + inst(STORE_ATTR) { + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg); + next_instr--; + _Py_Specialize_StoreAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg); + PyObject *owner = TOP(); + PyObject *v = SECOND(); + int err; + STACK_SHRINK(2); + err = PyObject_SetAttr(owner, name, v); + Py_DECREF(v); + Py_DECREF(owner); + if (err != 0) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + } + + // stack effect: (__0 -- ) + inst(DELETE_ATTR) { + PyObject *name = GETITEM(names, oparg); + PyObject *owner = POP(); + int err; + err = PyObject_SetAttr(owner, name, (PyObject *)NULL); + Py_DECREF(owner); + if (err != 0) + goto error; + } + + // stack effect: (__0 -- ) + inst(STORE_GLOBAL) { + PyObject *name = GETITEM(names, oparg); + PyObject *v = POP(); + int err; + err = 
PyDict_SetItem(GLOBALS(), name, v); + Py_DECREF(v); + if (err != 0) + goto error; + } + + inst(DELETE_GLOBAL, (--)) { + PyObject *name = GETITEM(names, oparg); + int err; + err = PyDict_DelItem(GLOBALS(), name); + // Can't use ERROR_IF here. + if (err != 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + + // stack effect: ( -- __0) + inst(LOAD_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *locals = LOCALS(); + PyObject *v; + if (locals == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when loading %R", name); + goto error; + } + if (PyDict_CheckExact(locals)) { + v = PyDict_GetItemWithError(locals, name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + v = PyObject_GetItem(locals, name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + goto error; + _PyErr_Clear(tstate); + } + } + if (v == NULL) { + v = PyDict_GetItemWithError(GLOBALS(), name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + else { + if (PyDict_CheckExact(BUILTINS())) { + v = PyDict_GetItemWithError(BUILTINS(), name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + } + PUSH(v); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL) { + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg>>1); + next_instr--; + _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_GLOBAL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int push_null = oparg & 1; + PEEK(0) = NULL; + PyObject *name = GETITEM(names, oparg>>1); + PyObject *v; + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + /* Slow-path if globals or builtins is not a dict */ + + /* namespace 1: globals */ + v = PyObject_GetItem(GLOBALS(), name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + + /* namespace 2: builtins */ + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + /* Skip over inline cache */ + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STACK_GROW(push_null); + PUSH(v); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL_MODULE) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + PyDictObject *dict = (PyDictObject 
*)GLOBALS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t version = read_u32(cache->module_keys_version); + DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + } + + // error: LOAD_GLOBAL has irregular stack effect + inst(LOAD_GLOBAL_BUILTIN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); + PyDictObject *mdict = (PyDictObject *)GLOBALS(); + PyDictObject *bdict = (PyDictObject *)BUILTINS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t mod_version = read_u32(cache->module_keys_version); + uint16_t bltn_version = cache->builtin_keys_version; + DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); + DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(bdict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + } + + inst(DELETE_FAST, (--)) { + PyObject *v = GETLOCAL(oparg); + ERROR_IF(v == NULL, unbound_local_error); + SETLOCAL(oparg, NULL); + } + + inst(MAKE_CELL, (--)) { + // "initial" is probably NULL but not if it's an arg (or set + // via PyFrame_LocalsToFast() before MAKE_CELL has run). + PyObject *initial = GETLOCAL(oparg); + PyObject *cell = PyCell_New(initial); + if (cell == NULL) { + goto resume_with_error; + } + SETLOCAL(oparg, cell); + } + + inst(DELETE_DEREF, (--)) { + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + // Can't use ERROR_IF here. + // Fortunately we don't need its superpower. 
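/* Illustrative sketch, not part of this patch: the shape of the guard
 * behind LOAD_GLOBAL_MODULE / LOAD_GLOBAL_BUILTIN above.  Specialization
 * records the dict keys' version and the slot index of the value; the
 * fast path only reuses that index while the version is unchanged and
 * deoptimizes otherwise.  The struct and helper below are hypothetical
 * stand-ins for the real cache layout. */
#include <stdint.h>

struct global_cache {
    uint32_t keys_version;   /* version observed when specializing */
    uint16_t index;          /* entry that held the value back then */
};

static void *
cached_global(struct global_cache *cache, uint32_t current_version,
              void **value_slots)
{
    if (current_version != cache->keys_version) {
        return NULL;                     /* deopt: key layout changed */
    }
    return value_slots[cache->index];    /* may be NULL if since deleted */
}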
+ if (oldobj == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PyCell_SET(cell, NULL); + Py_DECREF(oldobj); + } + + // stack effect: ( -- __0) + inst(LOAD_CLASSDEREF) { + PyObject *name, *value, *locals = LOCALS(); + assert(locals); + assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); + name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); + if (PyDict_CheckExact(locals)) { + value = PyDict_GetItemWithError(locals, name); + if (value != NULL) { + Py_INCREF(value); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + value = PyObject_GetItem(locals, name); + if (value == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + } + } + if (!value) { + PyObject *cell = GETLOCAL(oparg); + value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + Py_INCREF(value); + } + PUSH(value); + } + + // stack effect: ( -- __0) + inst(LOAD_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PUSH(Py_NewRef(value)); + } + + // stack effect: (__0 -- ) + inst(STORE_DEREF) { + PyObject *v = POP(); + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + PyCell_SET(cell, v); + Py_XDECREF(oldobj); + } + + // stack effect: ( -- ) + inst(COPY_FREE_VARS) { + /* Copy closure variables to free variables */ + PyCodeObject *co = frame->f_code; + assert(PyFunction_Check(frame->f_funcobj)); + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + assert(oparg == co->co_nfreevars); + for (int i = 0; i < oparg; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_STRING) { + PyObject *str; + str = _PyUnicode_JoinArray(&_Py_STR(empty), + stack_pointer - oparg, oparg); + if (str == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + Py_DECREF(item); + } + PUSH(str); + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_TUPLE) { + STACK_SHRINK(oparg); + PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); + if (tup == NULL) + goto error; + PUSH(tup); + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_LIST) { + PyObject *list = PyList_New(oparg); + if (list == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + PyList_SET_ITEM(list, oparg, item); + } + PUSH(list); + } + + // stack effect: ( -- ) + inst(LIST_TO_TUPLE) { + PyObject *list = POP(); + PyObject *tuple = PyList_AsTuple(list); + Py_DECREF(list); + if (tuple == NULL) { + goto error; + } + PUSH(tuple); + } + + // stack effect: (__0 -- ) + inst(LIST_EXTEND) { + PyObject *iterable = POP(); + PyObject *list = PEEK(oparg); + PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); + if (none_val == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && + (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) + { + _PyErr_Clear(tstate); + _PyErr_Format(tstate, PyExc_TypeError, + "Value after * must be an iterable, not %.200s", + Py_TYPE(iterable)->tp_name); + } + Py_DECREF(iterable); + goto error; + } + Py_DECREF(none_val); + Py_DECREF(iterable); + } + + // stack effect: (__0 -- ) + inst(SET_UPDATE) { + PyObject *iterable = POP(); + PyObject *set = 
PEEK(oparg); + int err = _PySet_Update(set, iterable); + Py_DECREF(iterable); + if (err < 0) { + goto error; + } + } + + // stack effect: (__array[oparg] -- __0) + inst(BUILD_SET) { + PyObject *set = PySet_New(NULL); + int err = 0; + int i; + if (set == NULL) + goto error; + for (i = oparg; i > 0; i--) { + PyObject *item = PEEK(i); + if (err == 0) + err = PySet_Add(set, item); + Py_DECREF(item); + } + STACK_SHRINK(oparg); + if (err != 0) { + Py_DECREF(set); + goto error; + } + PUSH(set); + } + + // stack effect: (__array[oparg*2] -- __0) + inst(BUILD_MAP) { + PyObject *map = _PyDict_FromItems( + &PEEK(2*oparg), 2, + &PEEK(2*oparg - 1), 2, + oparg); + if (map == NULL) + goto error; + + while (oparg--) { + Py_DECREF(POP()); + Py_DECREF(POP()); + } + PUSH(map); + } + + // stack effect: ( -- ) + inst(SETUP_ANNOTATIONS) { + int err; + PyObject *ann_dict; + if (LOCALS() == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when setting up annotations"); + goto error; + } + /* check if __annotations__ in locals()... */ + if (PyDict_CheckExact(LOCALS())) { + ann_dict = _PyDict_GetItemWithError(LOCALS(), + &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (_PyErr_Occurred(tstate)) { + goto error; + } + /* ...if not, create a new one */ + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + } + else { + /* do the same if locals() is not a dict */ + ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + else { + Py_DECREF(ann_dict); + } + } + } + + // stack effect: (__array[oparg] -- ) + inst(BUILD_CONST_KEY_MAP) { + PyObject *map; + PyObject *keys = TOP(); + if (!PyTuple_CheckExact(keys) || + PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { + _PyErr_SetString(tstate, PyExc_SystemError, + "bad BUILD_CONST_KEY_MAP keys argument"); + goto error; + } + map = _PyDict_FromItems( + &PyTuple_GET_ITEM(keys, 0), 1, + &PEEK(oparg + 1), 1, oparg); + if (map == NULL) { + goto error; + } + + Py_DECREF(POP()); + while (oparg--) { + Py_DECREF(POP()); + } + PUSH(map); + } + + // stack effect: (__0 -- ) + inst(DICT_UPDATE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + if (PyDict_Update(dict, update) < 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object is not a mapping", + Py_TYPE(update)->tp_name); + } + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + } + + // stack effect: (__0 -- ) + inst(DICT_MERGE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + + if (_PyDict_MergeEx(dict, update, 2) < 0) { + format_kwargs_error(tstate, PEEK(2 + oparg), update); + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + PREDICT(CALL_FUNCTION_EX); + } + + // stack effect: (__0, __1 -- ) + inst(MAP_ADD) { + PyObject *value = TOP(); + PyObject *key = SECOND(); + PyObject *map; + STACK_SHRINK(2); + map = PEEK(oparg); /* dict */ + assert(PyDict_CheckExact(map)); + /* map[key] = value */ + if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { + goto error; + } + PREDICT(JUMP_BACKWARD); 
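/* Illustrative sketch, not part of this patch: what BUILD_MAP above does
 * with its 2*oparg stack items, using the public API instead of
 * _PyDict_FromItems().  `items` stands for the key/value pairs in stack
 * order [k0, v0, k1, v1, ...]; the helper name is hypothetical. */
#include <Python.h>

static PyObject *
build_map(PyObject **items, Py_ssize_t n_pairs)
{
    PyObject *map = PyDict_New();
    if (map == NULL) {
        return NULL;
    }
    for (Py_ssize_t i = 0; i < n_pairs; i++) {
        if (PyDict_SetItem(map, items[2 * i], items[2 * i + 1]) < 0) {
            Py_DECREF(map);
            return NULL;
        }
    }
    return map;
}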
+ } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR) { + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg>>1); + next_instr--; + _Py_Specialize_LoadAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg >> 1); + PyObject *owner = TOP(); + if (oparg & 1) { + /* Designed to work in tandem with CALL. */ + PyObject* meth = NULL; + + int meth_found = _PyObject_GetMethod(owner, name, &meth); + + if (meth == NULL) { + /* Most likely attribute wasn't found. */ + goto error; + } + + if (meth_found) { + /* We can bypass temporary bound method object. + meth is unbound method and obj is self. + + meth | self | arg1 | ... | argN + */ + SET_TOP(meth); + PUSH(owner); // self + } + else { + /* meth is not an unbound method (but a regular attr, or + something was returned by a descriptor protocol). Set + the second element of the stack to NULL, to signal + CALL that it's not a method call. + + NULL | meth | arg1 | ... | argN + */ + SET_TOP(NULL); + Py_DECREF(owner); + PUSH(meth); + } + } + else { + PyObject *res = PyObject_GetAttr(owner, name); + if (res == NULL) { + goto error; + } + Py_DECREF(owner); + SET_TOP(res); + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_dictoffset < 0); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_MODULE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; + assert(dict != NULL); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), + LOAD_ATTR); + assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); + assert(cache->index < dict->ma_keys->dk_nentries); + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; + res = ep->me_value; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != 
type_version, LOAD_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, LOAD_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg>>1); + uint16_t hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + char *addr = (char *)owner + cache->index; + res = *(PyObject **)addr; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_CLASS) { + assert(cframe.use_tracing == 0); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *cls = TOP(); + DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + LOAD_ATTR); + assert(type_version != 0); + + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(cls); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_PROPERTY) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *fget = read_obj(cache->descr); + assert(Py_IS_TYPE(fget, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)fget; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 1); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(fget); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + for (int i = 1; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + 
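/* Illustrative sketch, not part of this patch: the core of LOAD_ATTR_SLOT
 * above.  Once the type version guard has passed, the attribute is read
 * from a fixed byte offset inside the instance, the way a __slots__
 * member descriptor stores it.  The helper name is hypothetical. */
#include <Python.h>

static PyObject *
read_slot(PyObject *owner, Py_ssize_t offset)
{
    PyObject *value = *(PyObject **)((char *)owner + offset);
    if (value == NULL) {
        return NULL;        /* unset slot: the specialization deoptimizes */
    }
    return Py_NewRef(value);
}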
JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *getattribute = read_obj(cache->descr); + assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)getattribute; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + + PyObject *name = GETITEM(names, oparg >> 1); + Py_INCREF(f); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + new_frame->localsplus[1] = Py_NewRef(name); + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0, __1 -- ) + inst(STORE_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + STAT_INC(STORE_ATTR, hit); + Py_ssize_t index = cache->index; + STACK_SHRINK(1); + PyObject *value = POP(); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + } + + // stack effect: (__0, __1 -- ) + inst(STORE_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), STORE_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, STORE_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg); + uint16_t hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); + PyObject *value, *old_value; + uint64_t new_version; + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == 
NULL, STORE_ATTR); + STACK_SHRINK(1); + value = POP(); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + STACK_SHRINK(1); + value = POP(); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); + /* Ensure dict is GC tracked if it needs to be */ + if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { + _PyObject_GC_TRACK(dict); + } + /* PEP 509 */ + dict->ma_version_tag = new_version; + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + } + + // stack effect: (__0, __1 -- ) + inst(STORE_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + char *addr = (char *)owner + cache->index; + STAT_INC(STORE_ATTR, hit); + STACK_SHRINK(1); + PyObject *value = POP(); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + } + + // stack effect: (__0 -- ) + inst(COMPARE_OP) { + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *right = TOP(); + PyObject *left = SECOND(); + next_instr--; + _Py_Specialize_CompareOp(left, right, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(COMPARE_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(oparg <= Py_GE); + PyObject *right = POP(); + PyObject *left = TOP(); + PyObject *res = PyObject_RichCompare(left, right, oparg); + SET_TOP(res); + Py_DECREF(left); + Py_DECREF(right); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + } + + // stack effect: (__0 -- ) + inst(COMPARE_OP_FLOAT_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int when_to_jump_mask = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + double dleft = PyFloat_AS_DOUBLE(left); + double dright = PyFloat_AS_DOUBLE(right); + int sign = (dleft > dright) - (dleft < dright); + DEOPT_IF(isnan(dleft), COMPARE_OP); + DEOPT_IF(isnan(dright), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + int jump = (1 << (sign + 1)) & when_to_jump_mask; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + } + + // stack effect: (__0 -- ) + inst(COMPARE_OP_INT_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (int ? 
int) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int when_to_jump_mask = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); + Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; + Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; + int sign = (ileft > iright) - (ileft < iright); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + int jump = (1 << (sign + 1)) & when_to_jump_mask; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + } + + // stack effect: (__0 -- ) + inst(COMPARE_OP_STR_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int invert = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int res = _PyUnicode_Equal(left, right); + assert(oparg == Py_EQ || oparg == Py_NE); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + assert(res == 0 || res == 1); + assert(invert == 0 || invert == 1); + int jump = res ^ invert; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + } + + // stack effect: (__0 -- ) + inst(IS_OP) { + PyObject *right = POP(); + PyObject *left = TOP(); + int res = Py_Is(left, right) ^ oparg; + PyObject *b = res ? Py_True : Py_False; + SET_TOP(Py_NewRef(b)); + Py_DECREF(left); + Py_DECREF(right); + } + + // stack effect: (__0 -- ) + inst(CONTAINS_OP) { + PyObject *right = POP(); + PyObject *left = POP(); + int res = PySequence_Contains(right, left); + Py_DECREF(left); + Py_DECREF(right); + if (res < 0) { + goto error; + } + PyObject *b = (res^oparg) ? 
Py_True : Py_False; + PUSH(Py_NewRef(b)); + } + + // stack effect: ( -- ) + inst(CHECK_EG_MATCH) { + PyObject *match_type = POP(); + if (check_except_star_type_valid(tstate, match_type) < 0) { + Py_DECREF(match_type); + goto error; + } + + PyObject *exc_value = TOP(); + PyObject *match = NULL, *rest = NULL; + int res = exception_group_match(exc_value, match_type, + &match, &rest); + Py_DECREF(match_type); + if (res < 0) { + goto error; + } + + if (match == NULL || rest == NULL) { + assert(match == NULL); + assert(rest == NULL); + goto error; + } + if (Py_IsNone(match)) { + PUSH(match); + Py_XDECREF(rest); + } + else { + /* Total or partial match - update the stack from + * [val] + * to + * [rest, match] + * (rest can be Py_None) + */ + + SET_TOP(rest); + PUSH(match); + PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + Py_DECREF(exc_value); + } + } + + // stack effect: ( -- ) + inst(CHECK_EXC_MATCH) { + PyObject *right = POP(); + PyObject *left = TOP(); + assert(PyExceptionInstance_Check(left)); + if (check_except_type_valid(tstate, right) < 0) { + Py_DECREF(right); + goto error; + } + + int res = PyErr_GivenExceptionMatches(left, right); + Py_DECREF(right); + PUSH(Py_NewRef(res ? Py_True : Py_False)); + } + + // stack effect: (__0 -- ) + inst(IMPORT_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *fromlist = POP(); + PyObject *level = TOP(); + PyObject *res; + res = import_name(tstate, frame, name, fromlist, level); + Py_DECREF(level); + Py_DECREF(fromlist); + SET_TOP(res); + if (res == NULL) + goto error; + } + + // stack effect: (__0 -- ) + inst(IMPORT_STAR) { + PyObject *from = POP(), *locals; + int err; + if (_PyFrame_FastToLocalsWithError(frame) < 0) { + Py_DECREF(from); + goto error; + } + + locals = LOCALS(); + if (locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found during 'import *'"); + Py_DECREF(from); + goto error; + } + err = import_all_from(tstate, locals, from); + _PyFrame_LocalsToFast(frame, 0); + Py_DECREF(from); + if (err != 0) + goto error; + } + + // stack effect: ( -- __0) + inst(IMPORT_FROM) { + PyObject *name = GETITEM(names, oparg); + PyObject *from = TOP(); + PyObject *res; + res = import_from(tstate, from, name); + PUSH(res); + if (res == NULL) + goto error; + } + + // stack effect: ( -- ) + inst(JUMP_FORWARD) { + JUMPBY(oparg); + } + + // stack effect: ( -- ) + inst(JUMP_BACKWARD) { + assert(oparg < INSTR_OFFSET()); + JUMPBY(-oparg); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_FALSE) { + PyObject *cond = POP(); + if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) + ; + else if (err == 0) { + JUMPBY(oparg); + } + else + goto error; + } + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_TRUE) { + PyObject *cond = POP(); + if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) + ; + else + goto error; + } + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_NOT_NONE) { + PyObject *value = POP(); + if (!Py_IsNone(value)) { + JUMPBY(oparg); + } + Py_DECREF(value); + } + + // stack effect: (__0 -- ) + inst(POP_JUMP_IF_NONE) { + PyObject *value = POP(); + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + 
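/* Illustrative sketch, not part of this patch: the truth test behind
 * POP_JUMP_IF_FALSE / POP_JUMP_IF_TRUE above -- cheap checks for the
 * bool singletons first, then PyObject_IsTrue() (which may call __bool__
 * or __len__ and can fail) for everything else.  Helper is hypothetical. */
#include <Python.h>

static int
jump_if_false(PyObject *cond, int *jump)
{
    if (Py_IsTrue(cond))  { *jump = 0; return 0; }
    if (Py_IsFalse(cond)) { *jump = 1; return 0; }
    int truthy = PyObject_IsTrue(cond);
    if (truthy < 0) {
        return -1;          /* exception set by __bool__ / __len__ */
    }
    *jump = (truthy == 0);
    return 0;
}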
JUMPBY(oparg); + } + else { + Py_DECREF(value); + } + } + + // error: JUMP_IF_FALSE_OR_POP stack effect depends on jump flag + inst(JUMP_IF_FALSE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsTrue(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else if (err == 0) { + JUMPBY(oparg); + } + else { + goto error; + } + } + } + + // error: JUMP_IF_TRUE_OR_POP stack effect depends on jump flag + inst(JUMP_IF_TRUE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsFalse(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else { + goto error; + } + } + } + + // stack effect: ( -- ) + inst(JUMP_BACKWARD_NO_INTERRUPT) { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ + JUMPBY(-oparg); + } + + // stack effect: ( -- __0) + inst(GET_LEN) { + // PUSH(len(TOS)) + Py_ssize_t len_i = PyObject_Length(TOP()); + if (len_i < 0) { + goto error; + } + PyObject *len_o = PyLong_FromSsize_t(len_i); + if (len_o == NULL) { + goto error; + } + PUSH(len_o); + } + + // stack effect: (__0, __1 -- ) + inst(MATCH_CLASS) { + // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or + // None on failure. + PyObject *names = POP(); + PyObject *type = POP(); + PyObject *subject = TOP(); + assert(PyTuple_CheckExact(names)); + PyObject *attrs = match_class(tstate, subject, type, oparg, names); + Py_DECREF(names); + Py_DECREF(type); + if (attrs) { + // Success! + assert(PyTuple_CheckExact(attrs)); + SET_TOP(attrs); + } + else if (_PyErr_Occurred(tstate)) { + // Error! + goto error; + } + else { + // Failure! + SET_TOP(Py_NewRef(Py_None)); + } + Py_DECREF(subject); + } + + // stack effect: ( -- __0) + inst(MATCH_MAPPING) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + } + + // stack effect: ( -- __0) + inst(MATCH_SEQUENCE) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + } + + // stack effect: ( -- __0) + inst(MATCH_KEYS) { + // On successful match, PUSH(values). Otherwise, PUSH(None). 
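/* Illustrative sketch, not part of this patch: MATCH_MAPPING and
 * MATCH_SEQUENCE above decide purely from type flags set by the class
 * machinery; no method on the subject is called.  Helper names are
 * hypothetical. */
#include <Python.h>

static int
matches_mapping(PyObject *subject)
{
    return (Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING) != 0;
}

static int
matches_sequence(PyObject *subject)
{
    return (Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE) != 0;
}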
+ PyObject *keys = TOP(); + PyObject *subject = SECOND(); + PyObject *values_or_none = match_keys(tstate, subject, keys); + if (values_or_none == NULL) { + goto error; + } + PUSH(values_or_none); + } + + // stack effect: ( -- ) + inst(GET_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + + // stack effect: ( -- ) + inst(GET_YIELD_FROM_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter; + if (PyCoro_CheckExact(iterable)) { + /* `iterable` is a coroutine */ + if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { + /* and it is used in a 'yield from' expression of a + regular generator. */ + Py_DECREF(iterable); + SET_TOP(NULL); + _PyErr_SetString(tstate, PyExc_TypeError, + "cannot 'yield from' a coroutine object " + "in a non-coroutine generator"); + goto error; + } + } + else if (!PyGen_CheckExact(iterable)) { + /* `iterable` is not a generator. */ + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + PREDICT(LOAD_CONST); + } + + // stack effect: ( -- __0) + inst(FOR_ITER) { + _PyForIterCache *cache = (_PyForIterCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_ForIter(TOP(), next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(FOR_ITER, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* before: [iter]; after: [iter, iter()] *or* [] */ + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + else if (tstate->c_tracefunc != NULL) { + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + } + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + } + } + + // stack effect: ( -- __0) + inst(FOR_ITER_LIST) { + assert(cframe.use_tracing == 0); + _PyListIterObject *it = (_PyListIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyListObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyList_GET_SIZE(seq)) { + PyObject *next = PyList_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_list; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + end_for_iter_list: + } + + // stack effect: ( -- __0) + inst(FOR_ITER_RANGE) { + assert(cframe.use_tracing == 0); + _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); + DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; + assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); + if (r->index >= r->len) { + STACK_SHRINK(1); + Py_DECREF(r); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + } + else { + long value = (long)(r->start + + (unsigned long)(r->index++) * r->step); + if 
(_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { + goto error; + } + // The STORE_FAST is already done. + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1); + } + } + + inst(FOR_ITER_GEN) { + assert(cframe.use_tracing == 0); + PyGenObject *gen = (PyGenObject *)TOP(); + DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); + DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->yield_offset = oparg; + _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); + assert(_Py_OPCODE(*next_instr) == END_FOR); + DISPATCH_INLINED(gen_frame); + } + + // stack effect: ( -- __0) + inst(BEFORE_ASYNC_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol " + "(missed __aexit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) + goto error; + PUSH(res); + PREDICT(GET_AWAITABLE); + } + + // stack effect: ( -- __0) + inst(BEFORE_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol " + "(missed __exit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) { + goto error; + } + PUSH(res); + } + + inst(WITH_EXCEPT_START, (exit_func, lasti, unused, val -- exit_func, lasti, unused, val, res)) { + /* At the top of the stack are 4 values: + - val: TOP = exc_info() + - unused: SECOND = previous exception + - lasti: THIRD = lasti of exception in exc_info() + - exit_func: FOURTH = the context.__exit__ bound method + We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). + Then we push the __exit__ return value. 
+ */ + PyObject *exc, *tb; + + assert(val && PyExceptionInstance_Check(val)); + exc = PyExceptionInstance_Class(val); + tb = PyException_GetTraceback(val); + Py_XDECREF(tb); + assert(PyLong_Check(lasti)); + (void)lasti; // Shut up compiler warning if asserts are off + PyObject *stack[4] = {NULL, exc, val, tb}; + res = PyObject_Vectorcall(exit_func, stack + 1, + 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); + ERROR_IF(res == NULL, error); + } + + // stack effect: ( -- __0) + inst(PUSH_EXC_INFO) { + PyObject *value = TOP(); + + _PyErr_StackItem *exc_info = tstate->exc_info; + if (exc_info->exc_value != NULL) { + SET_TOP(exc_info->exc_value); + } + else { + SET_TOP(Py_NewRef(Py_None)); + } + + PUSH(Py_NewRef(value)); + assert(PyExceptionInstance_Check(value)); + exc_info->exc_value = value; + + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_WITH_VALUES) { + /* Cached method object */ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + assert(type_version != 0); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; + DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + read_u32(cache->keys_version), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_WITH_DICT) { + /* Can be either a managed dict, or a tp_dictoffset offset.*/ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), + LOAD_ATTR); + /* Treat index as a signed 16 bit value */ + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); + PyDictObject *dict = *dictptr; + DEOPT_IF(dict == NULL, LOAD_ATTR); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), + LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular stack effect + inst(LOAD_ATTR_METHOD_NO_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // error: LOAD_ATTR has irregular 
stack effect + inst(LOAD_ATTR_METHOD_LAZY_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyObject *dict = *(PyObject **)((char *)self + dictoffset); + /* This object has a __dict__, just not yet created */ + DEOPT_IF(dict != NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BOUND_METHOD_EXACT_ARGS) { + DEOPT_IF(is_method(stack_pointer, oparg), CALL); + PyObject *function = PEEK(oparg + 1); + DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); + STAT_INC(CALL, hit); + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg + 1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg + 2) = Py_NewRef(meth); + Py_DECREF(function); + GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); + } + + // stack effect: ( -- ) + inst(KW_NAMES) { + assert(kwnames == NULL); + assert(oparg < PyTuple_GET_SIZE(consts)); + kwnames = GETITEM(consts, oparg); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL) { + _PyCallCache *cache = (_PyCallCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + int is_meth = is_method(stack_pointer, oparg); + int nargs = oparg + is_meth; + PyObject *callable = PEEK(nargs + 1); + next_instr--; + _Py_Specialize_Call(callable, next_instr, nargs, kwnames); + DISPATCH_SAME_OPARG(); + } + STAT_INC(CALL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int total_args, is_meth; + is_meth = is_method(stack_pointer, oparg); + PyObject *function = PEEK(oparg + 1); + if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg+1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg+2) = Py_NewRef(meth); + Py_DECREF(function); + is_meth = 1; + } + total_args = oparg + is_meth; + function = PEEK(total_args + 1); + int positional_args = total_args - KWNAMES_LEN(); + // Check if the call can be inlined or not + if (Py_TYPE(function) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) + { + int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); + STACK_SHRINK(total_args); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)function, locals, + stack_pointer, positional_args, kwnames + ); + kwnames = NULL; + STACK_SHRINK(2-is_meth); + // The frame has stolen all the arguments from the stack, + // so there is no need to clean them up. 
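[Note] The CALL instruction above distinguishes two paths: a callee that is a plain Python function can be "inlined" by pushing a new _PyInterpreterFrame and continuing in the same C-level loop (DISPATCH_INLINED), while any other callable goes through PyObject_Vectorcall. KW_NAMES records the tuple of keyword names consumed by the following CALL. As a hedged, Python-level illustration (function names are invented for the example):

    # Illustrative only. Roughly: pure-Python callees can be executed by
    # pushing a frame in the same eval loop, while C callables take the
    # vectorcall path; which happens at a given site is an interpreter detail.
    import dis

    def py_callee(a, b, *, scale=1):
        return (a + b) * scale

    def caller():
        x = py_callee(1, 2, scale=3)   # KW_NAMES ('scale',) + CALL, Python target
        y = abs(-5)                    # CALL on a C builtin -> vectorcall path
        return x, y

    print(caller())
    dis.dis(caller)   # KW_NAMES should appear just before the CALL it feeds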
+ if (new_frame == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + /* Callable is not a normal Python function */ + PyObject *res; + if (cframe.use_tracing) { + res = trace_call_function( + tstate, function, stack_pointer-total_args, + positional_args, kwnames); + } + else { + res = PyObject_Vectorcall( + function, stack_pointer-total_args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + } + kwnames = NULL; + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(function); + /* Clear the stack */ + STACK_SHRINK(total_args); + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_PY_EXACT_ARGS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(code->co_argcount != argcount, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_PY_WITH_DEFAULTS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(argcount > code->co_argcount, CALL); + int minargs = cache->min_args; + DEOPT_IF(argcount < minargs, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_argcount; i++) { + PyObject *def = PyTuple_GET_ITEM(func->func_defaults, + i - minargs); + new_frame->localsplus[i] = Py_NewRef(def); + } + for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_TYPE_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *obj = TOP(); + PyObject *callable = SECOND(); + 
DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); + STAT_INC(CALL, hit); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + PyObject *res = Py_NewRef(Py_TYPE(obj)); + Py_DECREF(callable); + Py_DECREF(obj); + STACK_SHRINK(2); + SET_TOP(res); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_STR_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PyObject_Str(arg); + Py_DECREF(arg); + Py_DECREF(&PyUnicode_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_TUPLE_1) { + assert(kwnames == NULL); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PySequence_Tuple(arg); + Py_DECREF(arg); + Py_DECREF(&PyTuple_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BUILTIN_CLASS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + int kwnames_len = KWNAMES_LEN(); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyType_Check(callable), CALL); + PyTypeObject *tp = (PyTypeObject *)callable; + DEOPT_IF(tp->tp_vectorcall == NULL, CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, + total_args-kwnames_len, kwnames); + kwnames = NULL; + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(tp); + STACK_SHRINK(1-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_BUILTIN_O) { + assert(cframe.use_tracing == 0); + /* Builtin METH_O functions */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
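[Note] CALL_NO_KW_TYPE_1, CALL_NO_KW_STR_1 and CALL_NO_KW_TUPLE_1 above are specializations for a single positional argument where the callable is still the original type, str or tuple builtin, and CALL_BUILTIN_CLASS / CALL_NO_KW_BUILTIN_O cover other C-level callables. A small, illustrative example of call sites these fast paths target; whether a site actually specializes depends on warm-up, so this is a sketch rather than a guarantee:

    # Illustrative only: one-argument builtin calls that the specialized
    # CALL_NO_KW_* instructions are aimed at.
    def classify(items):
        out = []
        for x in items:
            out.append((type(x), str(x), tuple(str(x))))  # type(o) / str(o) / tuple(o)
        return out

    for _ in range(100):          # give the adaptive interpreter a chance
        classify([1, 2.0, "ab"])  # to consider specializing these CALL sites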
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *arg = TOP(); + PyObject *res = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable), arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + Py_DECREF(arg); + Py_DECREF(callable); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_BUILTIN_FAST) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL functions, without keywords */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, + CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs) */ + PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + /* Not deopting because this doesn't mean our optimization was + wrong. `res` can be NULL for valid reasons. Eg. getattr(x, + 'invalid'). In those cases an exception is set, so we must + handle it. + */ + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_BUILTIN_FAST_WITH_KEYWORDS) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != + (METH_FASTCALL | METH_KEYWORDS), CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs, kwnames) */ + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void)) + PyCFunction_GET_FUNCTION(callable); + PyObject *res = cfunc( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args - KWNAMES_LEN(), + kwnames + ); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. 
*/ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_LEN) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* len(o) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.len, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + Py_ssize_t len_i = PyObject_Length(arg); + if (len_i < 0) { + goto error; + } + PyObject *res = PyLong_FromSsize_t(len_i); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + Py_DECREF(arg); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_ISINSTANCE) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* isinstance(o, o2) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); + STAT_INC(CALL, hit); + PyObject *cls = POP(); + PyObject *inst = TOP(); + int retval = PyObject_IsInstance(inst, cls); + if (retval < 0) { + Py_DECREF(cls); + goto error; + } + PyObject *res = PyBool_FromLong(retval); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(inst); + Py_DECREF(cls); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_LIST_APPEND) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + assert(oparg == 1); + PyObject *callable = PEEK(3); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); + PyObject *list = SECOND(); + DEOPT_IF(!PyList_Check(list), CALL); + STAT_INC(CALL, hit); + PyObject *arg = POP(); + if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { + goto error; + } + STACK_SHRINK(2); + Py_DECREF(list); + Py_DECREF(callable); + // CALL + POP_TOP + JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); + assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_O) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_O, CALL); + PyObject *arg = TOP(); + PyObject *self = SECOND(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
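[Note] CALL_NO_KW_LEN, CALL_NO_KW_ISINSTANCE and CALL_NO_KW_LIST_APPEND above deopt unless the callable is the interpreter's cached original len / isinstance / list.append (interp->callable_cache), so rebinding those names falls back to the generic CALL path. An illustrative sketch of the call shapes involved:

    # Illustrative only: call shapes targeted by the cached-callable
    # specializations; shadowing the builtin defeats the fast path.
    def collect_strings(values):
        out = []
        for v in values:
            if isinstance(v, str) and len(v) > 0:  # isinstance(...)/len(...) fast paths
                out.append(v)                      # list.append fast path
        return out

    print(collect_strings(["a", 1, "", "bc"]))

    len = lambda x: 0  # sites that resolve 'len' to this global no longer
                       # see the cached builtin and cannot stay specialized
    del len            # restore the builtin for the rest of the module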
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + Py_DECREF(arg); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); + PyTypeObject *d_type = callable->d_common.d_type; + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); + STAT_INC(CALL, hit); + int nargs = total_args-1; + STACK_SHRINK(nargs); + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; + PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), + kwnames); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { + assert(kwnames == NULL); + assert(oparg == 0 || oparg == 1); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + PyObject *self = TOP(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
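[Note] The CALL_*_METHOD_DESCRIPTOR_* cases above handle calls where the callable is a built-in method descriptor paired with the `self` pushed by the LOAD_ATTR method variants, one case per C calling convention (METH_O, METH_FASTCALL with or without keywords, METH_NOARGS). A hedged Python-level illustration of the call shapes; the mapping of each method to a convention is approximate and version-dependent:

    # Illustrative only: method calls on built-in types whose C-level calling
    # conventions roughly correspond to the descriptor specializations above.
    def summarize(text, items):
        upper = text.upper()         # no-argument method (METH_NOARGS-style)
        count = items.count(3)       # one-argument method (METH_O-style)
        joined = ",".join(str(i) for i in items)  # single-argument fast call
        return upper, count, joined

    print(summarize("abc", [1, 2, 3, 3]))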
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, NULL); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // stack effect: (__0, __array[oparg] -- ) + inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + /* Builtin METH_FASTCALL methods, without keywords */ + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + _PyCFunctionFast cfunc = + (_PyCFunctionFast)(void(*)(void))meth->ml_meth; + int nargs = total_args-1; + STACK_SHRINK(nargs); + PyObject *res = cfunc(self, stack_pointer, nargs); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + /* Clear the stack of the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + } + + // error: CALL_FUNCTION_EX has irregular stack effect + inst(CALL_FUNCTION_EX) { + PyObject *func, *callargs, *kwargs = NULL, *result; + if (oparg & 0x01) { + kwargs = POP(); + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. 
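[Note] CALL_FUNCTION_EX above is the fully general star-call: the low bit of oparg says whether a keyword mapping is on the stack (already normalized to an exact dict by DICT_MERGE, per the comment), and a non-tuple *args iterable is converted with PySequence_Tuple before do_call_core runs the call. An illustrative Python example of the call sites compiled to this opcode (names are invented for the example):

    # Illustrative only: star-call sites that compile to CALL_FUNCTION_EX.
    import dis

    def target(a, b, c=0, **extra):
        return a + b + c, extra

    args = (1, 2)
    kwargs = {"c": 3, "tag": "x"}

    print(target(*args, **kwargs))     # oparg & 0x01: kwargs dict on the stack
    print(target(*[4, 5]))             # a list is converted to a tuple first

    dis.dis("target(*args, **kwargs)") # should show CALL_FUNCTION_EX here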
+ assert(PyDict_CheckExact(kwargs)); + } + callargs = POP(); + func = TOP(); + if (!PyTuple_CheckExact(callargs)) { + if (check_args_iterable(tstate, func, callargs) < 0) { + Py_DECREF(callargs); + goto error; + } + Py_SETREF(callargs, PySequence_Tuple(callargs)); + if (callargs == NULL) { + goto error; + } + } + assert(PyTuple_CheckExact(callargs)); + + result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); + Py_DECREF(func); + Py_DECREF(callargs); + Py_XDECREF(kwargs); + + STACK_SHRINK(1); + assert(TOP() == NULL); + SET_TOP(result); + if (result == NULL) { + goto error; + } + CHECK_EVAL_BREAKER(); + } + + // error: MAKE_FUNCTION has irregular stack effect + inst(MAKE_FUNCTION) { + PyObject *codeobj = POP(); + PyFunctionObject *func = (PyFunctionObject *) + PyFunction_New(codeobj, GLOBALS()); + + Py_DECREF(codeobj); + if (func == NULL) { + goto error; + } + + if (oparg & 0x08) { + assert(PyTuple_CheckExact(TOP())); + func->func_closure = POP(); + } + if (oparg & 0x04) { + assert(PyTuple_CheckExact(TOP())); + func->func_annotations = POP(); + } + if (oparg & 0x02) { + assert(PyDict_CheckExact(TOP())); + func->func_kwdefaults = POP(); + } + if (oparg & 0x01) { + assert(PyTuple_CheckExact(TOP())); + func->func_defaults = POP(); + } + + PUSH((PyObject *)func); + } + + // stack effect: ( -- ) + inst(RETURN_GENERATOR) { + assert(PyFunction_Check(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); + if (gen == NULL) { + goto error; + } + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + _PyFrame_Copy(frame, gen_frame); + assert(frame->frame_obj == NULL); + gen->gi_frame_state = FRAME_CREATED; + gen_frame->owner = FRAME_OWNED_BY_GENERATOR; + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + _PyInterpreterFrame *prev = frame->previous; + _PyThreadState_PopFrame(tstate, frame); + frame = cframe.current_frame = prev; + _PyFrame_StackPush(frame, (PyObject *)gen); + goto resume_frame; + } + + // error: BUILD_SLICE has irregular stack effect + inst(BUILD_SLICE) { + PyObject *start, *stop, *step, *slice; + if (oparg == 3) + step = POP(); + else + step = NULL; + stop = POP(); + start = TOP(); + slice = PySlice_New(start, stop, step); + Py_DECREF(start); + Py_DECREF(stop); + Py_XDECREF(step); + SET_TOP(slice); + if (slice == NULL) + goto error; + } + + // error: FORMAT_VALUE has irregular stack effect + inst(FORMAT_VALUE) { + /* Handles f-string value formatting. */ + PyObject *result; + PyObject *fmt_spec; + PyObject *value; + PyObject *(*conv_fn)(PyObject *); + int which_conversion = oparg & FVC_MASK; + int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; + + fmt_spec = have_fmt_spec ? POP() : NULL; + value = POP(); + + /* See if any conversion is specified. */ + switch (which_conversion) { + case FVC_NONE: conv_fn = NULL; break; + case FVC_STR: conv_fn = PyObject_Str; break; + case FVC_REPR: conv_fn = PyObject_Repr; break; + case FVC_ASCII: conv_fn = PyObject_ASCII; break; + default: + _PyErr_Format(tstate, PyExc_SystemError, + "unexpected conversion flag %d", + which_conversion); + goto error; + } + + /* If there's a conversion function, call it and replace + value with that result. Otherwise, just use value, + without conversion. 
*/ + if (conv_fn != NULL) { + result = conv_fn(value); + Py_DECREF(value); + if (result == NULL) { + Py_XDECREF(fmt_spec); + goto error; + } + value = result; + } + + /* If value is a unicode object, and there's no fmt_spec, + then we know the result of format(value) is value + itself. In that case, skip calling format(). I plan to + move this optimization in to PyObject_Format() + itself. */ + if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { + /* Do nothing, just transfer ownership to result. */ + result = value; + } else { + /* Actually call format(). */ + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + if (result == NULL) { + goto error; + } + } + + PUSH(result); + } + + // stack effect: ( -- __0) + inst(COPY) { + assert(oparg != 0); + PyObject *peek = PEEK(oparg); + PUSH(Py_NewRef(peek)); + } + + inst(BINARY_OP, (lhs, rhs, unused/1 -- res)) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(0 <= oparg); + assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); + assert(binary_ops[oparg]); + res = binary_ops[oparg](lhs, rhs); + Py_DECREF(lhs); + Py_DECREF(rhs); + ERROR_IF(res == NULL, error); + } + + // stack effect: ( -- ) + inst(SWAP) { + assert(oparg != 0); + PyObject *top = TOP(); + SET_TOP(PEEK(oparg)); + PEEK(oparg) = top; + } + + // stack effect: ( -- ) + inst(EXTENDED_ARG) { + assert(oparg); + assert(cframe.use_tracing == 0); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + PRE_DISPATCH_GOTO(); + DISPATCH_GOTO(); + } + + // stack effect: ( -- ) + inst(CACHE) { + Py_UNREACHABLE(); + } + + +// END BYTECODES // + + } + error: + exception_unwind: + handle_eval_breaker: + resume_frame: + resume_with_error: + start_frame: + unbound_local_error: + ; +} + +// Future families go below this point // + +family(binary_subscr) = { + BINARY_SUBSCR, BINARY_SUBSCR_DICT, + BINARY_SUBSCR_GETITEM, BINARY_SUBSCR_LIST_INT, BINARY_SUBSCR_TUPLE_INT }; +family(call) = { + CALL, CALL_PY_EXACT_ARGS, + CALL_PY_WITH_DEFAULTS, CALL_BOUND_METHOD_EXACT_ARGS, CALL_BUILTIN_CLASS, + CALL_BUILTIN_FAST_WITH_KEYWORDS, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, CALL_NO_KW_BUILTIN_FAST, + CALL_NO_KW_BUILTIN_O, CALL_NO_KW_ISINSTANCE, CALL_NO_KW_LEN, + CALL_NO_KW_LIST_APPEND, CALL_NO_KW_METHOD_DESCRIPTOR_FAST, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, + CALL_NO_KW_METHOD_DESCRIPTOR_O, CALL_NO_KW_STR_1, CALL_NO_KW_TUPLE_1, + CALL_NO_KW_TYPE_1 }; +family(compare_op) = { + COMPARE_OP, COMPARE_OP_FLOAT_JUMP, + COMPARE_OP_INT_JUMP, COMPARE_OP_STR_JUMP }; +family(for_iter) = { + FOR_ITER, FOR_ITER_LIST, + FOR_ITER_RANGE }; +family(load_attr) = { + LOAD_ATTR, LOAD_ATTR_CLASS, + LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_MODULE, + LOAD_ATTR_PROPERTY, LOAD_ATTR_SLOT, LOAD_ATTR_WITH_HINT, + LOAD_ATTR_METHOD_LAZY_DICT, LOAD_ATTR_METHOD_NO_DICT, LOAD_ATTR_METHOD_WITH_DICT, + LOAD_ATTR_METHOD_WITH_VALUES }; +family(load_const) = { LOAD_CONST, LOAD_CONST__LOAD_FAST }; +family(load_fast) = { LOAD_FAST, LOAD_FAST__LOAD_CONST, LOAD_FAST__LOAD_FAST }; +family(load_global) = { + LOAD_GLOBAL, LOAD_GLOBAL_BUILTIN, + LOAD_GLOBAL_MODULE }; +family(store_attr) = { + STORE_ATTR, STORE_ATTR_INSTANCE_VALUE, + STORE_ATTR_SLOT, STORE_ATTR_WITH_HINT }; 
+family(store_fast) = { STORE_FAST, STORE_FAST__LOAD_FAST, STORE_FAST__STORE_FAST }; +family(store_subscr) = { + STORE_SUBSCR, STORE_SUBSCR_DICT, + STORE_SUBSCR_LIST_INT }; +family(unpack_sequence) = { + UNPACK_SEQUENCE, UNPACK_SEQUENCE_LIST, + UNPACK_SEQUENCE_TUPLE, UNPACK_SEQUENCE_TWO_TUPLE }; diff --git a/Python/ceval.c b/Python/ceval.c index c08c794005d1ab..80bfa21ad0b6f0 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -143,7 +143,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, const char *opname = _PyOpcode_OpName[opcode]; assert(opname != NULL); int offset = (int)(next_instr - _PyCode_CODE(frame->f_code)); - if (HAS_ARG(opcode)) { + if (HAS_ARG((int)_PyOpcode_Deopt[opcode])) { printf("%d: %s %d\n", offset * 2, opname, oparg); } else { @@ -155,7 +155,10 @@ static void lltrace_resume_frame(_PyInterpreterFrame *frame) { PyObject *fobj = frame->f_funcobj; - if (fobj == NULL || !PyFunction_Check(fobj)) { + if (frame->owner == FRAME_OWNED_BY_CSTACK || + fobj == NULL || + !PyFunction_Check(fobj) + ) { printf("\nResuming frame."); return; } @@ -391,8 +394,7 @@ match_keys(PyThreadState *tstate, PyObject *map, PyObject *keys) Py_DECREF(value); Py_DECREF(values); // Return None: - Py_INCREF(Py_None); - values = Py_None; + values = Py_NewRef(Py_None); goto done; } PyTuple_SET_ITEM(values, i, value); @@ -676,11 +678,11 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) #endif #if USE_COMPUTED_GOTOS -#define TARGET(op) TARGET_##op: INSTRUCTION_START(op); -#define DISPATCH_GOTO() goto *opcode_targets[opcode] +# define TARGET(op) TARGET_##op: INSTRUCTION_START(op); +# define DISPATCH_GOTO() goto *opcode_targets[opcode] #else -#define TARGET(op) case op: INSTRUCTION_START(op); -#define DISPATCH_GOTO() goto dispatch_opcode +# define TARGET(op) case op: TARGET_##op: INSTRUCTION_START(op); +# define DISPATCH_GOTO() goto dispatch_opcode #endif /* PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */ @@ -710,6 +712,16 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) DISPATCH_GOTO(); \ } +#define DISPATCH_INLINED(NEW_FRAME) \ + do { \ + _PyFrame_SetStackPointer(frame, stack_pointer); \ + frame->prev_instr = next_instr - 1; \ + (NEW_FRAME)->previous = frame; \ + frame = cframe.current_frame = (NEW_FRAME); \ + CALL_STAT_INC(inlined_py_calls); \ + goto start_frame; \ + } while (0) + #define CHECK_EVAL_BREAKER() \ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); \ if (_Py_atomic_load_relaxed_int32(eval_breaker)) { \ @@ -734,21 +746,15 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* Code access macros */ /* The integer overflow is checked by an assertion below. */ -#define INSTR_OFFSET() ((int)(next_instr - first_instr)) +#define INSTR_OFFSET() ((int)(next_instr - _PyCode_CODE(frame->f_code))) #define NEXTOPARG() do { \ _Py_CODEUNIT word = *next_instr; \ opcode = _Py_OPCODE(word); \ oparg = _Py_OPARG(word); \ } while (0) -#define JUMPTO(x) (next_instr = first_instr + (x)) +#define JUMPTO(x) (next_instr = _PyCode_CODE(frame->f_code) + (x)) #define JUMPBY(x) (next_instr += (x)) -/* Get opcode and oparg from original instructions, not quickened form. */ -#define TRACING_NEXTOPARG() do { \ - NEXTOPARG(); \ - opcode = _PyOpcode_Deopt[opcode]; \ - } while (0) - /* OpCode prediction macros Some opcodes tend to come in pairs thus making it possible to predict the second code when the first is run. 
For example, @@ -806,6 +812,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define THIRD() (stack_pointer[-3]) #define FOURTH() (stack_pointer[-4]) #define PEEK(n) (stack_pointer[-(n)]) +#define POKE(n, v) (stack_pointer[-(n)] = (v)) #define SET_TOP(v) (stack_pointer[-1] = (v)) #define SET_SECOND(v) (stack_pointer[-2] = (v)) #define BASIC_STACKADJ(n) (stack_pointer += n) @@ -849,10 +856,33 @@ GETITEM(PyObject *v, Py_ssize_t i) { GETLOCAL(i) = value; \ Py_XDECREF(tmp); } while (0) -#define JUMP_TO_INSTRUCTION(op) goto PREDICT_ID(op) +#define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op) +#ifdef Py_STATS +#define UPDATE_MISS_STATS(INSTNAME) \ + do { \ + STAT_INC(opcode, miss); \ + STAT_INC((INSTNAME), miss); \ + /* The counter is always the first cache entry: */ \ + if (ADAPTIVE_COUNTER_IS_ZERO(*next_instr)) { \ + STAT_INC((INSTNAME), deopt); \ + } \ + else { \ + /* This is about to be (incorrectly) incremented: */ \ + STAT_DEC((INSTNAME), deferred); \ + } \ + } while (0) +#else +#define UPDATE_MISS_STATS(INSTNAME) ((void)0) +#endif -#define DEOPT_IF(cond, instname) if (cond) { goto miss; } +#define DEOPT_IF(COND, INSTNAME) \ + if ((COND)) { \ + /* This is only a single jump on release builds! */ \ + UPDATE_MISS_STATS((INSTNAME)); \ + assert(_PyOpcode_Deopt[opcode] == (INSTNAME)); \ + GO_TO_INSTRUCTION(INSTNAME); \ + } #define GLOBALS() frame->f_globals @@ -904,11 +934,23 @@ GETITEM(PyObject *v, Py_ssize_t i) { dtrace_function_entry(frame); \ } -#define ADAPTIVE_COUNTER_IS_ZERO(cache) \ - (cache)->counter < (1<> ADAPTIVE_BACKOFF_BITS) == 0) + +#define ADAPTIVE_COUNTER_IS_MAX(COUNTER) \ + (((COUNTER) >> ADAPTIVE_BACKOFF_BITS) == ((1 << MAX_BACKOFF_VALUE) - 1)) -#define DECREMENT_ADAPTIVE_COUNTER(cache) \ - (cache)->counter -= (1<recursion_headroom--; return -1; } - } - return 0; -} - -static inline int _Py_EnterRecursivePy(PyThreadState *tstate) { - return (tstate->py_recursion_remaining-- <= 0) && - _Py_CheckRecursiveCallPy(tstate); -} - - -static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) { - tstate->py_recursion_remaining++; -} - - -/* It is only between the KW_NAMES instruction and the following CALL, - * that this has any meaning. - */ -typedef struct { - PyObject *kwnames; -} CallShape; - -// GH-89279: Must be a macro to be sure it's inlined by MSVC. -#define is_method(stack_pointer, args) (PEEK((args)+2) != NULL) - -#define KWNAMES_LEN() \ - (call_shape.kwnames == NULL ? 0 : ((int)PyTuple_GET_SIZE(call_shape.kwnames))) - -PyObject* _Py_HOT_FUNCTION -_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) -{ - _Py_EnsureTstateNotNULL(tstate); - CALL_STAT_INC(pyeval_calls); - -#if USE_COMPUTED_GOTOS -/* Import the static jump table */ -#include "opcode_targets.h" -#endif - -#ifdef Py_STATS - int lastopcode = 0; -#endif - // opcode is an 8-bit value to improve the code generated by MSVC - // for the big switch below (in combination with the EXTRA_CASES macro). - uint8_t opcode; /* Current opcode */ - int oparg; /* Current opcode argument, if any */ - _Py_atomic_int * const eval_breaker = &tstate->interp->ceval.eval_breaker; -#ifdef LLTRACE - int lltrace = 0; -#endif - - _PyCFrame cframe; - CallShape call_shape; - call_shape.kwnames = NULL; // Borrowed reference. Reset by CALL instructions. - - /* WARNING: Because the _PyCFrame lives on the C stack, - * but can be accessed from a heap allocated object (tstate) - * strict stack discipline must be maintained. 
- */ - _PyCFrame *prev_cframe = tstate->cframe; - cframe.use_tracing = prev_cframe->use_tracing; - cframe.previous = prev_cframe; - tstate->cframe = &cframe; - - frame->is_entry = true; - /* Push frame */ - frame->previous = prev_cframe->current_frame; - cframe.current_frame = frame; - - if (_Py_EnterRecursiveCallTstate(tstate, "")) { - tstate->c_recursion_remaining--; - tstate->py_recursion_remaining--; - goto exit_unwind; - } - - /* support for generator.throw() */ - if (throwflag) { - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - TRACE_FUNCTION_THROW_ENTRY(); - DTRACE_FUNCTION_ENTRY(); - goto resume_with_error; - } - - /* Local "register" variables. - * These are cached values from the frame and code object. */ - - PyObject *names; - PyObject *consts; - _Py_CODEUNIT *first_instr; - _Py_CODEUNIT *next_instr; - PyObject **stack_pointer; - -/* Sets the above local variables from the frame */ -#define SET_LOCALS_FROM_FRAME() \ - { \ - PyCodeObject *co = frame->f_code; \ - names = co->co_names; \ - consts = co->co_consts; \ - first_instr = _PyCode_CODE(co); \ - } \ - assert(_PyInterpreterFrame_LASTI(frame) >= -1); \ - /* Jump back to the last instruction executed... */ \ - next_instr = frame->prev_instr + 1; \ - stack_pointer = _PyFrame_GetStackPointer(frame); \ - /* Set stackdepth to -1. \ - Update when returning or calling trace function. \ - Having stackdepth <= 0 ensures that invalid \ - values are not visible to the cycle GC. \ - We choose -1 rather than 0 to assist debugging. \ - */ \ - frame->stacktop = -1; - - -start_frame: - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - -resume_frame: - SET_LOCALS_FROM_FRAME(); - -#ifdef LLTRACE - { - int r = PyDict_Contains(GLOBALS(), &_Py_ID(__lltrace__)); - if (r < 0) { - goto exit_unwind; - } - lltrace = r; - } - if (lltrace) { - lltrace_resume_frame(frame); - } -#endif - -#ifdef Py_DEBUG - /* _PyEval_EvalFrameDefault() must not be called with an exception set, - because it can clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!_PyErr_Occurred(tstate)); -#endif - - DISPATCH(); - -handle_eval_breaker: - - /* Do periodic things, like check for signals and async I/0. - * We need to do reasonably frequently, but not too frequently. - * All loops should include a check of the eval breaker. - * We also check on return from any builtin function. - */ - if (_Py_HandlePending(tstate) != 0) { - goto error; - } - DISPATCH(); - - { - /* Start instructions */ -#if USE_COMPUTED_GOTOS - { -#else - dispatch_opcode: - switch (opcode) { -#endif - - /* BEWARE! - It is essential that any operation that fails must goto error - and that all operation that succeed call DISPATCH() ! */ - - TARGET(NOP) { - DISPATCH(); - } - - TARGET(RESUME) { - _PyCode_Warmup(frame->f_code); - JUMP_TO_INSTRUCTION(RESUME_QUICK); - } - - TARGET(RESUME_QUICK) { - PREDICTED(RESUME_QUICK); - assert(tstate->cframe == &cframe); - assert(frame == cframe.current_frame); - if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { - goto handle_eval_breaker; - } - DISPATCH(); - } - - TARGET(LOAD_CLOSURE) { - /* We keep LOAD_CLOSURE so that the bytecode stays more readable. 
*/ - PyObject *value = GETLOCAL(oparg); - if (value == NULL) { - goto unbound_local_error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST_CHECK) { - PyObject *value = GETLOCAL(oparg); - if (value == NULL) { - goto unbound_local_error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_CONST) { - PREDICTED(LOAD_CONST); - PyObject *value = GETITEM(consts, oparg); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - DISPATCH(); - } - - TARGET(LOAD_FAST__LOAD_FAST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_FAST__LOAD_CONST) { - PyObject *value = GETLOCAL(oparg); - assert(value != NULL); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETITEM(consts, oparg); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST__LOAD_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - NEXTOPARG(); - next_instr++; - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_FAST__STORE_FAST) { - PyObject *value = POP(); - SETLOCAL(oparg, value); - NEXTOPARG(); - next_instr++; - value = POP(); - SETLOCAL(oparg, value); - DISPATCH(); - } - - TARGET(LOAD_CONST__LOAD_FAST) { - PyObject *value = GETITEM(consts, oparg); - NEXTOPARG(); - next_instr++; - Py_INCREF(value); - PUSH(value); - value = GETLOCAL(oparg); - assert(value != NULL); - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(POP_TOP) { - PyObject *value = POP(); - Py_DECREF(value); - DISPATCH(); - } - - TARGET(PUSH_NULL) { - /* Use BASIC_PUSH as NULL is not a valid object pointer */ - BASIC_PUSH(NULL); - DISPATCH(); - } - - TARGET(UNARY_POSITIVE) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Positive(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(UNARY_NEGATIVE) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Negative(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(UNARY_NOT) { - PyObject *value = TOP(); - int err = PyObject_IsTrue(value); - Py_DECREF(value); - if (err == 0) { - Py_INCREF(Py_True); - SET_TOP(Py_True); - DISPATCH(); - } - else if (err > 0) { - Py_INCREF(Py_False); - SET_TOP(Py_False); - DISPATCH(); - } - STACK_SHRINK(1); - goto error; - } - - TARGET(UNARY_INVERT) { - PyObject *value = TOP(); - PyObject *res = PyNumber_Invert(value); - Py_DECREF(value); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(BINARY_OP_MULTIPLY_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(prod); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (prod == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - 
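[Note] TARGET(BINARY_OP_MULTIPLY_INT) just above and the BINARY_OP_* cases that follow are specialized forms of the adaptive BINARY_OP instruction: once a site's counter runs down, _Py_Specialize_BinaryOp rewrites it to an int/float/str fast path, and DEOPT_IF falls back to the generic instruction when the operand types stop matching. As a hedged sketch of how this can be observed from Python (the `adaptive` parameter of dis is only available on recent interpreters, and the output differs between versions):

    # Illustrative only: warm up a function, then inspect the possibly
    # specialized bytecode. Exact opcode names depend on the CPython version.
    import dis

    def mix(a, b):
        return a * b + a - b       # BINARY_OP sites eligible for specialization

    for _ in range(1000):          # let the adaptive counters run down
        mix(3, 4)                  # int operands -> *_INT fast paths
        mix(1.5, 2.5)              # switching to floats may deopt and respecialize

    dis.dis(mix, adaptive=True)    # on 3.11+ this can show specialized forms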
TARGET(BINARY_OP_MULTIPLY_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dprod = ((PyFloatObject *)left)->ob_fval * - ((PyFloatObject *)right)->ob_fval; - PyObject *prod = PyFloat_FromDouble(dprod); - SET_SECOND(prod); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - STACK_SHRINK(1); - if (prod == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_SUBTRACT_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(sub); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (sub == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_SUBTRACT_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; - PyObject *sub = PyFloat_FromDouble(dsub); - SET_SECOND(sub); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - STACK_SHRINK(1); - if (sub == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_UNICODE) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *res = PyUnicode_Concat(left, right); - STACK_SHRINK(1); - SET_TOP(res); - _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (TOP() == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; - assert(_Py_OPCODE(true_next) == STORE_FAST || - _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); - PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); - DEOPT_IF(*target_local != left, BINARY_OP); - STAT_INC(BINARY_OP, hit); - /* Handle `left = left + right` or `left += right` for str. - * - * When possible, extend `left` in place rather than - * allocating a new PyUnicodeObject. This attempts to avoid - * quadratic behavior when one neglects to use str.join(). - * - * If `left` has only two references remaining (one from - * the stack, one in the locals), DECREFing `left` leaves - * only the locals reference, so PyUnicode_Append knows - * that the string is safe to mutate. 
- */ - assert(Py_REFCNT(left) >= 2); - _Py_DECREF_NO_DEALLOC(left); - STACK_SHRINK(2); - PyUnicode_Append(target_local, right); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (*target_local == NULL) { - goto error; - } - // The STORE_FAST is already done. - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_FLOAT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - double dsum = ((PyFloatObject *)left)->ob_fval + - ((PyFloatObject *)right)->ob_fval; - PyObject *sum = PyFloat_FromDouble(dsum); - SET_SECOND(sum); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - STACK_SHRINK(1); - if (sum == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_OP_ADD_INT) { - assert(cframe.use_tracing == 0); - PyObject *left = SECOND(); - PyObject *right = TOP(); - DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); - DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); - STAT_INC(BINARY_OP, hit); - PyObject *sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); - SET_SECOND(sum); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - STACK_SHRINK(1); - if (sum == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR) { - PREDICTED(BINARY_SUBSCR); - PyObject *sub = POP(); - PyObject *container = TOP(); - PyObject *res = PyObject_GetItem(container, sub); - Py_DECREF(container); - Py_DECREF(sub); - SET_TOP(res); - if (res == NULL) - goto error; - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SLICE) { - PyObject *stop = POP(); - PyObject *start = POP(); - PyObject *container = TOP(); - - PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); - if (slice == NULL) { - goto error; - } - PyObject *res = PyObject_GetItem(container, slice); - Py_DECREF(slice); - if (res == NULL) { - goto error; - } - SET_TOP(res); - Py_DECREF(container); - DISPATCH(); - } - - TARGET(STORE_SLICE) { - PyObject *stop = POP(); - PyObject *start = POP(); - PyObject *container = TOP(); - PyObject *v = SECOND(); - - PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); - if (slice == NULL) { - goto error; - } - int err = PyObject_SetItem(container, slice, v); - Py_DECREF(slice); - if (err) { - goto error; - } - STACK_SHRINK(2); - Py_DECREF(v); - Py_DECREF(container); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_ADAPTIVE) { - _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - next_instr--; - if (_Py_Specialize_BinarySubscr(container, sub, next_instr) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(BINARY_SUBSCR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(BINARY_SUBSCR); - } - } - - TARGET(BINARY_SUBSCR_LIST_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *list = SECOND(); - DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); - DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); - - // Deopt unless 0 <= sub < PyList_Size(list) - Py_ssize_t signed_magnitude = Py_SIZE(sub); - DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); - 
assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *res = PyList_GET_ITEM(list, index); - assert(res != NULL); - Py_INCREF(res); - STACK_SHRINK(1); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - SET_TOP(res); - Py_DECREF(list); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_TUPLE_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *tuple = SECOND(); - DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); - DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); - - // Deopt unless 0 <= sub < PyTuple_Size(list) - Py_ssize_t signed_magnitude = Py_SIZE(sub); - DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *res = PyTuple_GET_ITEM(tuple, index); - assert(res != NULL); - Py_INCREF(res); - STACK_SHRINK(1); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - SET_TOP(res); - Py_DECREF(tuple); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_DICT) { - assert(cframe.use_tracing == 0); - PyObject *dict = SECOND(); - DEOPT_IF(!PyDict_CheckExact(SECOND()), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - PyObject *sub = TOP(); - PyObject *res = PyDict_GetItemWithError(dict, sub); - if (res == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetKeyError(sub); - } - goto error; - } - Py_INCREF(res); - STACK_SHRINK(1); - Py_DECREF(sub); - SET_TOP(res); - Py_DECREF(dict); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - DISPATCH(); - } - - TARGET(BINARY_SUBSCR_GETITEM) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - PyTypeObject *tp = Py_TYPE(container); - DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); - assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); - PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; - assert(PyFunction_Check(cached)); - PyFunctionObject *getitem = (PyFunctionObject *)cached; - DEOPT_IF(getitem->func_version != cache->func_version, BINARY_SUBSCR); - PyCodeObject *code = (PyCodeObject *)getitem->func_code; - assert(code->co_argcount == 2); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); - STACK_SHRINK(2); - new_frame->localsplus[0] = container; - new_frame->localsplus[1] = sub; - for (int i = 2; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(LIST_APPEND) { - PyObject *v = POP(); - PyObject *list = PEEK(oparg); - if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) - goto error; - PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(SET_ADD) { - PyObject *v = POP(); - PyObject *set = PEEK(oparg); - int err; - err = PySet_Add(set, v); - 
Py_DECREF(v); - if (err != 0) - goto error; - PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(STORE_SUBSCR) { - PREDICTED(STORE_SUBSCR); - PyObject *sub = TOP(); - PyObject *container = SECOND(); - PyObject *v = THIRD(); - int err; - STACK_SHRINK(3); - /* container[sub] = v */ - err = PyObject_SetItem(container, sub, v); - Py_DECREF(v); - Py_DECREF(container); - Py_DECREF(sub); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(STORE_SUBSCR_ADAPTIVE) { - _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - next_instr--; - if (_Py_Specialize_StoreSubscr(container, sub, next_instr) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(STORE_SUBSCR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(STORE_SUBSCR); - } - } - - TARGET(STORE_SUBSCR_LIST_INT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *list = SECOND(); - PyObject *value = THIRD(); - DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); - DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); - - // Ensure nonnegative, zero-or-one-digit ints. - DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); - Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; - // Ensure index < len(list) - DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); - STAT_INC(STORE_SUBSCR, hit); - - PyObject *old_value = PyList_GET_ITEM(list, index); - PyList_SET_ITEM(list, index, value); - STACK_SHRINK(3); - assert(old_value != NULL); - Py_DECREF(old_value); - _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); - Py_DECREF(list); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(STORE_SUBSCR_DICT) { - assert(cframe.use_tracing == 0); - PyObject *sub = TOP(); - PyObject *dict = SECOND(); - PyObject *value = THIRD(); - DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); - STACK_SHRINK(3); - STAT_INC(STORE_SUBSCR, hit); - int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); - Py_DECREF(dict); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); - DISPATCH(); - } - - TARGET(DELETE_SUBSCR) { - PyObject *sub = TOP(); - PyObject *container = SECOND(); - int err; - STACK_SHRINK(2); - /* del container[sub] */ - err = PyObject_DelItem(container, sub); - Py_DECREF(container); - Py_DECREF(sub); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(PRINT_EXPR) { - PyObject *value = POP(); - PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); - PyObject *res; - if (hook == NULL) { - _PyErr_SetString(tstate, PyExc_RuntimeError, - "lost sys.displayhook"); - Py_DECREF(value); - goto error; - } - res = PyObject_CallOneArg(hook, value); - Py_DECREF(value); - if (res == NULL) - goto error; - Py_DECREF(res); - DISPATCH(); - } - - TARGET(RAISE_VARARGS) { - PyObject *cause = NULL, *exc = NULL; - switch (oparg) { - case 2: - cause = POP(); /* cause */ - /* fall through */ - case 1: - exc = POP(); /* exc */ - /* fall through */ - case 0: - if (do_raise(tstate, exc, cause)) { - goto exception_unwind; - } - break; - default: - _PyErr_SetString(tstate, PyExc_SystemError, - "bad RAISE_VARARGS oparg"); - break; - } - goto error; - } - - TARGET(RETURN_VALUE) { - PyObject *retval = POP(); - assert(EMPTY()); - _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); - _Py_LeaveRecursiveCallPy(tstate); - if (!frame->is_entry) { - 
frame = cframe.current_frame = pop_frame(tstate, frame); - _PyFrame_StackPush(frame, retval); - goto resume_frame; - } - _Py_LeaveRecursiveCallTstate(tstate); - /* Restore previous cframe and return. */ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return retval; - } - - TARGET(GET_AITER) { - unaryfunc getter = NULL; - PyObject *iter = NULL; - PyObject *obj = TOP(); - PyTypeObject *type = Py_TYPE(obj); - - if (type->tp_as_async != NULL) { - getter = type->tp_as_async->am_aiter; - } - - if (getter != NULL) { - iter = (*getter)(obj); - Py_DECREF(obj); - if (iter == NULL) { - SET_TOP(NULL); - goto error; - } - } - else { - SET_TOP(NULL); - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an object with " - "__aiter__ method, got %.100s", - type->tp_name); - Py_DECREF(obj); - goto error; - } - - if (Py_TYPE(iter)->tp_as_async == NULL || - Py_TYPE(iter)->tp_as_async->am_anext == NULL) { - - SET_TOP(NULL); - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' received an object from __aiter__ " - "that does not implement __anext__: %.100s", - Py_TYPE(iter)->tp_name); - Py_DECREF(iter); - goto error; - } - - SET_TOP(iter); - DISPATCH(); - } - - TARGET(GET_ANEXT) { - unaryfunc getter = NULL; - PyObject *next_iter = NULL; - PyObject *awaitable = NULL; - PyObject *aiter = TOP(); - PyTypeObject *type = Py_TYPE(aiter); - - if (PyAsyncGen_CheckExact(aiter)) { - awaitable = type->tp_as_async->am_anext(aiter); - if (awaitable == NULL) { - goto error; - } - } else { - if (type->tp_as_async != NULL){ - getter = type->tp_as_async->am_anext; - } - - if (getter != NULL) { - next_iter = (*getter)(aiter); - if (next_iter == NULL) { - goto error; - } - } - else { - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an iterator with " - "__anext__ method, got %.100s", - type->tp_name); - goto error; - } - - awaitable = _PyCoro_GetAwaitableIter(next_iter); - if (awaitable == NULL) { - _PyErr_FormatFromCause( - PyExc_TypeError, - "'async for' received an invalid object " - "from __anext__: %.100s", - Py_TYPE(next_iter)->tp_name); - - Py_DECREF(next_iter); - goto error; - } else { - Py_DECREF(next_iter); - } - } - - PUSH(awaitable); - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(GET_AWAITABLE) { - PREDICTED(GET_AWAITABLE); - PyObject *iterable = TOP(); - PyObject *iter = _PyCoro_GetAwaitableIter(iterable); - - if (iter == NULL) { - format_awaitable_error(tstate, Py_TYPE(iterable), oparg); - } - - Py_DECREF(iterable); - - if (iter != NULL && PyCoro_CheckExact(iter)) { - PyObject *yf = _PyGen_yf((PyGenObject*)iter); - if (yf != NULL) { - /* `iter` is a coroutine object that is being - awaited, `yf` is a pointer to the current awaitable - being awaited on. */ - Py_DECREF(yf); - Py_CLEAR(iter); - _PyErr_SetString(tstate, PyExc_RuntimeError, - "coroutine is being awaited already"); - /* The code below jumps to `error` if `iter` is NULL. 
*/ - } - } - - SET_TOP(iter); /* Even if it's NULL */ - - if (iter == NULL) { - goto error; - } - - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(SEND) { - assert(frame->is_entry); - assert(STACK_LEVEL() >= 2); - PyObject *v = POP(); - PyObject *receiver = TOP(); - PySendResult gen_status; - PyObject *retval; - if (tstate->c_tracefunc == NULL) { - gen_status = PyIter_Send(receiver, v, &retval); - } else { - if (Py_IsNone(v) && PyIter_Check(receiver)) { - retval = Py_TYPE(receiver)->tp_iternext(receiver); - } - else { - retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); - } - if (retval == NULL) { - if (tstate->c_tracefunc != NULL - && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - if (_PyGen_FetchStopIterationValue(&retval) == 0) { - gen_status = PYGEN_RETURN; - } - else { - gen_status = PYGEN_ERROR; - } - } - else { - gen_status = PYGEN_NEXT; - } - } - Py_DECREF(v); - if (gen_status == PYGEN_ERROR) { - assert(retval == NULL); - goto error; - } - if (gen_status == PYGEN_RETURN) { - assert(retval != NULL); - Py_DECREF(receiver); - SET_TOP(retval); - JUMPBY(oparg); - DISPATCH(); - } - assert(gen_status == PYGEN_NEXT); - assert(retval != NULL); - PUSH(retval); - DISPATCH(); - } - - TARGET(ASYNC_GEN_WRAP) { - PyObject *v = TOP(); - assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); - PyObject *w = _PyAsyncGenValueWrapperNew(v); - if (w == NULL) { - goto error; - } - SET_TOP(w); - Py_DECREF(v); - DISPATCH(); - } - - TARGET(YIELD_VALUE) { - // NOTE: It's important that YIELD_VALUE never raises an exception! - // The compiler treats any exception raised here as a failed close() - // or throw() call. - assert(oparg == STACK_LEVEL()); - assert(frame->is_entry); - PyObject *retval = POP(); - _PyFrame_GetGenerator(frame)->gi_frame_state = FRAME_SUSPENDED; - _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); - _Py_LeaveRecursiveCallPy(tstate); - _Py_LeaveRecursiveCallTstate(tstate); - /* Restore previous cframe and return. 
*/ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return retval; - } - - TARGET(POP_EXCEPT) { - _PyErr_StackItem *exc_info = tstate->exc_info; - PyObject *value = exc_info->exc_value; - exc_info->exc_value = POP(); - Py_XDECREF(value); - DISPATCH(); - } - - TARGET(RERAISE) { - if (oparg) { - PyObject *lasti = PEEK(oparg + 1); - if (PyLong_Check(lasti)) { - frame->prev_instr = first_instr + PyLong_AsLong(lasti); - assert(!_PyErr_Occurred(tstate)); - } - else { - assert(PyLong_Check(lasti)); - _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); - goto error; - } - } - PyObject *val = POP(); - assert(val && PyExceptionInstance_Check(val)); - PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); - PyObject *tb = PyException_GetTraceback(val); - _PyErr_Restore(tstate, exc, val, tb); - goto exception_unwind; - } - - TARGET(PREP_RERAISE_STAR) { - PyObject *excs = POP(); - assert(PyList_Check(excs)); - PyObject *orig = POP(); - - PyObject *val = _PyExc_PrepReraiseStar(orig, excs); - Py_DECREF(excs); - Py_DECREF(orig); - - if (val == NULL) { - goto error; - } - - PUSH(val); - DISPATCH(); - } - - TARGET(END_ASYNC_FOR) { - PyObject *val = POP(); - assert(val && PyExceptionInstance_Check(val)); - if (PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { - Py_DECREF(val); - Py_DECREF(POP()); - DISPATCH(); - } - else { - PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); - PyObject *tb = PyException_GetTraceback(val); - _PyErr_Restore(tstate, exc, val, tb); - goto exception_unwind; - } - } - - TARGET(CLEANUP_THROW) { - assert(throwflag); - PyObject *exc_value = TOP(); - assert(exc_value && PyExceptionInstance_Check(exc_value)); - if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { - PyObject *value = ((PyStopIterationObject *)exc_value)->value; - Py_INCREF(value); - Py_DECREF(POP()); // The StopIteration. - Py_DECREF(POP()); // The last sent value. - Py_DECREF(POP()); // The delegated sub-iterator. 
- PUSH(value); - DISPATCH(); - } - Py_INCREF(exc_value); - PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); - PyObject *exc_traceback = PyException_GetTraceback(exc_value); - _PyErr_Restore(tstate, exc_type, exc_value, exc_traceback); - goto exception_unwind; - } - - TARGET(LOAD_ASSERTION_ERROR) { - PyObject *value = PyExc_AssertionError; - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_BUILD_CLASS) { - PyObject *bc; - if (PyDict_CheckExact(BUILTINS())) { - bc = _PyDict_GetItemWithError(BUILTINS(), - &_Py_ID(__build_class__)); - if (bc == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetString(tstate, PyExc_NameError, - "__build_class__ not found"); - } - goto error; - } - Py_INCREF(bc); - } - else { - bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); - if (bc == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) - _PyErr_SetString(tstate, PyExc_NameError, - "__build_class__ not found"); - goto error; - } - } - PUSH(bc); - DISPATCH(); - } - - TARGET(STORE_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *v = POP(); - PyObject *ns = LOCALS(); - int err; - if (ns == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals found when storing %R", name); - Py_DECREF(v); - goto error; - } - if (PyDict_CheckExact(ns)) - err = PyDict_SetItem(ns, name, v); - else - err = PyObject_SetItem(ns, name, v); - Py_DECREF(v); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(DELETE_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *ns = LOCALS(); - int err; - if (ns == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals when deleting %R", name); - goto error; - } - err = PyObject_DelItem(ns, name); - if (err != 0) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, - name); - goto error; - } - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE) { - PREDICTED(UNPACK_SEQUENCE); - PyObject *seq = POP(); - PyObject **top = stack_pointer + oparg; - if (!unpack_iterable(tstate, seq, oparg, -1, top)) { - Py_DECREF(seq); - goto error; - } - STACK_GROW(oparg); - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *seq = TOP(); - next_instr--; - _Py_Specialize_UnpackSequence(seq, next_instr, oparg); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(UNPACK_SEQUENCE, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(UNPACK_SEQUENCE); - } - } - - TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { - PyObject *seq = TOP(); - DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); - DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); - PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_TUPLE) { - PyObject *seq = TOP(); - DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); - DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - STACK_SHRINK(1); - PyObject **items = _PyTuple_ITEMS(seq); - while (oparg--) { - PUSH(Py_NewRef(items[oparg])); - } - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_SEQUENCE_LIST) { - PyObject *seq = TOP(); - DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); - 
DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); - STAT_INC(UNPACK_SEQUENCE, hit); - STACK_SHRINK(1); - PyObject **items = _PyList_ITEMS(seq); - while (oparg--) { - PUSH(Py_NewRef(items[oparg])); - } - Py_DECREF(seq); - JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - DISPATCH(); - } - - TARGET(UNPACK_EX) { - int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); - PyObject *seq = POP(); - PyObject **top = stack_pointer + totalargs; - if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { - Py_DECREF(seq); - goto error; - } - STACK_GROW(totalargs); - Py_DECREF(seq); - DISPATCH(); - } - - TARGET(STORE_ATTR) { - PREDICTED(STORE_ATTR); - PyObject *name = GETITEM(names, oparg); - PyObject *owner = TOP(); - PyObject *v = SECOND(); - int err; - STACK_SHRINK(2); - err = PyObject_SetAttr(owner, name, v); - Py_DECREF(v); - Py_DECREF(owner); - if (err != 0) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(DELETE_ATTR) { - PyObject *name = GETITEM(names, oparg); - PyObject *owner = POP(); - int err; - err = PyObject_SetAttr(owner, name, (PyObject *)NULL); - Py_DECREF(owner); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(STORE_GLOBAL) { - PyObject *name = GETITEM(names, oparg); - PyObject *v = POP(); - int err; - err = PyDict_SetItem(GLOBALS(), name, v); - Py_DECREF(v); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(DELETE_GLOBAL) { - PyObject *name = GETITEM(names, oparg); - int err; - err = PyDict_DelItem(GLOBALS(), name); - if (err != 0) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - DISPATCH(); - } - - TARGET(LOAD_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *locals = LOCALS(); - PyObject *v; - if (locals == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals when loading %R", name); - goto error; - } - if (PyDict_CheckExact(locals)) { - v = PyDict_GetItemWithError(locals, name); - if (v != NULL) { - Py_INCREF(v); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - } - else { - v = PyObject_GetItem(locals, name); - if (v == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) - goto error; - _PyErr_Clear(tstate); - } - } - if (v == NULL) { - v = PyDict_GetItemWithError(GLOBALS(), name); - if (v != NULL) { - Py_INCREF(v); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - else { - if (PyDict_CheckExact(BUILTINS())) { - v = PyDict_GetItemWithError(BUILTINS(), name); - if (v == NULL) { - if (!_PyErr_Occurred(tstate)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - Py_INCREF(v); - } - else { - v = PyObject_GetItem(BUILTINS(), name); - if (v == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - } - } - } - PUSH(v); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL) { - PREDICTED(LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - PyObject *name = GETITEM(names, oparg>>1); - PyObject *v; - if (PyDict_CheckExact(GLOBALS()) - && PyDict_CheckExact(BUILTINS())) - { - v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (v == NULL) { - if (!_PyErr_Occurred(tstate)) { - /* _PyDict_LoadGlobal() returns NULL without raising - * an exception if the key doesn't exist */ - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto 
error; - } - Py_INCREF(v); - } - else { - /* Slow-path if globals or builtins is not a dict */ - - /* namespace 1: globals */ - v = PyObject_GetItem(GLOBALS(), name); - if (v == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - - /* namespace 2: builtins */ - v = PyObject_GetItem(BUILTINS(), name); - if (v == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - goto error; - } - } - } - /* Skip over inline cache */ - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STACK_GROW(push_null); - PUSH(v); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *name = GETITEM(names, oparg>>1); - next_instr--; - if (_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(LOAD_GLOBAL, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(LOAD_GLOBAL); - } - } - - TARGET(LOAD_GLOBAL_MODULE) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); - PyDictObject *dict = (PyDictObject *)GLOBALS(); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - uint32_t version = read_u32(cache->module_keys_version); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(dict->ma_keys)); - PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); - PyObject *res = entries[cache->index].me_value; - DEOPT_IF(res == NULL, LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STAT_INC(LOAD_GLOBAL, hit); - STACK_GROW(push_null+1); - Py_INCREF(res); - SET_TOP(res); - DISPATCH(); - } - - TARGET(LOAD_GLOBAL_BUILTIN) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); - DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); - PyDictObject *mdict = (PyDictObject *)GLOBALS(); - PyDictObject *bdict = (PyDictObject *)BUILTINS(); - _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; - uint32_t mod_version = read_u32(cache->module_keys_version); - uint16_t bltn_version = cache->builtin_keys_version; - DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); - DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(bdict->ma_keys)); - PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); - PyObject *res = entries[cache->index].me_value; - DEOPT_IF(res == NULL, LOAD_GLOBAL); - int push_null = oparg & 1; - PEEK(0) = NULL; - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); - STAT_INC(LOAD_GLOBAL, hit); - STACK_GROW(push_null+1); - Py_INCREF(res); - SET_TOP(res); - DISPATCH(); - } - - TARGET(DELETE_FAST) { - PyObject *v = GETLOCAL(oparg); - if (v != NULL) { - SETLOCAL(oparg, NULL); - DISPATCH(); - } - goto unbound_local_error; - } - - TARGET(MAKE_CELL) { - // "initial" is probably NULL but not if it's an arg (or set - // via PyFrame_LocalsToFast() before MAKE_CELL has run). 
- PyObject *initial = GETLOCAL(oparg); - PyObject *cell = PyCell_New(initial); - if (cell == NULL) { - goto resume_with_error; - } - SETLOCAL(oparg, cell); - DISPATCH(); - } - - TARGET(DELETE_DEREF) { - PyObject *cell = GETLOCAL(oparg); - PyObject *oldobj = PyCell_GET(cell); - if (oldobj != NULL) { - PyCell_SET(cell, NULL); - Py_DECREF(oldobj); - DISPATCH(); - } - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - - TARGET(LOAD_CLASSDEREF) { - PyObject *name, *value, *locals = LOCALS(); - assert(locals); - assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); - name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); - if (PyDict_CheckExact(locals)) { - value = PyDict_GetItemWithError(locals, name); - if (value != NULL) { - Py_INCREF(value); - } - else if (_PyErr_Occurred(tstate)) { - goto error; - } - } - else { - value = PyObject_GetItem(locals, name); - if (value == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - } - } - if (!value) { - PyObject *cell = GETLOCAL(oparg); - value = PyCell_GET(cell); - if (value == NULL) { - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - Py_INCREF(value); - } - PUSH(value); - DISPATCH(); - } - - TARGET(LOAD_DEREF) { - PyObject *cell = GETLOCAL(oparg); - PyObject *value = PyCell_GET(cell); - if (value == NULL) { - format_exc_unbound(tstate, frame->f_code, oparg); - goto error; - } - Py_INCREF(value); - PUSH(value); - DISPATCH(); - } - - TARGET(STORE_DEREF) { - PyObject *v = POP(); - PyObject *cell = GETLOCAL(oparg); - PyObject *oldobj = PyCell_GET(cell); - PyCell_SET(cell, v); - Py_XDECREF(oldobj); - DISPATCH(); - } - - TARGET(COPY_FREE_VARS) { - /* Copy closure variables to free variables */ - PyCodeObject *co = frame->f_code; - assert(PyFunction_Check(frame->f_funcobj)); - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; - int offset = co->co_nlocals + co->co_nplaincellvars; - assert(oparg == co->co_nfreevars); - for (int i = 0; i < oparg; ++i) { - PyObject *o = PyTuple_GET_ITEM(closure, i); - Py_INCREF(o); - frame->localsplus[offset + i] = o; - } - DISPATCH(); - } - - TARGET(BUILD_STRING) { - PyObject *str; - str = _PyUnicode_JoinArray(&_Py_STR(empty), - stack_pointer - oparg, oparg); - if (str == NULL) - goto error; - while (--oparg >= 0) { - PyObject *item = POP(); - Py_DECREF(item); - } - PUSH(str); - DISPATCH(); - } - - TARGET(BUILD_TUPLE) { - STACK_SHRINK(oparg); - PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); - if (tup == NULL) - goto error; - PUSH(tup); - DISPATCH(); - } - - TARGET(BUILD_LIST) { - PyObject *list = PyList_New(oparg); - if (list == NULL) - goto error; - while (--oparg >= 0) { - PyObject *item = POP(); - PyList_SET_ITEM(list, oparg, item); - } - PUSH(list); - DISPATCH(); - } - - TARGET(LIST_TO_TUPLE) { - PyObject *list = POP(); - PyObject *tuple = PyList_AsTuple(list); - Py_DECREF(list); - if (tuple == NULL) { - goto error; - } - PUSH(tuple); - DISPATCH(); - } - - TARGET(LIST_EXTEND) { - PyObject *iterable = POP(); - PyObject *list = PEEK(oparg); - PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); - if (none_val == NULL) { - if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && - (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) - { - _PyErr_Clear(tstate); - _PyErr_Format(tstate, PyExc_TypeError, - "Value after * must be an iterable, not %.200s", - Py_TYPE(iterable)->tp_name); - } - Py_DECREF(iterable); - goto error; - } - 
Py_DECREF(none_val); - Py_DECREF(iterable); - DISPATCH(); - } - - TARGET(SET_UPDATE) { - PyObject *iterable = POP(); - PyObject *set = PEEK(oparg); - int err = _PySet_Update(set, iterable); - Py_DECREF(iterable); - if (err < 0) { - goto error; - } - DISPATCH(); - } - - TARGET(BUILD_SET) { - PyObject *set = PySet_New(NULL); - int err = 0; - int i; - if (set == NULL) - goto error; - for (i = oparg; i > 0; i--) { - PyObject *item = PEEK(i); - if (err == 0) - err = PySet_Add(set, item); - Py_DECREF(item); - } - STACK_SHRINK(oparg); - if (err != 0) { - Py_DECREF(set); - goto error; - } - PUSH(set); - DISPATCH(); - } - - TARGET(BUILD_MAP) { - PyObject *map = _PyDict_FromItems( - &PEEK(2*oparg), 2, - &PEEK(2*oparg - 1), 2, - oparg); - if (map == NULL) - goto error; - - while (oparg--) { - Py_DECREF(POP()); - Py_DECREF(POP()); - } - PUSH(map); - DISPATCH(); - } - - TARGET(SETUP_ANNOTATIONS) { - int err; - PyObject *ann_dict; - if (LOCALS() == NULL) { - _PyErr_Format(tstate, PyExc_SystemError, - "no locals found when setting up annotations"); - goto error; - } - /* check if __annotations__ in locals()... */ - if (PyDict_CheckExact(LOCALS())) { - ann_dict = _PyDict_GetItemWithError(LOCALS(), - &_Py_ID(__annotations__)); - if (ann_dict == NULL) { - if (_PyErr_Occurred(tstate)) { - goto error; - } - /* ...if not, create a new one */ - ann_dict = PyDict_New(); - if (ann_dict == NULL) { - goto error; - } - err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), - ann_dict); - Py_DECREF(ann_dict); - if (err != 0) { - goto error; - } - } - } - else { - /* do the same if locals() is not a dict */ - ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); - if (ann_dict == NULL) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - goto error; - } - _PyErr_Clear(tstate); - ann_dict = PyDict_New(); - if (ann_dict == NULL) { - goto error; - } - err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), - ann_dict); - Py_DECREF(ann_dict); - if (err != 0) { - goto error; - } - } - else { - Py_DECREF(ann_dict); - } - } - DISPATCH(); - } - - TARGET(BUILD_CONST_KEY_MAP) { - PyObject *map; - PyObject *keys = TOP(); - if (!PyTuple_CheckExact(keys) || - PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { - _PyErr_SetString(tstate, PyExc_SystemError, - "bad BUILD_CONST_KEY_MAP keys argument"); - goto error; - } - map = _PyDict_FromItems( - &PyTuple_GET_ITEM(keys, 0), 1, - &PEEK(oparg + 1), 1, oparg); - if (map == NULL) { - goto error; - } - - Py_DECREF(POP()); - while (oparg--) { - Py_DECREF(POP()); - } - PUSH(map); - DISPATCH(); - } - - TARGET(DICT_UPDATE) { - PyObject *update = POP(); - PyObject *dict = PEEK(oparg); - if (PyDict_Update(dict, update) < 0) { - if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object is not a mapping", - Py_TYPE(update)->tp_name); - } - Py_DECREF(update); - goto error; - } - Py_DECREF(update); - DISPATCH(); - } - - TARGET(DICT_MERGE) { - PyObject *update = POP(); - PyObject *dict = PEEK(oparg); - - if (_PyDict_MergeEx(dict, update, 2) < 0) { - format_kwargs_error(tstate, PEEK(2 + oparg), update); - Py_DECREF(update); - goto error; - } - Py_DECREF(update); - PREDICT(CALL_FUNCTION_EX); - DISPATCH(); - } - - TARGET(MAP_ADD) { - PyObject *value = TOP(); - PyObject *key = SECOND(); - PyObject *map; - STACK_SHRINK(2); - map = PEEK(oparg); /* dict */ - assert(PyDict_CheckExact(map)); - /* map[key] = value */ - if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { - goto error; - } - 
PREDICT(JUMP_BACKWARD_QUICK); - DISPATCH(); - } - - TARGET(LOAD_ATTR) { - PREDICTED(LOAD_ATTR); - PyObject *name = GETITEM(names, oparg >> 1); - PyObject *owner = TOP(); - if (oparg & 1) { - /* Designed to work in tandem with CALL. */ - PyObject* meth = NULL; - - int meth_found = _PyObject_GetMethod(owner, name, &meth); - - if (meth == NULL) { - /* Most likely attribute wasn't found. */ - goto error; - } - - if (meth_found) { - /* We can bypass temporary bound method object. - meth is unbound method and obj is self. - - meth | self | arg1 | ... | argN - */ - SET_TOP(meth); - PUSH(owner); // self - } - else { - /* meth is not an unbound method (but a regular attr, or - something was returned by a descriptor protocol). Set - the second element of the stack to NULL, to signal - CALL that it's not a method call. - - NULL | meth | arg1 | ... | argN - */ - SET_TOP(NULL); - Py_DECREF(owner); - PUSH(meth); - } - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - PyObject *res = PyObject_GetAttr(owner, name); - if (res == NULL) { - goto error; - } - Py_DECREF(owner); - SET_TOP(res); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *owner = TOP(); - PyObject *name = GETITEM(names, oparg>>1); - next_instr--; - if (_Py_Specialize_LoadAttr(owner, next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(LOAD_ATTR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(LOAD_ATTR); - } - } - - TARGET(LOAD_ATTR_INSTANCE_VALUE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - assert(tp->tp_dictoffset < 0); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_MODULE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; - assert(dict != NULL); - DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), - LOAD_ATTR); - assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); - assert(cache->index < dict->ma_keys->dk_nentries); - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; - res = ep->me_value; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_WITH_HINT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - 
assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - DEOPT_IF(dict == NULL, LOAD_ATTR); - assert(PyDict_CheckExact((PyObject *)dict)); - PyObject *name = GETITEM(names, oparg>>1); - uint16_t hint = cache->index; - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); - if (DK_IS_UNICODE(dict->ma_keys)) { - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; - } - else { - PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; - } - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_SLOT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyObject *res; - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - char *addr = (char *)owner + cache->index; - res = *(PyObject **)addr; - DEOPT_IF(res == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_CLASS) { - assert(cframe.use_tracing == 0); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - PyObject *cls = TOP(); - DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, - LOAD_ATTR); - assert(type_version != 0); - - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - Py_INCREF(res); - SET_TOP(NULL); - STACK_GROW((oparg & 1)); - SET_TOP(res); - Py_DECREF(cls); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_PROPERTY) { - assert(cframe.use_tracing == 0); - DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - PyObject *owner = TOP(); - PyTypeObject *cls = Py_TYPE(owner); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); - assert(type_version != 0); - PyObject *fget = read_obj(cache->descr); - assert(Py_IS_TYPE(fget, &PyFunction_Type)); - PyFunctionObject *f = (PyFunctionObject *)fget; - uint32_t func_version = read_u32(cache->keys_version); - assert(func_version != 0); - DEOPT_IF(f->func_version != func_version, LOAD_ATTR); - PyCodeObject *code = (PyCodeObject *)f->func_code; - assert(code->co_argcount == 1); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(fget); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); - new_frame->localsplus[0] = owner; - for (int i = 1; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); 
- JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { - assert(cframe.use_tracing == 0); - DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - PyObject *owner = TOP(); - PyTypeObject *cls = Py_TYPE(owner); - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); - assert(type_version != 0); - PyObject *getattribute = read_obj(cache->descr); - assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); - PyFunctionObject *f = (PyFunctionObject *)getattribute; - uint32_t func_version = read_u32(cache->keys_version); - assert(func_version != 0); - DEOPT_IF(f->func_version != func_version, LOAD_ATTR); - PyCodeObject *code = (PyCodeObject *)f->func_code; - assert(code->co_argcount == 2); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(LOAD_ATTR, hit); - - PyObject *name = GETITEM(names, oparg >> 1); - Py_INCREF(f); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); - Py_INCREF(name); - new_frame->localsplus[0] = owner; - new_frame->localsplus[1] = name; - for (int i = 2; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - - TARGET(STORE_ATTR_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *owner = TOP(); - PyObject *name = GETITEM(names, oparg); - next_instr--; - if (_Py_Specialize_StoreAttr(owner, next_instr, name) < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(STORE_ATTR, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(STORE_ATTR); - } - } - - TARGET(STORE_ATTR_INSTANCE_VALUE) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); - STAT_INC(STORE_ATTR, hit); - Py_ssize_t index = cache->index; - STACK_SHRINK(1); - PyObject *value = POP(); - PyDictValues *values = _PyDictOrValues_GetValues(dorv); - PyObject *old_value = values->values[index]; - values->values[index] = value; - if (old_value == NULL) { - _PyDictValues_AddToInsertionOrder(values, index); - } - else { - Py_DECREF(old_value); - } - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(STORE_ATTR_WITH_HINT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - 
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - DEOPT_IF(dict == NULL, STORE_ATTR); - assert(PyDict_CheckExact((PyObject *)dict)); - PyObject *name = GETITEM(names, oparg); - uint16_t hint = cache->index; - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); - PyObject *value, *old_value; - if (DK_IS_UNICODE(dict->ma_keys)) { - PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, STORE_ATTR); - old_value = ep->me_value; - DEOPT_IF(old_value == NULL, STORE_ATTR); - STACK_SHRINK(1); - value = POP(); - ep->me_value = value; - } - else { - PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, STORE_ATTR); - old_value = ep->me_value; - DEOPT_IF(old_value == NULL, STORE_ATTR); - STACK_SHRINK(1); - value = POP(); - ep->me_value = value; - } - Py_DECREF(old_value); - STAT_INC(STORE_ATTR, hit); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { - _PyObject_GC_TRACK(dict); - } - /* PEP 509 */ - dict->ma_version_tag = DICT_NEXT_VERSION(); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(STORE_ATTR_SLOT) { - assert(cframe.use_tracing == 0); - PyObject *owner = TOP(); - PyTypeObject *tp = Py_TYPE(owner); - _PyAttrCache *cache = (_PyAttrCache *)next_instr; - uint32_t type_version = read_u32(cache->version); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - char *addr = (char *)owner + cache->index; - STAT_INC(STORE_ATTR, hit); - STACK_SHRINK(1); - PyObject *value = POP(); - PyObject *old_value = *(PyObject **)addr; - *(PyObject **)addr = value; - Py_XDECREF(old_value); - Py_DECREF(owner); - JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); - DISPATCH(); - } - - TARGET(COMPARE_OP) { - PREDICTED(COMPARE_OP); - assert(oparg <= Py_GE); - PyObject *right = POP(); - PyObject *left = TOP(); - PyObject *res = PyObject_RichCompare(left, right, oparg); - SET_TOP(res); - Py_DECREF(left); - Py_DECREF(right); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - DISPATCH(); - } - - TARGET(COMPARE_OP_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *right = TOP(); - PyObject *left = SECOND(); - next_instr--; - _Py_Specialize_CompareOp(left, right, next_instr, oparg); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(COMPARE_OP, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(COMPARE_OP); - } - } - - TARGET(COMPARE_OP_FLOAT_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (float ? 
float) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int when_to_jump_mask = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); - double dleft = PyFloat_AS_DOUBLE(left); - double dright = PyFloat_AS_DOUBLE(right); - int sign = (dleft > dright) - (dleft < dright); - DEOPT_IF(isnan(dleft), COMPARE_OP); - DEOPT_IF(isnan(dright), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - int jump = (1 << (sign + 1)) & when_to_jump_mask; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(COMPARE_OP_INT_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int when_to_jump_mask = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); - DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); - DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); - Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; - Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; - int sign = (ileft > iright) - (ileft < iright); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - int jump = (1 << (sign + 1)) & when_to_jump_mask; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(COMPARE_OP_STR_JUMP) { - assert(cframe.use_tracing == 0); - // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) - _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; - int invert = cache->mask; - PyObject *right = TOP(); - PyObject *left = SECOND(); - DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); - DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); - STAT_INC(COMPARE_OP, hit); - int res = _PyUnicode_Equal(left, right); - if (res < 0) { - goto error; - } - assert(oparg == Py_EQ || oparg == Py_NE); - JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); - NEXTOPARG(); - assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); - STACK_SHRINK(2); - _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - assert(res == 0 || res == 1); - assert(invert == 0 || invert == 1); - int jump = res ^ invert; - if (!jump) { - next_instr++; - } - else { - JUMPBY(1 + oparg); - } - DISPATCH(); - } - - TARGET(IS_OP) { - PyObject *right = POP(); - PyObject *left = TOP(); - int res = Py_Is(left, right) ^ oparg; - PyObject *b = res ? 
Py_True : Py_False; - Py_INCREF(b); - SET_TOP(b); - Py_DECREF(left); - Py_DECREF(right); - DISPATCH(); - } - - TARGET(CONTAINS_OP) { - PyObject *right = POP(); - PyObject *left = POP(); - int res = PySequence_Contains(right, left); - Py_DECREF(left); - Py_DECREF(right); - if (res < 0) { - goto error; - } - PyObject *b = (res^oparg) ? Py_True : Py_False; - Py_INCREF(b); - PUSH(b); - DISPATCH(); - } - - TARGET(CHECK_EG_MATCH) { - PyObject *match_type = POP(); - if (check_except_star_type_valid(tstate, match_type) < 0) { - Py_DECREF(match_type); - goto error; - } - - PyObject *exc_value = TOP(); - PyObject *match = NULL, *rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); - Py_DECREF(match_type); - if (res < 0) { - goto error; - } - - if (match == NULL || rest == NULL) { - assert(match == NULL); - assert(rest == NULL); - goto error; - } - if (Py_IsNone(match)) { - PUSH(match); - Py_XDECREF(rest); - } - else { - /* Total or partial match - update the stack from - * [val] - * to - * [rest, match] - * (rest can be Py_None) - */ - - SET_TOP(rest); - PUSH(match); - PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); - Py_DECREF(exc_value); - } - DISPATCH(); - } - - TARGET(CHECK_EXC_MATCH) { - PyObject *right = POP(); - PyObject *left = TOP(); - assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { - Py_DECREF(right); - goto error; - } - - int res = PyErr_GivenExceptionMatches(left, right); - Py_DECREF(right); - PUSH(Py_NewRef(res ? Py_True : Py_False)); - DISPATCH(); - } - - TARGET(IMPORT_NAME) { - PyObject *name = GETITEM(names, oparg); - PyObject *fromlist = POP(); - PyObject *level = TOP(); - PyObject *res; - res = import_name(tstate, frame, name, fromlist, level); - Py_DECREF(level); - Py_DECREF(fromlist); - SET_TOP(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(IMPORT_STAR) { - PyObject *from = POP(), *locals; - int err; - if (_PyFrame_FastToLocalsWithError(frame) < 0) { - Py_DECREF(from); - goto error; - } - - locals = LOCALS(); - if (locals == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found during 'import *'"); - Py_DECREF(from); - goto error; - } - err = import_all_from(tstate, locals, from); - _PyFrame_LocalsToFast(frame, 0); - Py_DECREF(from); - if (err != 0) - goto error; - DISPATCH(); - } - - TARGET(IMPORT_FROM) { - PyObject *name = GETITEM(names, oparg); - PyObject *from = TOP(); - PyObject *res; - res = import_from(tstate, from, name); - PUSH(res); - if (res == NULL) - goto error; - DISPATCH(); - } - - TARGET(JUMP_FORWARD) { - JUMPBY(oparg); - DISPATCH(); - } - - TARGET(JUMP_BACKWARD) { - _PyCode_Warmup(frame->f_code); - JUMP_TO_INSTRUCTION(JUMP_BACKWARD_QUICK); - } - - TARGET(POP_JUMP_IF_FALSE) { - PREDICTED(POP_JUMP_IF_FALSE); - PyObject *cond = POP(); - if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - JUMPBY(oparg); - } - else { - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - if (err > 0) - ; - else if (err == 0) { - JUMPBY(oparg); - } - else - goto error; - } - DISPATCH(); - } - - TARGET(POP_JUMP_IF_TRUE) { - PyObject *cond = POP(); - if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - JUMPBY(oparg); - } - else { - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - if (err > 0) { - JUMPBY(oparg); - } - else if (err == 0) - ; - else - goto error; - } - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NOT_NONE) { - PyObject 
*value = POP(); - if (!Py_IsNone(value)) { - JUMPBY(oparg); - } - Py_DECREF(value); - DISPATCH(); - } - - TARGET(POP_JUMP_IF_NONE) { - PyObject *value = POP(); - if (Py_IsNone(value)) { - _Py_DECREF_NO_DEALLOC(value); - JUMPBY(oparg); - } - else { - Py_DECREF(value); - } - DISPATCH(); - } - - TARGET(JUMP_IF_FALSE_OR_POP) { - PyObject *cond = TOP(); - int err; - if (Py_IsTrue(cond)) { - STACK_SHRINK(1); - _Py_DECREF_NO_DEALLOC(cond); - DISPATCH(); - } - if (Py_IsFalse(cond)) { - JUMPBY(oparg); - DISPATCH(); - } - err = PyObject_IsTrue(cond); - if (err > 0) { - STACK_SHRINK(1); - Py_DECREF(cond); - } - else if (err == 0) - JUMPBY(oparg); - else - goto error; - DISPATCH(); - } - - TARGET(JUMP_IF_TRUE_OR_POP) { - PyObject *cond = TOP(); - int err; - if (Py_IsFalse(cond)) { - STACK_SHRINK(1); - _Py_DECREF_NO_DEALLOC(cond); - DISPATCH(); - } - if (Py_IsTrue(cond)) { - JUMPBY(oparg); - DISPATCH(); - } - err = PyObject_IsTrue(cond); - if (err > 0) { - JUMPBY(oparg); - } - else if (err == 0) { - STACK_SHRINK(1); - Py_DECREF(cond); - } - else - goto error; - DISPATCH(); - } - - TARGET(JUMP_BACKWARD_NO_INTERRUPT) { - /* This bytecode is used in the `yield from` or `await` loop. - * If there is an interrupt, we want it handled in the innermost - * generator or coroutine, so we deliberately do not check it here. - * (see bpo-30039). - */ - JUMPBY(-oparg); - DISPATCH(); - } - - TARGET(JUMP_BACKWARD_QUICK) { - PREDICTED(JUMP_BACKWARD_QUICK); - assert(oparg < INSTR_OFFSET()); - JUMPBY(-oparg); - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(GET_LEN) { - // PUSH(len(TOS)) - Py_ssize_t len_i = PyObject_Length(TOP()); - if (len_i < 0) { - goto error; - } - PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) { - goto error; - } - PUSH(len_o); - DISPATCH(); - } - - TARGET(MATCH_CLASS) { - // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or - // None on failure. - PyObject *names = POP(); - PyObject *type = POP(); - PyObject *subject = TOP(); - assert(PyTuple_CheckExact(names)); - PyObject *attrs = match_class(tstate, subject, type, oparg, names); - Py_DECREF(names); - Py_DECREF(type); - if (attrs) { - // Success! - assert(PyTuple_CheckExact(attrs)); - SET_TOP(attrs); - } - else if (_PyErr_Occurred(tstate)) { - // Error! - goto error; - } - else { - // Failure! - Py_INCREF(Py_None); - SET_TOP(Py_None); - } - Py_DECREF(subject); - DISPATCH(); - } - - TARGET(MATCH_MAPPING) { - PyObject *subject = TOP(); - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; - PyObject *res = match ? Py_True : Py_False; - Py_INCREF(res); - PUSH(res); - PREDICT(POP_JUMP_IF_FALSE); - DISPATCH(); - } - - TARGET(MATCH_SEQUENCE) { - PyObject *subject = TOP(); - int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; - PyObject *res = match ? Py_True : Py_False; - Py_INCREF(res); - PUSH(res); - PREDICT(POP_JUMP_IF_FALSE); - DISPATCH(); - } - - TARGET(MATCH_KEYS) { - // On successful match, PUSH(values). Otherwise, PUSH(None). 
- PyObject *keys = TOP(); - PyObject *subject = SECOND(); - PyObject *values_or_none = match_keys(tstate, subject, keys); - if (values_or_none == NULL) { - goto error; - } - PUSH(values_or_none); - DISPATCH(); - } - - TARGET(GET_ITER) { - /* before: [obj]; after [getiter(obj)] */ - PyObject *iterable = TOP(); - PyObject *iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - SET_TOP(iter); - if (iter == NULL) - goto error; - DISPATCH(); - } - - TARGET(GET_YIELD_FROM_ITER) { - /* before: [obj]; after [getiter(obj)] */ - PyObject *iterable = TOP(); - PyObject *iter; - if (PyCoro_CheckExact(iterable)) { - /* `iterable` is a coroutine */ - if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { - /* and it is used in a 'yield from' expression of a - regular generator. */ - Py_DECREF(iterable); - SET_TOP(NULL); - _PyErr_SetString(tstate, PyExc_TypeError, - "cannot 'yield from' a coroutine object " - "in a non-coroutine generator"); - goto error; - } - } - else if (!PyGen_CheckExact(iterable)) { - /* `iterable` is not a generator. */ - iter = PyObject_GetIter(iterable); - Py_DECREF(iterable); - SET_TOP(iter); - if (iter == NULL) - goto error; - } - PREDICT(LOAD_CONST); - DISPATCH(); - } - - TARGET(FOR_ITER) { - PREDICTED(FOR_ITER); - /* before: [iter]; after: [iter, iter()] *or* [] */ - PyObject *iter = TOP(); - PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); - if (next != NULL) { - PUSH(next); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); - DISPATCH(); - } - if (_PyErr_Occurred(tstate)) { - if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - goto error; - } - else if (tstate->c_tracefunc != NULL) { - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - } - _PyErr_Clear(tstate); - } - /* iterator ended normally */ - STACK_SHRINK(1); - Py_DECREF(iter); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); - DISPATCH(); - } - - TARGET(FOR_ITER_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyForIterCache *cache = (_PyForIterCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - next_instr--; - _Py_Specialize_ForIter(TOP(), next_instr); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(FOR_ITER, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(FOR_ITER); - } - } - - TARGET(FOR_ITER_LIST) { - assert(cframe.use_tracing == 0); - _PyListIterObject *it = (_PyListIterObject *)TOP(); - DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); - PyListObject *seq = it->it_seq; - if (seq) { - if (it->it_index < PyList_GET_SIZE(seq)) { - PyObject *next = PyList_GET_ITEM(seq, it->it_index++); - Py_INCREF(next); - PUSH(next); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); - DISPATCH(); - } - it->it_seq = NULL; - Py_DECREF(seq); - } - STACK_SHRINK(1); - Py_DECREF(it); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); - DISPATCH(); - } - - TARGET(FOR_ITER_RANGE) { - assert(cframe.use_tracing == 0); - _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); - DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); - STAT_INC(FOR_ITER, hit); - _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; - assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); - if (r->index >= r->len) { - STACK_SHRINK(1); - Py_DECREF(r); - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); - DISPATCH(); - } - long value = (long)(r->start + - (unsigned long)(r->index++) * r->step); - if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { - goto error; - } - // The STORE_FAST is already done. 
- JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1); - DISPATCH(); - } - - TARGET(BEFORE_ASYNC_WITH) { - PyObject *mgr = TOP(); - PyObject *res; - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol", - Py_TYPE(mgr)->tp_name); - } - goto error; - } - PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol " - "(missed __aexit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - goto error; - } - SET_TOP(exit); - Py_DECREF(mgr); - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) - goto error; - PUSH(res); - PREDICT(GET_AWAITABLE); - DISPATCH(); - } - - TARGET(BEFORE_WITH) { - PyObject *mgr = TOP(); - PyObject *res; - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol", - Py_TYPE(mgr)->tp_name); - } - goto error; - } - PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol " - "(missed __exit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - goto error; - } - SET_TOP(exit); - Py_DECREF(mgr); - res = _PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - goto error; - } - PUSH(res); - DISPATCH(); - } - - TARGET(WITH_EXCEPT_START) { - /* At the top of the stack are 4 values: - - TOP = exc_info() - - SECOND = previous exception - - THIRD: lasti of exception in exc_info() - - FOURTH: the context.__exit__ bound method - We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). - Then we push the __exit__ return value. 
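The removed BEFORE_WITH handler looks up __enter__ and __exit__ on the context manager, keeps the bound __exit__ on the value stack for the end of the block, and pushes the result of calling __enter__. The sketch below walks the same protocol from an embedding program using only the public C API (PyObject_GetAttrString, PyObject_CallNoArgs); it is only an approximation, since the real handler uses type-level special-method lookup and the value stack rather than C locals, and io.StringIO is just a convenient stand-in context manager.

#define PY_SSIZE_T_CLEAN
#include <Python.h>

static int run_with(PyObject *mgr) {
    PyObject *enter = PyObject_GetAttrString(mgr, "__enter__");
    PyObject *exit = enter ? PyObject_GetAttrString(mgr, "__exit__") : NULL;
    if (enter == NULL || exit == NULL) {
        Py_XDECREF(enter);
        Py_XDECREF(exit);
        return -1;                        /* object is not a context manager */
    }
    PyObject *value = PyObject_CallNoArgs(enter);   /* the "as" target */
    Py_DECREF(enter);
    if (value == NULL) {
        Py_DECREF(exit);
        return -1;                        /* __enter__ raised */
    }
    /* ... the body of the with-statement would run here ... */
    Py_DECREF(value);
    /* Success path: call __exit__(None, None, None). */
    PyObject *res = PyObject_CallFunction(exit, "OOO", Py_None, Py_None, Py_None);
    Py_DECREF(exit);
    if (res == NULL) {
        return -1;                        /* __exit__ raised */
    }
    Py_DECREF(res);
    return 0;
}

int main(void) {
    Py_Initialize();
    PyObject *io = PyImport_ImportModule("io");
    PyObject *mgr = io ? PyObject_CallMethod(io, "StringIO", NULL) : NULL;
    int rc = (mgr != NULL) ? run_with(mgr) : -1;
    Py_XDECREF(mgr);
    Py_XDECREF(io);
    if (rc < 0 && PyErr_Occurred()) {
        PyErr_Print();
    }
    Py_FinalizeEx();
    return rc == 0 ? 0 : 1;
}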
- */ - PyObject *exit_func; - PyObject *exc, *val, *tb, *res; - - val = TOP(); - assert(val && PyExceptionInstance_Check(val)); - exc = PyExceptionInstance_Class(val); - tb = PyException_GetTraceback(val); - Py_XDECREF(tb); - assert(PyLong_Check(PEEK(3))); - exit_func = PEEK(4); - PyObject *stack[4] = {NULL, exc, val, tb}; - res = PyObject_Vectorcall(exit_func, stack + 1, - 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); - if (res == NULL) - goto error; - - PUSH(res); - DISPATCH(); - } - - TARGET(PUSH_EXC_INFO) { - PyObject *value = TOP(); - - _PyErr_StackItem *exc_info = tstate->exc_info; - if (exc_info->exc_value != NULL) { - SET_TOP(exc_info->exc_value); - } - else { - Py_INCREF(Py_None); - SET_TOP(Py_None); - } - - Py_INCREF(value); - PUSH(value); - assert(PyExceptionInstance_Check(value)); - exc_info->exc_value = value; - - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { - /* Cached method object */ - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != - read_u32(cache->keys_version), LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_WITH_DICT) { - /* Can be either a managed dict, or a tp_dictoffset offset.*/ - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - - DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), - LOAD_ATTR); - /* Treat index as a signed 16 bit value */ - Py_ssize_t dictoffset = self_cls->tp_dictoffset; - assert(dictoffset > 0); - PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); - PyDictObject *dict = *dictptr; - DEOPT_IF(dict == NULL, LOAD_ATTR); - DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), - LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_NO_DICT) { - assert(cframe.use_tracing == 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { - assert(cframe.use_tracing 
== 0); - PyObject *self = TOP(); - PyTypeObject *self_cls = Py_TYPE(self); - _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; - uint32_t type_version = read_u32(cache->type_version); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - Py_ssize_t dictoffset = self_cls->tp_dictoffset; - assert(dictoffset > 0); - PyObject *dict = *(PyObject **)((char *)self + dictoffset); - /* This object has a __dict__, just not yet created */ - DEOPT_IF(dict != NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - PyObject *res = read_obj(cache->descr); - assert(res != NULL); - assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); - Py_INCREF(res); - SET_TOP(res); - PUSH(self); - JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); - DISPATCH(); - } - - TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { - DEOPT_IF(is_method(stack_pointer, oparg), CALL); - PyObject *function = PEEK(oparg + 1); - DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); - STAT_INC(CALL, hit); - PyObject *meth = ((PyMethodObject *)function)->im_func; - PyObject *self = ((PyMethodObject *)function)->im_self; - Py_INCREF(meth); - Py_INCREF(self); - PEEK(oparg + 1) = self; - PEEK(oparg + 2) = meth; - Py_DECREF(function); - JUMP_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); - } - - TARGET(KW_NAMES) { - assert(call_shape.kwnames == NULL); - assert(oparg < PyTuple_GET_SIZE(consts)); - call_shape.kwnames = GETITEM(consts, oparg); - DISPATCH(); - } - - TARGET(CALL) { - PREDICTED(CALL); - int total_args, is_meth; - is_meth = is_method(stack_pointer, oparg); - PyObject *function = PEEK(oparg + 1); - if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { - PyObject *meth = ((PyMethodObject *)function)->im_func; - PyObject *self = ((PyMethodObject *)function)->im_self; - Py_INCREF(meth); - Py_INCREF(self); - PEEK(oparg+1) = self; - PEEK(oparg+2) = meth; - Py_DECREF(function); - is_meth = 1; - } - total_args = oparg + is_meth; - function = PEEK(total_args + 1); - int positional_args = total_args - KWNAMES_LEN(); - // Check if the call can be inlined or not - if (Py_TYPE(function) == &PyFunction_Type && - tstate->interp->eval_frame == NULL && - ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) - { - int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; - PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); - STACK_SHRINK(total_args); - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)function, locals, - stack_pointer, positional_args, call_shape.kwnames - ); - call_shape.kwnames = NULL; - STACK_SHRINK(2-is_meth); - // The frame has stolen all the arguments from the stack, - // so there is no need to clean them up. 
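The LOAD_ATTR_METHOD_* specializations above all guard on a type version tag (and, for the *_WITH_VALUES and *_WITH_DICT variants, a dict keys version) recorded in the instruction's inline cache, and reuse a cached descriptor when the guards pass. The stand-alone sketch below shows the version-tag idea on made-up structs; unlike the real instructions, which deopt back to the generic LOAD_ATTR on a miss, it simply refills the cache so the example stays self-contained.

#include <stdint.h>
#include <stdio.h>

struct type_info  { uint32_t version_tag; };
struct attr_cache { uint32_t cached_version; const char *cached_attr; };

static const char *
load_attr_cached(struct type_info *tp, struct attr_cache *cache,
                 const char *(*lookup)(struct type_info *))
{
    if (tp->version_tag == cache->cached_version) {
        return cache->cached_attr;        /* cache hit: no lookup needed */
    }
    /* Miss: do the full lookup and refill the cache. */
    cache->cached_attr = lookup(tp);
    cache->cached_version = tp->version_tag;
    return cache->cached_attr;
}

static const char *full_lookup(struct type_info *tp) {
    (void)tp;
    return "bound method";                /* stand-in for the real MRO walk */
}

int main(void) {
    struct type_info tp = { .version_tag = 7 };
    struct attr_cache cache = { 0, NULL };
    printf("%s\n", load_attr_cached(&tp, &cache, full_lookup)); /* miss */
    printf("%s\n", load_attr_cached(&tp, &cache, full_lookup)); /* hit  */
    tp.version_tag = 8;                   /* type mutated: tag bumped */
    printf("%s\n", load_attr_cached(&tp, &cache, full_lookup)); /* miss */
    return 0;
}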
- if (new_frame == NULL) { - goto error; - } - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - cframe.current_frame = frame = new_frame; - CALL_STAT_INC(inlined_py_calls); - goto start_frame; - } - /* Callable is not a normal Python function */ - PyObject *res; - if (cframe.use_tracing) { - res = trace_call_function( - tstate, function, stack_pointer-total_args, - positional_args, call_shape.kwnames); - } - else { - res = PyObject_Vectorcall( - function, stack_pointer-total_args, - positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, - call_shape.kwnames); - } - call_shape.kwnames = NULL; - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(function); - /* Clear the stack */ - STACK_SHRINK(total_args); - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_ADAPTIVE) { - _PyCallCache *cache = (_PyCallCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - next_instr--; - int is_meth = is_method(stack_pointer, oparg); - int nargs = oparg + is_meth; - PyObject *callable = PEEK(nargs + 1); - int err = _Py_Specialize_Call(callable, next_instr, nargs, - call_shape.kwnames); - if (err < 0) { - goto error; - } - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(CALL, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(CALL); - } - } - - TARGET(CALL_PY_EXACT_ARGS) { - PREDICTED(CALL_PY_EXACT_ARGS); - assert(call_shape.kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - _PyCallCache *cache = (_PyCallCache *)next_instr; - int is_meth = is_method(stack_pointer, oparg); - int argcount = oparg + is_meth; - PyObject *callable = PEEK(argcount + 1); - DEOPT_IF(!PyFunction_Check(callable), CALL); - PyFunctionObject *func = (PyFunctionObject *)callable; - DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); - PyCodeObject *code = (PyCodeObject *)func->func_code; - DEOPT_IF(code->co_argcount != argcount, CALL); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(CALL, hit); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); - CALL_STAT_INC(inlined_py_calls); - STACK_SHRINK(argcount); - for (int i = 0; i < argcount; i++) { - new_frame->localsplus[i] = stack_pointer[i]; - } - for (int i = argcount; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - STACK_SHRINK(2-is_meth); - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - goto start_frame; - } - - TARGET(CALL_PY_WITH_DEFAULTS) { - assert(call_shape.kwnames == NULL); - DEOPT_IF(tstate->interp->eval_frame, CALL); - _PyCallCache *cache = (_PyCallCache *)next_instr; - int is_meth = is_method(stack_pointer, oparg); - int argcount = oparg + is_meth; - PyObject *callable = PEEK(argcount + 1); - DEOPT_IF(!PyFunction_Check(callable), CALL); - PyFunctionObject *func = (PyFunctionObject *)callable; - DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); - PyCodeObject *code = (PyCodeObject *)func->func_code; - DEOPT_IF(argcount > code->co_argcount, CALL); - int minargs = cache->min_args; - DEOPT_IF(argcount < minargs, CALL); - 
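The CALL and CALL_PY_EXACT_ARGS paths above inline Python-to-Python calls by pushing a new interpreter frame and handing the arguments straight from the value stack to the callee's local slots, clearing the slots that have no argument yet. A small sketch of that hand-off, with hypothetical frame and stack types:

#include <stdio.h>

#define MAX_LOCALS 8

struct frame { const char *localsplus[MAX_LOCALS]; int nlocalsplus; };

/* Copy argcount stack values into the callee's first locals and clear
 * the remaining slots, mirroring the loop in CALL_PY_EXACT_ARGS. */
static void
push_call_args(struct frame *callee, const char **stack_pointer,
               int argcount, int nlocalsplus)
{
    callee->nlocalsplus = nlocalsplus;
    for (int i = 0; i < argcount; i++) {
        callee->localsplus[i] = stack_pointer[i];   /* take the argument */
    }
    for (int i = argcount; i < nlocalsplus; i++) {
        callee->localsplus[i] = NULL;               /* not yet bound */
    }
}

int main(void) {
    const char *stack[] = { "arg0", "arg1" };
    struct frame callee;
    push_call_args(&callee, stack, 2, 4);
    for (int i = 0; i < callee.nlocalsplus; i++) {
        printf("local[%d] = %s\n", i,
               callee.localsplus[i] ? callee.localsplus[i] : "(unbound)");
    }
    return 0;
}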
DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); - STAT_INC(CALL, hit); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); - CALL_STAT_INC(inlined_py_calls); - STACK_SHRINK(argcount); - for (int i = 0; i < argcount; i++) { - new_frame->localsplus[i] = stack_pointer[i]; - } - for (int i = argcount; i < code->co_argcount; i++) { - PyObject *def = PyTuple_GET_ITEM(func->func_defaults, - i - minargs); - Py_INCREF(def); - new_frame->localsplus[i] = def; - } - for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { - new_frame->localsplus[i] = NULL; - } - STACK_SHRINK(2-is_meth); - _PyFrame_SetStackPointer(frame, stack_pointer); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - frame->prev_instr = next_instr - 1; - new_frame->previous = frame; - frame = cframe.current_frame = new_frame; - goto start_frame; - } - - TARGET(CALL_NO_KW_TYPE_1) { - assert(call_shape.kwnames == NULL); - assert(cframe.use_tracing == 0); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *obj = TOP(); - PyObject *callable = SECOND(); - DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *res = Py_NewRef(Py_TYPE(obj)); - Py_DECREF(callable); - Py_DECREF(obj); - STACK_SHRINK(2); - SET_TOP(res); - DISPATCH(); - } - - TARGET(CALL_NO_KW_STR_1) { - assert(call_shape.kwnames == NULL); - assert(cframe.use_tracing == 0); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *callable = PEEK(2); - DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - PyObject *res = PyObject_Str(arg); - Py_DECREF(arg); - Py_DECREF(&PyUnicode_Type); - STACK_SHRINK(2); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_NO_KW_TUPLE_1) { - assert(call_shape.kwnames == NULL); - assert(oparg == 1); - DEOPT_IF(is_method(stack_pointer, 1), CALL); - PyObject *callable = PEEK(2); - DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - PyObject *res = PySequence_Tuple(arg); - Py_DECREF(arg); - Py_DECREF(&PyTuple_Type); - STACK_SHRINK(2); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_BUILTIN_CLASS) { - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - int kwnames_len = KWNAMES_LEN(); - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyType_Check(callable), CALL); - PyTypeObject *tp = (PyTypeObject *)callable; - DEOPT_IF(tp->tp_vectorcall == NULL, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - STACK_SHRINK(total_args); - PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, - total_args-kwnames_len, call_shape.kwnames); - call_shape.kwnames = NULL; - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(tp); - STACK_SHRINK(1-is_meth); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } - - TARGET(CALL_NO_KW_BUILTIN_O) { - assert(cframe.use_tracing == 0); - /* Builtin METH_O functions */ - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *arg = TOP(); - PyObject *res = cfunc(PyCFunction_GET_SELF(callable), arg); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - Py_DECREF(arg); - Py_DECREF(callable); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } + } + return 0; +} - TARGET(CALL_NO_KW_BUILTIN_FAST) { - assert(cframe.use_tracing == 0); - /* Builtin METH_FASTCALL functions, without keywords */ - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, - CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); - STACK_SHRINK(total_args); - /* res = func(self, args, nargs) */ - PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( - PyCFunction_GET_SELF(callable), - stack_pointer, - total_args); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - Py_DECREF(callable); - if (res == NULL) { - /* Not deopting because this doesn't mean our optimization was - wrong. `res` can be NULL for valid reasons. Eg. getattr(x, - 'invalid'). In those cases an exception is set, so we must - handle it. 
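The _Py_EnterRecursivePy() and _Py_LeaveRecursiveCallPy() helpers added above keep a separate "remaining depth" budget for pure-Python frames: the hot path is a single decrement-and-compare, and the expensive overflow handling runs only once the budget is exhausted. The same shape in a stand-alone sketch (the names and the overflow handler are illustrative, not the CPython API):

#include <stdbool.h>
#include <stdio.h>

struct thread_state { int py_recursion_remaining; };

static bool recursion_overflowed(struct thread_state *ts) {
    /* Stand-in for raising RecursionError / checking for extra headroom. */
    fprintf(stderr, "recursion limit reached (remaining=%d)\n",
            ts->py_recursion_remaining);
    return true;
}

static inline bool enter_recursive_call(struct thread_state *ts) {
    /* Returns true when the call must be aborted. */
    return (ts->py_recursion_remaining-- <= 0) && recursion_overflowed(ts);
}

static inline void leave_recursive_call(struct thread_state *ts) {
    ts->py_recursion_remaining++;
}

int main(void) {
    struct thread_state ts = { .py_recursion_remaining = 2 };
    for (int depth = 0; depth < 4; depth++) {
        if (enter_recursive_call(&ts)) {
            return 1;            /* unwound: too deep */
        }
        printf("entered depth %d\n", depth);
    }
    /* (matching leave_recursive_call() calls omitted for brevity) */
    return 0;
}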
- */ - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +static inline int _Py_EnterRecursivePy(PyThreadState *tstate) { + return (tstate->py_recursion_remaining-- <= 0) && + _Py_CheckRecursiveCallPy(tstate); +} - TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { - assert(cframe.use_tracing == 0); - /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); - DEOPT_IF(PyCFunction_GET_FLAGS(callable) != - (METH_FASTCALL | METH_KEYWORDS), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - STACK_SHRINK(total_args); - /* res = func(self, args, nargs, kwnames) */ - _PyCFunctionFastWithKeywords cfunc = - (_PyCFunctionFastWithKeywords)(void(*)(void)) - PyCFunction_GET_FUNCTION(callable); - PyObject *res = cfunc( - PyCFunction_GET_SELF(callable), - stack_pointer, - total_args - KWNAMES_LEN(), - call_shape.kwnames - ); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - call_shape.kwnames = NULL; - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - Py_DECREF(stack_pointer[i]); - } - STACK_SHRINK(2-is_meth); - PUSH(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) { + tstate->py_recursion_remaining++; +} - TARGET(CALL_NO_KW_LEN) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - /* len(o) */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyObject *callable = PEEK(total_args + 1); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.len, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *arg = TOP(); - Py_ssize_t len_i = PyObject_Length(arg); - if (len_i < 0) { - goto error; - } - PyObject *res = PyLong_FromSsize_t(len_i); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - Py_DECREF(arg); - if (res == NULL) { - goto error; - } - DISPATCH(); - } - TARGET(CALL_NO_KW_ISINSTANCE) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - /* isinstance(o, o2) */ - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyObject *callable = PEEK(total_args + 1); - DEOPT_IF(total_args != 2, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyObject *cls = POP(); - PyObject *inst = TOP(); - int retval = PyObject_IsInstance(inst, cls); - if (retval < 0) { - Py_DECREF(cls); - goto error; - } - PyObject *res = PyBool_FromLong(retval); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(inst); - Py_DECREF(cls); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - DISPATCH(); - } +// GH-89279: Must be a macro to be sure it's inlined by MSVC. 
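The CALL_NO_KW_LEN and CALL_NO_KW_ISINSTANCE specializations removed above are guarded by pointer identity: the call is only fast-pathed if the callable is exactly the builtin cached in interp->callable_cache, otherwise the instruction deopts to the generic CALL. A minimal sketch of that guard on plain function pointers (the cache struct and builtins here are made up):

#include <stdio.h>

typedef long (*builtin_fn)(long);

static long builtin_len(long x)    { return x; }    /* stand-in builtin */
static long other_callable(long x) { return -x; }

struct callable_cache { builtin_fn len; };

static long
call_len_specialized(builtin_fn callable, long arg,
                     const struct callable_cache *cache, int *deopt)
{
    if (callable != cache->len) {   /* guard failed: fall back to generic CALL */
        *deopt = 1;
        return 0;
    }
    *deopt = 0;
    return cache->len(arg);         /* fast path: call the known builtin */
}

int main(void) {
    struct callable_cache cache = { .len = builtin_len };
    int deopt;
    long r1 = call_len_specialized(builtin_len, 5, &cache, &deopt);
    printf("%ld (deopt=%d)\n", r1, deopt);
    long r2 = call_len_specialized(other_callable, 5, &cache, &deopt);
    printf("%ld (deopt=%d)\n", r2, deopt);
    return 0;
}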
+#define is_method(stack_pointer, args) (PEEK((args)+2) != NULL) - TARGET(CALL_NO_KW_LIST_APPEND) { - assert(cframe.use_tracing == 0); - assert(call_shape.kwnames == NULL); - assert(oparg == 1); - PyObject *callable = PEEK(3); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(callable != interp->callable_cache.list_append, CALL); - PyObject *list = SECOND(); - DEOPT_IF(!PyList_Check(list), CALL); - STAT_INC(CALL, hit); - // CALL + POP_TOP - JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); - assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); - PyObject *arg = POP(); - if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { - goto error; - } - STACK_SHRINK(2); - Py_DECREF(list); - Py_DECREF(callable); - DISPATCH(); - } +#define KWNAMES_LEN() \ + (kwnames == NULL ? 0 : ((int)PyTuple_GET_SIZE(kwnames))) + +/* Disable unused label warnings. They are handy for debugging, even + if computed gotos aren't used. */ + +/* TBD - what about other compilers? */ +#if defined(__GNUC__) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wunused-label" +#elif defined(_MSC_VER) /* MS_WINDOWS */ +# pragma warning(push) +# pragma warning(disable:4102) +#endif - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(total_args != 2, CALL); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != METH_O, CALL); - PyObject *arg = TOP(); - PyObject *self = SECOND(); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = meth->ml_meth; - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *res = cfunc(self, arg); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(self); - Py_DECREF(arg); - STACK_SHRINK(oparg + 1); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +PyObject* _Py_HOT_FUNCTION +_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) +{ + _Py_EnsureTstateNotNULL(tstate); + CALL_STAT_INC(pyeval_calls); - TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); - PyTypeObject *d_type = callable->d_common.d_type; - PyObject *self = PEEK(total_args); - DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - int nargs = total_args-1; - STACK_SHRINK(nargs); - _PyCFunctionFastWithKeywords cfunc = - (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; - PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), - call_shape.kwnames); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - call_shape.kwnames = NULL; - - /* Free the arguments. 
*/ - for (int i = 0; i < nargs; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(self); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +#if USE_COMPUTED_GOTOS +/* Import the static jump table */ +#include "opcode_targets.h" +#endif - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { - assert(call_shape.kwnames == NULL); - assert(oparg == 0 || oparg == 1); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - DEOPT_IF(total_args != 1, CALL); - PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - PyObject *self = TOP(); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - PyCFunction cfunc = meth->ml_meth; - // This is slower but CPython promises to check all non-vectorcall - // function calls. - if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { - goto error; - } - PyObject *res = cfunc(self, NULL); - _Py_LeaveRecursiveCallTstate(tstate); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - Py_DECREF(self); - STACK_SHRINK(oparg + 1); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } +#ifdef Py_STATS + int lastopcode = 0; +#endif + // opcode is an 8-bit value to improve the code generated by MSVC + // for the big switch below (in combination with the EXTRA_CASES macro). + uint8_t opcode; /* Current opcode */ + int oparg; /* Current opcode argument, if any */ + _Py_atomic_int * const eval_breaker = &tstate->interp->ceval.eval_breaker; +#ifdef LLTRACE + int lltrace = 0; +#endif - TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { - assert(call_shape.kwnames == NULL); - int is_meth = is_method(stack_pointer, oparg); - int total_args = oparg + is_meth; - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - /* Builtin METH_FASTCALL methods, without keywords */ - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; - DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); - PyObject *self = PEEK(total_args); - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); - STAT_INC(CALL, hit); - JUMPBY(INLINE_CACHE_ENTRIES_CALL); - _PyCFunctionFast cfunc = - (_PyCFunctionFast)(void(*)(void))meth->ml_meth; - int nargs = total_args-1; - STACK_SHRINK(nargs); - PyObject *res = cfunc(self, stack_pointer, nargs); - assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Clear the stack of the arguments. */ - for (int i = 0; i < nargs; i++) { - Py_DECREF(stack_pointer[i]); - } - Py_DECREF(self); - STACK_SHRINK(2-is_meth); - SET_TOP(res); - Py_DECREF(callable); - if (res == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } + _PyCFrame cframe; + _PyInterpreterFrame entry_frame; + PyObject *kwnames = NULL; // Borrowed reference. Reset by CALL instructions. - TARGET(CALL_FUNCTION_EX) { - PREDICTED(CALL_FUNCTION_EX); - PyObject *func, *callargs, *kwargs = NULL, *result; - if (oparg & 0x01) { - kwargs = POP(); - // DICT_MERGE is called before this opcode if there are kwargs. - // It converts all dict subtypes in kwargs into regular dicts. 
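The USE_COMPUTED_GOTOS block above pulls in opcode_targets.h, a table with one label address per opcode, so each handler can jump directly to the next handler instead of looping back through a switch. Below is a minimal dispatch loop in that style; it relies on the GCC/Clang "labels as values" extension, which is exactly why the real interpreter keeps a switch-based fallback for other compilers. The toy opcodes are hypothetical.

#include <stdio.h>

enum { OP_PUSH1, OP_ADD, OP_PRINT, OP_HALT };

int main(void) {
    static const unsigned char code[] = { OP_PUSH1, OP_PUSH1, OP_ADD, OP_PRINT, OP_HALT };
    /* The equivalent of opcode_targets.h: one label address per opcode. */
    static void *targets[] = { &&op_push1, &&op_add, &&op_print, &&op_halt };

    int stack[8], sp = 0;
    const unsigned char *ip = code;

#define DISPATCH() goto *targets[*ip++]

    DISPATCH();

op_push1:  stack[sp++] = 1;                   DISPATCH();
op_add:    sp--; stack[sp - 1] += stack[sp];  DISPATCH();
op_print:  printf("%d\n", stack[sp - 1]);     DISPATCH();
op_halt:   return 0;
}

Direct-threaded dispatch of this kind tends to help branch prediction, since each handler's indirect jump gets its own predictor history instead of funnelling every opcode through one switch.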
- assert(PyDict_CheckExact(kwargs)); - } - callargs = POP(); - func = TOP(); - if (!PyTuple_CheckExact(callargs)) { - if (check_args_iterable(tstate, func, callargs) < 0) { - Py_DECREF(callargs); - goto error; - } - Py_SETREF(callargs, PySequence_Tuple(callargs)); - if (callargs == NULL) { - goto error; - } - } - assert(PyTuple_CheckExact(callargs)); + /* WARNING: Because the _PyCFrame lives on the C stack, + * but can be accessed from a heap allocated object (tstate) + * strict stack discipline must be maintained. + */ + _PyCFrame *prev_cframe = tstate->cframe; + cframe.use_tracing = prev_cframe->use_tracing; + cframe.previous = prev_cframe; + tstate->cframe = &cframe; - result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); - Py_DECREF(func); - Py_DECREF(callargs); - Py_XDECREF(kwargs); - - STACK_SHRINK(1); - assert(TOP() == NULL); - SET_TOP(result); - if (result == NULL) { - goto error; - } - CHECK_EVAL_BREAKER(); - DISPATCH(); - } + assert(tstate->interp->interpreter_trampoline != NULL); +#ifdef Py_DEBUG + /* Set these to invalid but identifiable values for debugging. */ + entry_frame.f_funcobj = (PyObject*)0xaaa0; + entry_frame.f_locals = (PyObject*)0xaaa1; + entry_frame.frame_obj = (PyFrameObject*)0xaaa2; + entry_frame.f_globals = (PyObject*)0xaaa3; + entry_frame.f_builtins = (PyObject*)0xaaa4; +#endif + entry_frame.f_code = tstate->interp->interpreter_trampoline; + entry_frame.prev_instr = + _PyCode_CODE(tstate->interp->interpreter_trampoline); + entry_frame.stacktop = 0; + entry_frame.owner = FRAME_OWNED_BY_CSTACK; + entry_frame.yield_offset = 0; + /* Push frame */ + entry_frame.previous = prev_cframe->current_frame; + frame->previous = &entry_frame; + cframe.current_frame = frame; - TARGET(MAKE_FUNCTION) { - PyObject *codeobj = POP(); - PyFunctionObject *func = (PyFunctionObject *) - PyFunction_New(codeobj, GLOBALS()); + if (_Py_EnterRecursiveCallTstate(tstate, "")) { + tstate->c_recursion_remaining--; + tstate->py_recursion_remaining--; + goto exit_unwind; + } - Py_DECREF(codeobj); - if (func == NULL) { - goto error; - } + /* support for generator.throw() */ + if (throwflag) { + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; + } + TRACE_FUNCTION_THROW_ENTRY(); + DTRACE_FUNCTION_ENTRY(); + goto resume_with_error; + } - if (oparg & 0x08) { - assert(PyTuple_CheckExact(TOP())); - func->func_closure = POP(); - } - if (oparg & 0x04) { - assert(PyTuple_CheckExact(TOP())); - func->func_annotations = POP(); - } - if (oparg & 0x02) { - assert(PyDict_CheckExact(TOP())); - func->func_kwdefaults = POP(); - } - if (oparg & 0x01) { - assert(PyTuple_CheckExact(TOP())); - func->func_defaults = POP(); - } + /* Local "register" variables. + * These are cached values from the frame and code object. 
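Under Py_DEBUG, the entry_frame fields above are filled with recognizable bogus values (0xaaa0 and friends) so that any accidental use of the sentinel frame shows up unmistakably in a crash or a debugger. The idiom in isolation, with an illustrative struct:

#include <stdint.h>
#include <stdio.h>

struct sentinel_frame {
    void *funcobj;
    void *locals;
    void *globals;
};

static void init_sentinel(struct sentinel_frame *f) {
#ifndef NDEBUG
    /* Invalid but easy-to-recognize values: if one of these shows up in a
     * crash report, the sentinel frame was used as if it were a real one. */
    f->funcobj = (void *)(uintptr_t)0xaaa0;
    f->locals  = (void *)(uintptr_t)0xaaa1;
    f->globals = (void *)(uintptr_t)0xaaa3;
#else
    f->funcobj = NULL;
    f->locals  = NULL;
    f->globals = NULL;
#endif
}

int main(void) {
    struct sentinel_frame f;
    init_sentinel(&f);
    printf("funcobj=%p locals=%p globals=%p\n", f.funcobj, f.locals, f.globals);
    return 0;
}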
*/ - PUSH((PyObject *)func); - DISPATCH(); - } + PyObject *names; + PyObject *consts; + _Py_CODEUNIT *next_instr; + PyObject **stack_pointer; - TARGET(RETURN_GENERATOR) { - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; - PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); - if (gen == NULL) { - goto error; - } - assert(EMPTY()); - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - _PyFrame_Copy(frame, gen_frame); - assert(frame->frame_obj == NULL); - gen->gi_frame_state = FRAME_CREATED; - gen_frame->owner = FRAME_OWNED_BY_GENERATOR; - _Py_LeaveRecursiveCallPy(tstate); - if (!frame->is_entry) { - _PyInterpreterFrame *prev = frame->previous; - _PyThreadState_PopFrame(tstate, frame); - frame = cframe.current_frame = prev; - _PyFrame_StackPush(frame, (PyObject *)gen); - goto resume_frame; - } - _Py_LeaveRecursiveCallTstate(tstate); - /* Make sure that frame is in a valid state */ - frame->stacktop = 0; - frame->f_locals = NULL; - Py_INCREF(frame->f_funcobj); - Py_INCREF(frame->f_code); - /* Restore previous cframe and return. */ - tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; - assert(tstate->cframe->current_frame == frame->previous); - assert(!_PyErr_Occurred(tstate)); - return (PyObject *)gen; - } +/* Sets the above local variables from the frame */ +#define SET_LOCALS_FROM_FRAME() \ + { \ + PyCodeObject *co = frame->f_code; \ + names = co->co_names; \ + consts = co->co_consts; \ + } \ + assert(_PyInterpreterFrame_LASTI(frame) >= -1); \ + /* Jump back to the last instruction executed... */ \ + next_instr = frame->prev_instr + 1; \ + stack_pointer = _PyFrame_GetStackPointer(frame); \ + /* Set stackdepth to -1. \ + Update when returning or calling trace function. \ + Having stackdepth <= 0 ensures that invalid \ + values are not visible to the cycle GC. \ + We choose -1 rather than 0 to assist debugging. \ + */ \ + frame->stacktop = -1; - TARGET(BUILD_SLICE) { - PyObject *start, *stop, *step, *slice; - if (oparg == 3) - step = POP(); - else - step = NULL; - stop = POP(); - start = TOP(); - slice = PySlice_New(start, stop, step); - Py_DECREF(start); - Py_DECREF(stop); - Py_XDECREF(step); - SET_TOP(slice); - if (slice == NULL) - goto error; - DISPATCH(); - } - TARGET(FORMAT_VALUE) { - /* Handles f-string value formatting. */ - PyObject *result; - PyObject *fmt_spec; - PyObject *value; - PyObject *(*conv_fn)(PyObject *); - int which_conversion = oparg & FVC_MASK; - int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; - - fmt_spec = have_fmt_spec ? POP() : NULL; - value = POP(); - - /* See if any conversion is specified. */ - switch (which_conversion) { - case FVC_NONE: conv_fn = NULL; break; - case FVC_STR: conv_fn = PyObject_Str; break; - case FVC_REPR: conv_fn = PyObject_Repr; break; - case FVC_ASCII: conv_fn = PyObject_ASCII; break; - default: - _PyErr_Format(tstate, PyExc_SystemError, - "unexpected conversion flag %d", - which_conversion); - goto error; - } +start_frame: + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; + } - /* If there's a conversion function, call it and replace - value with that result. Otherwise, just use value, - without conversion. 
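The removed FORMAT_VALUE handler decodes its oparg into an optional conversion (!s, !r, !a in an f-string) by selecting a conversion function pointer before formatting. A stand-alone sketch of that selection step on C strings; the flag names and converters are stand-ins, and the real opcode also supports the ascii() conversion omitted here.

#include <stdio.h>

enum { CONV_NONE = 0, CONV_STR = 1, CONV_REPR = 2, CONV_MASK = 0x3 };

typedef const char *(*convert_fn)(const char *);

static const char *to_str(const char *v)  { return v; }
static const char *to_repr(const char *v) {
    static char buf[64];
    snprintf(buf, sizeof buf, "'%s'", v);
    return buf;
}

static const char *format_value(const char *value, int oparg) {
    convert_fn conv = NULL;
    switch (oparg & CONV_MASK) {
        case CONV_NONE:  conv = NULL;     break;
        case CONV_STR:   conv = to_str;   break;
        case CONV_REPR:  conv = to_repr;  break;
        default:         return NULL;     /* unknown flag: error path */
    }
    return conv ? conv(value) : value;
}

int main(void) {
    printf("%s\n", format_value("spam", CONV_NONE));   /* spam   */
    printf("%s\n", format_value("spam", CONV_REPR));   /* 'spam' */
    return 0;
}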
*/ - if (conv_fn != NULL) { - result = conv_fn(value); - Py_DECREF(value); - if (result == NULL) { - Py_XDECREF(fmt_spec); - goto error; - } - value = result; - } +resume_frame: + SET_LOCALS_FROM_FRAME(); - /* If value is a unicode object, and there's no fmt_spec, - then we know the result of format(value) is value - itself. In that case, skip calling format(). I plan to - move this optimization in to PyObject_Format() - itself. */ - if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { - /* Do nothing, just transfer ownership to result. */ - result = value; - } else { - /* Actually call format(). */ - result = PyObject_Format(value, fmt_spec); - Py_DECREF(value); - Py_XDECREF(fmt_spec); - if (result == NULL) { - goto error; - } +#ifdef LLTRACE + { + if (frame != &entry_frame) { + int r = PyDict_Contains(GLOBALS(), &_Py_ID(__lltrace__)); + if (r < 0) { + goto exit_unwind; } - - PUSH(result); - DISPATCH(); + lltrace = r; } - - TARGET(COPY) { - assert(oparg != 0); - PyObject *peek = PEEK(oparg); - Py_INCREF(peek); - PUSH(peek); - DISPATCH(); + if (lltrace) { + lltrace_resume_frame(frame); } + } +#endif - TARGET(BINARY_OP) { - PREDICTED(BINARY_OP); - PyObject *rhs = POP(); - PyObject *lhs = TOP(); - assert(0 <= oparg); - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - PyObject *res = binary_ops[oparg](lhs, rhs); - Py_DECREF(lhs); - Py_DECREF(rhs); - SET_TOP(res); - if (res == NULL) { - goto error; - } - JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP); - DISPATCH(); - } +#ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); +#endif - TARGET(BINARY_OP_ADAPTIVE) { - assert(cframe.use_tracing == 0); - _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; - if (ADAPTIVE_COUNTER_IS_ZERO(cache)) { - PyObject *lhs = SECOND(); - PyObject *rhs = TOP(); - next_instr--; - _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); - DISPATCH_SAME_OPARG(); - } - else { - STAT_INC(BINARY_OP, deferred); - DECREMENT_ADAPTIVE_COUNTER(cache); - JUMP_TO_INSTRUCTION(BINARY_OP); - } - } + DISPATCH(); - TARGET(SWAP) { - assert(oparg != 0); - PyObject *top = TOP(); - SET_TOP(PEEK(oparg)); - PEEK(oparg) = top; - DISPATCH(); - } +handle_eval_breaker: - TARGET(EXTENDED_ARG) { - assert(oparg); - oparg <<= 8; - oparg |= _Py_OPARG(*next_instr); - // We might be tracing. To avoid breaking tracing guarantees in - // quickened instructions, always deoptimize the next opcode: - opcode = _PyOpcode_Deopt[_Py_OPCODE(*next_instr)]; - PRE_DISPATCH_GOTO(); - // CPython hasn't traced the following instruction historically - // (DO_TRACING would clobber our extended oparg anyways), so just - // skip our usual cframe.use_tracing check before dispatch. Also, - // make sure the next instruction isn't a RESUME, since that needs - // to trace properly (and shouldn't have an extended arg anyways): - assert(opcode != RESUME); - DISPATCH_GOTO(); - } + /* Do periodic things, like check for signals and async I/0. + * We need to do reasonably frequently, but not too frequently. + * All loops should include a check of the eval breaker. + * We also check on return from any builtin function. 
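The BINARY_OP handler above dispatches on its oparg by indexing a table of binary operator implementations, with asserts keeping the index in range. The same idea as a stand-alone sketch with a hypothetical table of long-integer operators:

#include <stdio.h>

typedef long (*binary_op_fn)(long, long);

static long op_add(long a, long b) { return a + b; }
static long op_sub(long a, long b) { return a - b; }
static long op_mul(long a, long b) { return a * b; }

static const binary_op_fn binary_ops[] = { op_add, op_sub, op_mul };

int main(void) {
    int oparg = 2;   /* in real bytecode this comes from the instruction */
    if (oparg < 0 || (size_t)oparg >= sizeof(binary_ops) / sizeof(binary_ops[0])) {
        return 1;    /* mirrors the bounds asserts in the handler */
    }
    printf("%ld\n", binary_ops[oparg](6, 7));   /* 6 * 7 = 42 */
    return 0;
}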
+ */ + if (_Py_HandlePending(tstate) != 0) { + goto error; + } + DISPATCH(); - TARGET(EXTENDED_ARG_QUICK) { - assert(cframe.use_tracing == 0); - assert(oparg); - int oldoparg = oparg; - NEXTOPARG(); - oparg |= oldoparg << 8; - DISPATCH_GOTO(); - } + { + /* Start instructions */ +#if !USE_COMPUTED_GOTOS + dispatch_opcode: + switch (opcode) +#endif + { - TARGET(CACHE) { - Py_UNREACHABLE(); - } +#include "generated_cases.c.h" #if USE_COMPUTED_GOTOS TARGET_DO_TRACING: @@ -5023,7 +1233,8 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) { int instr_prev = _PyInterpreterFrame_LASTI(frame); frame->prev_instr = next_instr; - TRACING_NEXTOPARG(); + NEXTOPARG(); + // No _PyOpcode_Deopt here, since RESUME has no optimized forms: if (opcode == RESUME) { if (oparg < 2) { CHECK_EVAL_BREAKER(); @@ -5070,8 +1281,29 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } } - TRACING_NEXTOPARG(); + NEXTOPARG(); PRE_DISPATCH_GOTO(); + // No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms: + while (opcode == EXTENDED_ARG) { + // CPython hasn't ever traced the instruction after an EXTENDED_ARG. + // Inline the EXTENDED_ARG here, so we can avoid branching there: + INSTRUCTION_START(EXTENDED_ARG); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + // Make sure the next instruction isn't a RESUME, since that needs + // to trace properly (and shouldn't have an EXTENDED_ARG, anyways): + assert(opcode != RESUME); + PRE_DISPATCH_GOTO(); + } + opcode = _PyOpcode_Deopt[opcode]; + if (_PyOpcode_Caches[opcode]) { + _Py_CODEUNIT *counter = &next_instr[1]; + // The instruction is going to decrement the counter, so we need to + // increment it here to make sure it doesn't try to specialize: + if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) { + INCREMENT_ADAPTIVE_COUNTER(*counter); + } + } DISPATCH_GOTO(); } @@ -5096,27 +1328,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int or goto error. */ Py_UNREACHABLE(); -/* Specialization misses */ - -miss: - { - STAT_INC(opcode, miss); - opcode = _PyOpcode_Deopt[opcode]; - STAT_INC(opcode, miss); - /* The counter is always the first cache entry: */ - _Py_CODEUNIT *counter = (_Py_CODEUNIT *)next_instr; - *counter -= 1; - if (*counter == 0) { - int adaptive_opcode = _PyOpcode_Adaptive[opcode]; - assert(adaptive_opcode); - _Py_SET_OPCODE(next_instr[-1], adaptive_opcode); - STAT_INC(opcode, deopt); - *counter = adaptive_counter_start(); - } - next_instr--; - DISPATCH_GOTO(); - } - unbound_local_error: { format_exc_check_arg(tstate, PyExc_UnboundLocalError, @@ -5126,8 +1337,16 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int goto error; } +pop_4_error: + STACK_SHRINK(1); +pop_3_error: + STACK_SHRINK(1); +pop_2_error: + STACK_SHRINK(1); +pop_1_error: + STACK_SHRINK(1); error: - call_shape.kwnames = NULL; + kwnames = NULL; /* Double-check exception status. */ #ifdef NDEBUG if (!_PyErr_Occurred(tstate)) { @@ -5139,6 +1358,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif /* Log traceback info. 
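The pop_4_error through pop_1_error labels added above form a deliberate fall-through chain: a generated handler that fails with N operands still on the stack jumps to pop_N_error, and each label shrinks the stack once before reaching the shared error path. A sketch of the chain; note that the labels never targeted directly would trip -Wunused-label, which is exactly what the pragmas earlier in this file silence.

#include <stdio.h>

int main(void) {
    int stack[8] = { 10, 20, 30, 40 };
    int sp = 4;              /* four operands currently on the stack */

    goto pop_3_error;        /* pretend an opcode holding 3 operands failed */

    /* Each label shrinks the stack once and falls through to the next. */
pop_4_error:
    sp--;
pop_3_error:
    sp--;
pop_2_error:
    sp--;
pop_1_error:
    sp--;
    /* error: the shared unwind path starts here */
    printf("unwound to sp=%d\n", sp);   /* prints: unwound to sp=1 */
    (void)stack;
    return 1;
}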
*/ + assert(frame != &entry_frame); if (!_PyFrame_IsIncomplete(frame)) { PyFrameObject *f = _PyFrame_GetFrameObject(frame); if (f != NULL) { @@ -5211,7 +1431,9 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int exit_unwind: assert(_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallPy(tstate); - if (frame->is_entry) { + assert(frame != &entry_frame); + frame = cframe.current_frame = pop_frame(tstate, frame); + if (frame == &entry_frame) { /* Restore previous cframe and exit */ tstate->cframe = cframe.previous; tstate->cframe->use_tracing = cframe.use_tracing; @@ -5219,13 +1441,17 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _Py_LeaveRecursiveCallTstate(tstate); return NULL; } - frame = cframe.current_frame = pop_frame(tstate, frame); resume_with_error: SET_LOCALS_FROM_FRAME(); goto error; } +#if defined(__GNUC__) +# pragma GCC diagnostic pop +#elif defined(_MSC_VER) /* MS_WINDOWS */ +# pragma warning(pop) +#endif static void format_missing(PyThreadState *tstate, const char *kind, @@ -5691,8 +1917,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, for (; i < defcount; i++) { if (localsplus[m+i] == NULL) { PyObject *def = defs[i]; - Py_INCREF(def); - localsplus[m+i] = def; + localsplus[m+i] = Py_NewRef(def); } } } @@ -5708,8 +1933,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, if (func->func_kwdefaults != NULL) { PyObject *def = PyDict_GetItemWithError(func->func_kwdefaults, varname); if (def) { - Py_INCREF(def); - localsplus[i] = def; + localsplus[i] = Py_NewRef(def); continue; } else if (_PyErr_Occurred(tstate)) { @@ -5782,20 +2006,48 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, } static void -_PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame) +clear_thread_frame(PyThreadState *tstate, _PyInterpreterFrame * frame) { + assert(frame->owner == FRAME_OWNED_BY_THREAD); // Make sure that this is, indeed, the top frame. 
We can't check this in // _PyThreadState_PopFrame, since f_code is already cleared at that point: assert((PyObject **)frame + frame->f_code->co_framesize == - tstate->datastack_top); + tstate->datastack_top); tstate->c_recursion_remaining--; assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame); - assert(frame->owner == FRAME_OWNED_BY_THREAD); _PyFrame_Clear(frame); tstate->c_recursion_remaining++; _PyThreadState_PopFrame(tstate, frame); } +static void +clear_gen_frame(PyThreadState *tstate, _PyInterpreterFrame * frame) +{ + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_CLEARED; + assert(tstate->exc_info == &gen->gi_exc_state); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + tstate->c_recursion_remaining--; + assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame); + _PyFrame_Clear(frame); + tstate->c_recursion_remaining++; + frame->previous = NULL; +} + +static void +_PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame) +{ + if (frame->owner == FRAME_OWNED_BY_THREAD) { + clear_thread_frame(tstate, frame); + } + else { + clear_gen_frame(tstate, frame); + } +} + + PyObject * _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, PyObject *locals, @@ -5821,13 +2073,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, return NULL; } EVAL_CALL_STAT_INC(EVAL_CALL_VECTOR); - PyObject *retval = _PyEval_EvalFrame(tstate, frame, 0); - assert( - _PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame) || - _PyFrame_GetStackPointer(frame) == frame->localsplus - ); - _PyEvalFrameClearAndPop(tstate, frame); - return retval; + return _PyEval_EvalFrame(tstate, frame, 0); } /* Legacy API */ @@ -5872,15 +2118,13 @@ PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals, newargs[i] = args[i]; } for (int i = 0; i < kwcount; i++) { - Py_INCREF(kws[2*i]); - PyTuple_SET_ITEM(kwnames, i, kws[2*i]); + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(kws[2*i])); newargs[argcount+i] = kws[2*i+1]; } allargs = newargs; } for (int i = 0; i < kwcount; i++) { - Py_INCREF(kws[2*i]); - PyTuple_SET_ITEM(kwnames, i, kws[2*i]); + PyTuple_SET_ITEM(kwnames, i, Py_NewRef(kws[2*i])); } PyFrameConstructor constr = { .fc_globals = globals, @@ -6175,8 +2419,7 @@ call_exc_trace(Py_tracefunc func, PyObject *self, int err; _PyErr_Fetch(tstate, &type, &value, &orig_traceback); if (value == NULL) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } _PyErr_NormalizeException(tstate, &type, &value, &orig_traceback); traceback = (orig_traceback != NULL) ? orig_traceback : Py_None; @@ -6341,38 +2584,23 @@ _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) /* The caller must hold the GIL */ assert(PyGILState_Check()); - static int reentrant = 0; - if (reentrant) { - _PyErr_SetString(tstate, PyExc_RuntimeError, "Cannot install a profile function " - "while another profile function is being installed"); - reentrant = 0; - return -1; - } - reentrant = 1; - /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. 
*/ PyThreadState *current_tstate = _PyThreadState_GET(); if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) { - reentrant = 0; return -1; } - PyObject *profileobj = tstate->c_profileobj; - - tstate->c_profilefunc = NULL; - tstate->c_profileobj = NULL; - /* Must make sure that tracing is not ignored if 'profileobj' is freed */ - _PyThreadState_UpdateTracingState(tstate); - Py_XDECREF(profileobj); - - Py_XINCREF(arg); - tstate->c_profileobj = arg; tstate->c_profilefunc = func; - + PyObject *old_profileobj = tstate->c_profileobj; + tstate->c_profileobj = Py_XNewRef(arg); /* Flag that tracing or profiling is turned on */ _PyThreadState_UpdateTracingState(tstate); - reentrant = 0; + + // gh-98257: Only call Py_XDECREF() once the new profile function is fully + // set, so it's safe to call sys.setprofile() again (reentrant call). + Py_XDECREF(old_profileobj); + return 0; } @@ -6414,39 +2642,23 @@ _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) /* The caller must hold the GIL */ assert(PyGILState_Check()); - static int reentrant = 0; - - if (reentrant) { - _PyErr_SetString(tstate, PyExc_RuntimeError, "Cannot install a trace function " - "while another trace function is being installed"); - reentrant = 0; - return -1; - } - reentrant = 1; - /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. */ PyThreadState *current_tstate = _PyThreadState_GET(); if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) { - reentrant = 0; return -1; } - PyObject *traceobj = tstate->c_traceobj; - - tstate->c_tracefunc = NULL; - tstate->c_traceobj = NULL; - /* Must make sure that profiling is not ignored if 'traceobj' is freed */ - _PyThreadState_UpdateTracingState(tstate); - Py_XINCREF(arg); - Py_XDECREF(traceobj); - tstate->c_traceobj = arg; tstate->c_tracefunc = func; - + PyObject *old_traceobj = tstate->c_traceobj; + tstate->c_traceobj = Py_XNewRef(arg); /* Flag that tracing or profiling is turned on */ _PyThreadState_UpdateTracingState(tstate); - reentrant = 0; + // gh-98257: Only call Py_XDECREF() once the new trace function is fully + // set, so it's safe to call sys.settrace() again (reentrant call). + Py_XDECREF(old_traceobj); + return 0; } @@ -6510,8 +2722,7 @@ _PyEval_SetAsyncGenFirstiter(PyObject *firstiter) return -1; } - Py_XINCREF(firstiter); - Py_XSETREF(tstate->async_gen_firstiter, firstiter); + Py_XSETREF(tstate->async_gen_firstiter, Py_XNewRef(firstiter)); return 0; } @@ -6531,8 +2742,7 @@ _PyEval_SetAsyncGenFinalizer(PyObject *finalizer) return -1; } - Py_XINCREF(finalizer); - Py_XSETREF(tstate->async_gen_finalizer, finalizer); + Py_XSETREF(tstate->async_gen_finalizer, Py_XNewRef(finalizer)); return 0; } @@ -6929,7 +3139,7 @@ import_from(PyThreadState *tstate, PyObject *v, PyObject *name) name, pkgname_or_unknown ); /* NULL checks for errmsg and pkgname done by PyErr_SetImportError. */ - PyErr_SetImportError(errmsg, pkgname, NULL); + _PyErr_SetImportErrorWithNameFrom(errmsg, pkgname, NULL, name); } else { PyObject *spec = PyObject_GetAttr(v, &_Py_ID(__spec__)); @@ -6942,7 +3152,7 @@ import_from(PyThreadState *tstate, PyObject *v, PyObject *name) errmsg = PyUnicode_FromFormat(fmt, name, pkgname_or_unknown, pkgpath); /* NULL checks for errmsg and pkgname done by PyErr_SetImportError. 
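The gh-98257 change above makes _PyEval_SetProfile() and _PyEval_SetTrace() safe to re-enter by reordering the reference swap: the new callback is installed first and the old object is only released afterwards, so a destructor that calls sys.setprofile() or sys.settrace() again sees consistent state. The sketch below mirrors that ordering with a toy refcounted callback type; the names are illustrative, not the CPython API.

#include <stdio.h>
#include <stdlib.h>

struct callback { int refcnt; const char *name; };

static struct callback *cb_new(const char *name) {
    struct callback *cb = malloc(sizeof(*cb));
    cb->refcnt = 1;
    cb->name = name;
    return cb;
}
static struct callback *cb_incref(struct callback *cb) { if (cb) cb->refcnt++; return cb; }
static void cb_decref(struct callback *cb) {
    if (cb && --cb->refcnt == 0) {
        printf("destroying %s (a destructor could re-enter the setter here)\n", cb->name);
        free(cb);
    }
}

static struct callback *current_profile = NULL;

static void set_profile(struct callback *arg) {
    struct callback *old = current_profile;
    current_profile = cb_incref(arg);   /* 1. install the new reference */
    cb_decref(old);                     /* 2. only now drop the old one  */
}

int main(void) {
    struct callback *first = cb_new("first");
    set_profile(first);
    cb_decref(first);          /* drop the caller's reference */
    struct callback *second = cb_new("second");
    set_profile(second);       /* "first" is destroyed here, after "second" is installed */
    cb_decref(second);
    set_profile(NULL);         /* "second" is destroyed here */
    return 0;
}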
*/ - PyErr_SetImportError(errmsg, pkgname, pkgpath); + _PyErr_SetImportErrorWithNameFrom(errmsg, pkgname, pkgpath, name); } Py_XDECREF(errmsg); diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index fd737b5738e889..83f4e91e54573a 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -5,6 +5,7 @@ #include "pycore_pyerrors.h" // _PyErr_Fetch() #include "pycore_pylifecycle.h" // _PyErr_Print() #include "pycore_initconfig.h" // _PyStatus_OK() +#include "pycore_interp.h" // _Py_RunGC() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() /* @@ -69,7 +70,8 @@ COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, && _Py_ThreadCanHandleSignals(interp)) | (_Py_atomic_load_relaxed_int32(&ceval2->pending.calls_to_do) && _Py_ThreadCanHandlePendingCalls()) - | ceval2->pending.async_exc); + | ceval2->pending.async_exc + | _Py_atomic_load_relaxed_int32(&ceval2->gc_scheduled)); } @@ -817,11 +819,10 @@ make_pending_calls(PyInterpreterState *interp) } /* don't perform recursive pending calls */ - static int busy = 0; - if (busy) { + if (interp->ceval.pending.busy) { return 0; } - busy = 1; + interp->ceval.pending.busy = 1; /* unsignal before starting to call callbacks, so that any callback added in-between re-signals */ @@ -849,11 +850,11 @@ make_pending_calls(PyInterpreterState *interp) } } - busy = 0; + interp->ceval.pending.busy = 0; return res; error: - busy = 0; + interp->ceval.pending.busy = 0; SIGNAL_PENDING_CALLS(interp); return res; } @@ -938,6 +939,7 @@ _Py_HandlePending(PyThreadState *tstate) { _PyRuntimeState * const runtime = &_PyRuntime; struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *interp_ceval_state = &tstate->interp->ceval; /* Pending signals */ if (_Py_atomic_load_relaxed_int32(&ceval->signals_pending)) { @@ -947,20 +949,26 @@ _Py_HandlePending(PyThreadState *tstate) } /* Pending calls */ - struct _ceval_state *ceval2 = &tstate->interp->ceval; - if (_Py_atomic_load_relaxed_int32(&ceval2->pending.calls_to_do)) { + if (_Py_atomic_load_relaxed_int32(&interp_ceval_state->pending.calls_to_do)) { if (make_pending_calls(tstate->interp) != 0) { return -1; } } + /* GC scheduled to run */ + if (_Py_atomic_load_relaxed_int32(&interp_ceval_state->gc_scheduled)) { + _Py_atomic_store_relaxed(&interp_ceval_state->gc_scheduled, 0); + COMPUTE_EVAL_BREAKER(tstate->interp, ceval, interp_ceval_state); + _Py_RunGC(tstate); + } + /* GIL drop request */ - if (_Py_atomic_load_relaxed_int32(&ceval2->gil_drop_request)) { + if (_Py_atomic_load_relaxed_int32(&interp_ceval_state->gil_drop_request)) { /* Give another thread a chance */ if (_PyThreadState_Swap(&runtime->gilstate, NULL) != tstate) { Py_FatalError("tstate mix-up"); } - drop_gil(ceval, ceval2, tstate); + drop_gil(ceval, interp_ceval_state, tstate); /* Other threads may run now */ @@ -981,16 +989,17 @@ _Py_HandlePending(PyThreadState *tstate) return -1; } -#ifdef MS_WINDOWS - // bpo-42296: On Windows, _PyEval_SignalReceived() can be called in a - // different thread than the Python thread, in which case + + // It is possible that some of the conditions that trigger the eval breaker + // are called in a different thread than the Python thread. An example of + // this is bpo-42296: On Windows, _PyEval_SignalReceived() can be called in + // a different thread than the Python thread, in which case // _Py_ThreadCanHandleSignals() is wrong. Recompute eval_breaker in the // current Python thread with the correct _Py_ThreadCanHandleSignals() // value. 
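The ceval_gil.c hunks above fold a newly scheduled garbage collection into the eval breaker: COMPUTE_EVAL_BREAKER ORs one more relaxed atomic flag (gc_scheduled) into the single value the hot loop polls, and _Py_HandlePending clears that flag and runs the GC when it fires. A stand-alone C11 sketch of folding several flags into one breaker value (the field names are illustrative):

#include <stdatomic.h>
#include <stdio.h>

struct ceval_state {
    atomic_int signals_pending;
    atomic_int calls_to_do;
    atomic_int gil_drop_request;
    atomic_int gc_scheduled;      /* the newly added condition */
    atomic_int eval_breaker;      /* the single flag the eval loop polls */
};

static void compute_eval_breaker(struct ceval_state *s) {
    int value = atomic_load_explicit(&s->signals_pending, memory_order_relaxed)
              | atomic_load_explicit(&s->calls_to_do, memory_order_relaxed)
              | atomic_load_explicit(&s->gil_drop_request, memory_order_relaxed)
              | atomic_load_explicit(&s->gc_scheduled, memory_order_relaxed);
    atomic_store_explicit(&s->eval_breaker, value, memory_order_relaxed);
}

int main(void) {
    struct ceval_state s;
    atomic_init(&s.signals_pending, 0);
    atomic_init(&s.calls_to_do, 0);
    atomic_init(&s.gil_drop_request, 0);
    atomic_init(&s.gc_scheduled, 0);
    atomic_init(&s.eval_breaker, 0);

    compute_eval_breaker(&s);
    printf("breaker=%d\n", atomic_load(&s.eval_breaker));  /* 0: nothing to do */

    atomic_store(&s.gc_scheduled, 1);   /* e.g. the allocator decided a GC is due */
    compute_eval_breaker(&s);
    printf("breaker=%d\n", atomic_load(&s.eval_breaker));  /* 1: loop will break out */
    return 0;
}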
It prevents to interrupt the eval loop at every instruction if // the current Python thread cannot handle signals (if // _Py_ThreadCanHandleSignals() is false). - COMPUTE_EVAL_BREAKER(tstate->interp, ceval, ceval2); -#endif + COMPUTE_EVAL_BREAKER(tstate->interp, ceval, interp_ceval_state); return 0; } diff --git a/Python/clinic/_warnings.c.h b/Python/clinic/_warnings.c.h index 13ebbf45b8e168..8838a42afc1c2a 100644 --- a/Python/clinic/_warnings.c.h +++ b/Python/clinic/_warnings.c.h @@ -199,4 +199,21 @@ warnings_warn_explicit(PyObject *module, PyObject *const *args, Py_ssize_t nargs exit: return return_value; } -/*[clinic end generated code: output=2eac4fabc87a4d56 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(warnings_filters_mutated__doc__, +"_filters_mutated($module, /)\n" +"--\n" +"\n"); + +#define WARNINGS_FILTERS_MUTATED_METHODDEF \ + {"_filters_mutated", (PyCFunction)warnings_filters_mutated, METH_NOARGS, warnings_filters_mutated__doc__}, + +static PyObject * +warnings_filters_mutated_impl(PyObject *module); + +static PyObject * +warnings_filters_mutated(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return warnings_filters_mutated_impl(module); +} +/*[clinic end generated code: output=0d264d1ddfc37100 input=a9049054013a1b77]*/ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index 6864b8b0e03b2f..5678d0ac2a608b 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -884,33 +884,6 @@ sys_gettotalrefcount(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* defined(Py_REF_DEBUG) */ -PyDoc_STRVAR(sys__getquickenedcount__doc__, -"_getquickenedcount($module, /)\n" -"--\n" -"\n"); - -#define SYS__GETQUICKENEDCOUNT_METHODDEF \ - {"_getquickenedcount", (PyCFunction)sys__getquickenedcount, METH_NOARGS, sys__getquickenedcount__doc__}, - -static Py_ssize_t -sys__getquickenedcount_impl(PyObject *module); - -static PyObject * -sys__getquickenedcount(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - PyObject *return_value = NULL; - Py_ssize_t _return_value; - - _return_value = sys__getquickenedcount_impl(module); - if ((_return_value == -1) && PyErr_Occurred()) { - goto exit; - } - return_value = PyLong_FromSsize_t(_return_value); - -exit: - return return_value; -} - PyDoc_STRVAR(sys_getallocatedblocks__doc__, "getallocatedblocks($module, /)\n" "--\n" @@ -1231,7 +1204,7 @@ PyDoc_STRVAR(sys_activate_stack_trampoline__doc__, "activate_stack_trampoline($module, backend, /)\n" "--\n" "\n" -"Activate the perf profiler trampoline."); +"Activate stack profiler trampoline *backend*."); #define SYS_ACTIVATE_STACK_TRAMPOLINE_METHODDEF \ {"activate_stack_trampoline", (PyCFunction)sys_activate_stack_trampoline, METH_O, sys_activate_stack_trampoline__doc__}, @@ -1268,7 +1241,9 @@ PyDoc_STRVAR(sys_deactivate_stack_trampoline__doc__, "deactivate_stack_trampoline($module, /)\n" "--\n" "\n" -"Dectivate the perf profiler trampoline."); +"Deactivate the current stack profiler trampoline backend.\n" +"\n" +"If no stack profiler is activated, this function has no effect."); #define SYS_DEACTIVATE_STACK_TRAMPOLINE_METHODDEF \ {"deactivate_stack_trampoline", (PyCFunction)sys_deactivate_stack_trampoline, METH_NOARGS, sys_deactivate_stack_trampoline__doc__}, @@ -1286,7 +1261,7 @@ PyDoc_STRVAR(sys_is_stack_trampoline_active__doc__, "is_stack_trampoline_active($module, /)\n" "--\n" "\n" -"Returns *True* if the perf profiler trampoline is active."); +"Return *True* if a stack profiler trampoline is active."); #define SYS_IS_STACK_TRAMPOLINE_ACTIVE_METHODDEF \ 
{"is_stack_trampoline_active", (PyCFunction)sys_is_stack_trampoline_active, METH_NOARGS, sys_is_stack_trampoline_active__doc__}, @@ -1343,4 +1318,4 @@ sys_is_stack_trampoline_active(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=15318cdd96b62b06 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=79228e569529129c input=a9049054013a1b77]*/ diff --git a/Python/codecs.c b/Python/codecs.c index 33965f885f7064..b2087b499dfdba 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -235,8 +235,7 @@ PyObject *args_tuple(PyObject *object, args = PyTuple_New(1 + (errors != NULL)); if (args == NULL) return NULL; - Py_INCREF(object); - PyTuple_SET_ITEM(args,0,object); + PyTuple_SET_ITEM(args, 0, Py_NewRef(object)); if (errors) { PyObject *v; @@ -263,8 +262,7 @@ PyObject *codec_getitem(const char *encoding, int index) return NULL; v = PyTuple_GET_ITEM(codecs, index); Py_DECREF(codecs); - Py_INCREF(v); - return v; + return Py_NewRef(v); } /* Helper functions to create an incremental codec. */ @@ -430,8 +428,7 @@ _PyCodec_EncodeInternal(PyObject *object, "encoder must return a tuple (object, integer)"); goto onError; } - v = PyTuple_GET_ITEM(result,0); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(result,0)); /* We don't check or use the second (integer) entry. */ Py_DECREF(args); @@ -475,8 +472,7 @@ _PyCodec_DecodeInternal(PyObject *object, "decoder must return a tuple (object,integer)"); goto onError; } - v = PyTuple_GET_ITEM(result,0); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(result,0)); /* We don't check or use the second (integer) entry. */ Py_DECREF(args); @@ -571,8 +567,7 @@ PyObject *codec_getitem_checked(const char *encoding, if (codec == NULL) return NULL; - v = PyTuple_GET_ITEM(codec, index); - Py_INCREF(v); + v = Py_NewRef(PyTuple_GET_ITEM(codec, index)); Py_DECREF(codec); return v; } diff --git a/Python/compile.c b/Python/compile.c index 2da36d0f6316e0..dd8596defb8efe 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -114,21 +114,33 @@ (opcode) == RAISE_VARARGS || \ (opcode) == RERAISE) +#define IS_SUPERINSTRUCTION_OPCODE(opcode) \ + ((opcode) == LOAD_FAST__LOAD_FAST || \ + (opcode) == LOAD_FAST__LOAD_CONST || \ + (opcode) == LOAD_CONST__LOAD_FAST || \ + (opcode) == STORE_FAST__LOAD_FAST || \ + (opcode) == STORE_FAST__STORE_FAST) + #define IS_TOP_LEVEL_AWAIT(c) ( \ - (c->c_flags->cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ + (c->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ && (c->u->u_ste->ste_type == ModuleBlock)) -struct location { - int lineno; - int end_lineno; - int col_offset; - int end_col_offset; -}; +typedef _PyCompilerSrcLocation location; #define LOCATION(LNO, END_LNO, COL, END_COL) \ - ((const struct location){(LNO), (END_LNO), (COL), (END_COL)}) + ((const location){(LNO), (END_LNO), (COL), (END_COL)}) + +static location NO_LOCATION = {-1, -1, -1, -1}; -static struct location NO_LOCATION = {-1, -1, -1, -1}; +/* Return true if loc1 starts after loc2 ends. 
*/ +static inline bool +location_is_after(location loc1, location loc2) { + return (loc1.lineno > loc2.end_lineno) || + ((loc1.lineno == loc2.end_lineno) && + (loc1.col_offset > loc2.end_col_offset)); +} + +#define LOC(x) SRC_LOCATION_FROM_AST(x) typedef struct jump_target_label_ { int id; @@ -153,7 +165,7 @@ static struct jump_target_label_ NO_LABEL = {-1}; struct instr { int i_opcode; int i_oparg; - struct location i_loc; + location i_loc; /* The following fields should not be set by the front-end: */ struct basicblock_ *i_target; /* target block (if jump instruction) */ struct basicblock_ *i_except; /* target block when exception is raised */ @@ -258,6 +270,8 @@ typedef struct basicblock_ { int b_iused; /* length of instruction array (b_instr) */ int b_ialloc; + /* Used by add_checks_for_loads_of_unknown_variables */ + uint64_t b_unsafe_locals_mask; /* Number of predecessors that a block has. */ int b_predecessors; /* depth of stack upon entry of block, computed by stackdepth() */ @@ -386,7 +400,6 @@ struct compiler_unit { struct fblockinfo u_fblock[CO_MAXBLOCKS]; int u_firstlineno; /* the first lineno of the block */ - struct location u_loc; /* line/column info of the current stmt */ }; /* This struct captures the global state of a compilation. @@ -404,8 +417,8 @@ handled by the symbol analysis pass. struct compiler { PyObject *c_filename; struct symtable *c_st; - PyFutureFeatures *c_future; /* pointer to module's __future__ */ - PyCompilerFlags *c_flags; + PyFutureFeatures c_future; /* module's __future__ */ + PyCompilerFlags c_flags; int c_optimize; /* optimization level */ int c_interactive; /* true if in interactive mode */ @@ -418,7 +431,7 @@ struct compiler { }; #define CFG_BUILDER(c) (&((c)->u->u_cfg_builder)) -#define COMPILER_LOC(c) ((c)->u->u_loc) + typedef struct { // A list of strings corresponding to name captures. 
It is used to track: @@ -449,12 +462,12 @@ static int basicblock_next_instr(basicblock *); static basicblock *cfg_builder_new_block(cfg_builder *g); static int cfg_builder_maybe_start_new_block(cfg_builder *g); -static int cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, struct location loc); +static int cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc); static void compiler_free(struct compiler *); -static int compiler_error(struct compiler *, const char *, ...); -static int compiler_warn(struct compiler *, const char *, ...); -static int compiler_nameop(struct compiler *, identifier, expr_context_ty); +static int compiler_error(struct compiler *, location loc, const char *, ...); +static int compiler_warn(struct compiler *, location loc, const char *, ...); +static int compiler_nameop(struct compiler *, location, identifier, expr_context_ty); static PyCodeObject *compiler_mod(struct compiler *, mod_ty); static int compiler_visit_stmt(struct compiler *, stmt_ty); @@ -472,33 +485,34 @@ static int compiler_with(struct compiler *, stmt_ty, int); static int compiler_async_with(struct compiler *, stmt_ty, int); static int compiler_async_for(struct compiler *, stmt_ty); static int compiler_call_simple_kw_helper(struct compiler *c, + location loc, asdl_keyword_seq *keywords, Py_ssize_t nkwelts); -static int compiler_call_helper(struct compiler *c, int n, - asdl_expr_seq *args, +static int compiler_call_helper(struct compiler *c, location loc, + int n, asdl_expr_seq *args, asdl_keyword_seq *keywords); static int compiler_try_except(struct compiler *, stmt_ty); static int compiler_try_star_except(struct compiler *, stmt_ty); static int compiler_set_qualname(struct compiler *); static int compiler_sync_comprehension_generator( - struct compiler *c, + struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, expr_ty elt, expr_ty val, int type); static int compiler_async_comprehension_generator( - struct compiler *c, + struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, expr_ty elt, expr_ty val, int type); static int compiler_pattern(struct compiler *, pattern_ty, pattern_context *); static int compiler_match(struct compiler *, stmt_ty); -static int compiler_pattern_subpattern(struct compiler *, pattern_ty, - pattern_context *); +static int compiler_pattern_subpattern(struct compiler *, + pattern_ty, pattern_context *); -static void remove_redundant_nops(basicblock *bb); +static int remove_redundant_nops(basicblock *bb); static PyCodeObject *assemble(struct compiler *, int addNone); @@ -515,8 +529,7 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) if (privateobj == NULL || !PyUnicode_Check(privateobj) || PyUnicode_READ_CHAR(ident, 0) != '_' || PyUnicode_READ_CHAR(ident, 1) != '_') { - Py_INCREF(ident); - return ident; + return Py_NewRef(ident); } nlen = PyUnicode_GET_LENGTH(ident); plen = PyUnicode_GET_LENGTH(privateobj); @@ -532,16 +545,14 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) if ((PyUnicode_READ_CHAR(ident, nlen-1) == '_' && PyUnicode_READ_CHAR(ident, nlen-2) == '_') || PyUnicode_FindChar(ident, '.', 0, nlen, 1) != -1) { - Py_INCREF(ident); - return ident; /* Don't mangle __whatever__ */ + return Py_NewRef(ident); /* Don't mangle __whatever__ */ } /* Strip leading underscores from class name */ ipriv = 0; while (PyUnicode_READ_CHAR(privateobj, ipriv) == '_') ipriv++; if (ipriv == plen) { - Py_INCREF(ident); - return ident; /* Don't mangle if class is 
just underscores */ + return Py_NewRef(ident); /* Don't mangle if class is just underscores */ } plen -= ipriv; @@ -572,11 +583,11 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) return result; } + static int -compiler_init(struct compiler *c) +compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename, + PyCompilerFlags flags, int optimize, PyArena *arena) { - memset(c, 0, sizeof(struct compiler)); - c->c_const_cache = PyDict_New(); if (!c->c_const_cache) { return 0; @@ -584,58 +595,65 @@ compiler_init(struct compiler *c) c->c_stack = PyList_New(0); if (!c->c_stack) { - Py_CLEAR(c->c_const_cache); return 0; } - return 1; -} - -PyCodeObject * -_PyAST_Compile(mod_ty mod, PyObject *filename, PyCompilerFlags *flags, - int optimize, PyArena *arena) -{ - struct compiler c; - PyCodeObject *co = NULL; - PyCompilerFlags local_flags = _PyCompilerFlags_INIT; - int merged; - if (!compiler_init(&c)) - return NULL; - Py_INCREF(filename); - c.c_filename = filename; - c.c_arena = arena; - c.c_future = _PyFuture_FromAST(mod, filename); - if (c.c_future == NULL) - goto finally; - if (!flags) { - flags = &local_flags; + c->c_filename = Py_NewRef(filename); + c->c_arena = arena; + if (!_PyFuture_FromAST(mod, filename, &c->c_future)) { + return 0; } - merged = c.c_future->ff_features | flags->cf_flags; - c.c_future->ff_features = merged; - flags->cf_flags = merged; - c.c_flags = flags; - c.c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize; - c.c_nestlevel = 0; + int merged = c->c_future.ff_features | flags.cf_flags; + c->c_future.ff_features = merged; + flags.cf_flags = merged; + c->c_flags = flags; + c->c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize; + c->c_nestlevel = 0; _PyASTOptimizeState state; - state.optimize = c.c_optimize; + state.optimize = c->c_optimize; state.ff_features = merged; if (!_PyAST_Optimize(mod, arena, &state)) { - goto finally; + return 0; } - - c.c_st = _PySymtable_Build(mod, filename, c.c_future); - if (c.c_st == NULL) { - if (!PyErr_Occurred()) + c->c_st = _PySymtable_Build(mod, filename, &c->c_future); + if (c->c_st == NULL) { + if (!PyErr_Occurred()) { PyErr_SetString(PyExc_SystemError, "no symtable"); - goto finally; + } + return 0; } + return 1; +} - co = compiler_mod(&c, mod); +static struct compiler* +new_compiler(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags, + int optimize, PyArena *arena) +{ + PyCompilerFlags flags = pflags ? 
*pflags : _PyCompilerFlags_INIT; + struct compiler *c = PyMem_Calloc(1, sizeof(struct compiler)); + if (c == NULL) { + return NULL; + } + if (!compiler_setup(c, mod, filename, flags, optimize, arena)) { + compiler_free(c); + return NULL; + } + return c; +} + +PyCodeObject * +_PyAST_Compile(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags, + int optimize, PyArena *arena) +{ + struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena); + if (c == NULL) { + return NULL; + } - finally: - compiler_free(&c); + PyCodeObject *co = compiler_mod(c, mod); + compiler_free(c); assert(co || PyErr_Occurred()); return co; } @@ -645,11 +663,10 @@ compiler_free(struct compiler *c) { if (c->c_st) _PySymtable_Free(c->c_st); - if (c->c_future) - PyObject_Free(c->c_future); Py_XDECREF(c->c_filename); - Py_DECREF(c->c_const_cache); - Py_DECREF(c->c_stack); + Py_XDECREF(c->c_const_cache); + Py_XDECREF(c->c_stack); + PyMem_Free(c); } static PyObject * @@ -846,8 +863,7 @@ compiler_set_qualname(struct compiler *c) return 0; } else { - Py_INCREF(parent->u_qualname); - base = parent->u_qualname; + base = Py_NewRef(parent->u_qualname); } } } @@ -863,8 +879,7 @@ compiler_set_qualname(struct compiler *c) return 0; } else { - Py_INCREF(u->u_name); - name = u->u_name; + name = Py_NewRef(u->u_name); } u->u_qualname = name; @@ -988,28 +1003,6 @@ basicblock_next_instr(basicblock *b) return b->b_iused++; } -/* Set the line number and column offset for the following instructions. - - The line number is reset in the following cases: - - when entering a new scope - - on each statement - - on each expression and sub-expression - - before the "except" and "finally" clauses -*/ - -#define SET_LOC(c, x) \ - (c)->u->u_loc.lineno = (x)->lineno; \ - (c)->u->u_loc.end_lineno = (x)->end_lineno; \ - (c)->u->u_loc.col_offset = (x)->col_offset; \ - (c)->u->u_loc.end_col_offset = (x)->end_col_offset; - -// Artificial instructions -#define UNSET_LOC(c) \ - (c)->u->u_loc.lineno = -1; \ - (c)->u->u_loc.end_lineno = -1; \ - (c)->u->u_loc.col_offset = -1; \ - (c)->u->u_loc.end_col_offset = -1; - /* Return the stack effect of opcode with argument oparg. @@ -1035,6 +1028,8 @@ stack_effect(int opcode, int oparg, int jump) return -1; case SWAP: return 0; + case END_FOR: + return -2; /* Unary operators */ case UNARY_POSITIVE: @@ -1091,8 +1086,7 @@ stack_effect(int opcode, int oparg, int jump) case UNPACK_EX: return (oparg&0xFF) + (oparg>>8); case FOR_ITER: - /* -1 at end of iterator, 1 if continue iterating. */ - return jump > 0 ? -1 : 1; + return 1; case SEND: return jump > 0 ? -1 : 0; case STORE_ATTR: @@ -1168,6 +1162,9 @@ stack_effect(int opcode, int oparg, int jump) * if an exception be raised. */ return jump ? 
1 : 0; + case STOPITERATION_ERROR: + return 0; + case PREP_RERAISE_STAR: return -1; case RERAISE: @@ -1267,6 +1264,8 @@ stack_effect(int opcode, int oparg, int jump) return 1; case BINARY_OP: return -1; + case INTERPRETER_EXIT: + return -1; default: return PY_INVALID_STACK_EFFECT; } @@ -1290,7 +1289,7 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg) */ static int -basicblock_addop(basicblock *b, int opcode, int oparg, struct location loc) +basicblock_addop(basicblock *b, int opcode, int oparg, location loc) { assert(IS_WITHIN_OPCODE_RANGE(opcode)); assert(!IS_ASSEMBLER_OPCODE(opcode)); @@ -1336,7 +1335,7 @@ cfg_builder_maybe_start_new_block(cfg_builder *g) } static int -cfg_builder_addop(cfg_builder *g, int opcode, int oparg, struct location loc) +cfg_builder_addop(cfg_builder *g, int opcode, int oparg, location loc) { if (cfg_builder_maybe_start_new_block(g) != 0) { return -1; @@ -1345,7 +1344,7 @@ cfg_builder_addop(cfg_builder *g, int opcode, int oparg, struct location loc) } static int -cfg_builder_addop_noarg(cfg_builder *g, int opcode, struct location loc) +cfg_builder_addop_noarg(cfg_builder *g, int opcode, location loc) { assert(!HAS_ARG(opcode)); return cfg_builder_addop(g, opcode, 0, loc); @@ -1386,8 +1385,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) // None and Ellipsis are singleton, and key is the singleton. // No need to merge object and key. if (o == Py_None || o == Py_Ellipsis) { - Py_INCREF(o); - return o; + return Py_NewRef(o); } PyObject *key = _PyCode_ConstantKey(o); @@ -1426,8 +1424,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) v = u; } if (v != item) { - Py_INCREF(v); - PyTuple_SET_ITEM(o, i, v); + PyTuple_SET_ITEM(o, i, Py_NewRef(v)); Py_DECREF(item); } @@ -1462,8 +1459,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) } PyObject *u; if (PyTuple_CheckExact(k)) { - u = PyTuple_GET_ITEM(k, 1); - Py_INCREF(u); + u = Py_NewRef(PyTuple_GET_ITEM(k, 1)); Py_DECREF(k); } else { @@ -1503,27 +1499,27 @@ compiler_add_const(struct compiler *c, PyObject *o) } static int -compiler_addop_load_const(struct compiler *c, PyObject *o) +compiler_addop_load_const(struct compiler *c, location loc, PyObject *o) { Py_ssize_t arg = compiler_add_const(c, o); if (arg < 0) return 0; - return cfg_builder_addop_i(CFG_BUILDER(c), LOAD_CONST, arg, COMPILER_LOC(c)); + return cfg_builder_addop_i(CFG_BUILDER(c), LOAD_CONST, arg, loc); } static int -compiler_addop_o(struct compiler *c, int opcode, PyObject *dict, - PyObject *o) +compiler_addop_o(struct compiler *c, location loc, + int opcode, PyObject *dict, PyObject *o) { Py_ssize_t arg = dict_add_o(dict, o); if (arg < 0) return 0; - return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, COMPILER_LOC(c)); + return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc); } static int -compiler_addop_name(struct compiler *c, int opcode, PyObject *dict, - PyObject *o) +compiler_addop_name(struct compiler *c, location loc, + int opcode, PyObject *dict, PyObject *o) { Py_ssize_t arg; @@ -1542,14 +1538,14 @@ compiler_addop_name(struct compiler *c, int opcode, PyObject *dict, arg <<= 1; arg |= 1; } - return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, COMPILER_LOC(c)); + return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc); } /* Add an opcode with an integer argument. Returns 0 on failure, 1 on success. 
*/ static int -cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, struct location loc) +cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc) { /* oparg value is unsigned, but a signed C int is usually used to store it in the C code (like Python/ceval.c). @@ -1564,7 +1560,8 @@ cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, struct locatio } static int -cfg_builder_addop_j(cfg_builder *g, int opcode, jump_target_label target, struct location loc) +cfg_builder_addop_j(cfg_builder *g, location loc, + int opcode, jump_target_label target) { assert(IS_LABEL(target)); assert(IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)); @@ -1572,102 +1569,84 @@ cfg_builder_addop_j(cfg_builder *g, int opcode, jump_target_label target, struct } -#define ADDOP(C, OP) { \ - if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), COMPILER_LOC(C))) \ - return 0; \ -} - -#define ADDOP_NOLINE(C, OP) { \ - if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), NO_LOCATION)) \ +#define ADDOP(C, LOC, OP) { \ + if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) \ return 0; \ } -#define ADDOP_IN_SCOPE(C, OP) { \ - if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), COMPILER_LOC(C))) { \ +#define ADDOP_IN_SCOPE(C, LOC, OP) { \ + if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) { \ compiler_exit_scope(c); \ return 0; \ } \ } -#define ADDOP_LOAD_CONST(C, O) { \ - if (!compiler_addop_load_const((C), (O))) \ +#define ADDOP_LOAD_CONST(C, LOC, O) { \ + if (!compiler_addop_load_const((C), (LOC), (O))) \ return 0; \ } /* Same as ADDOP_LOAD_CONST, but steals a reference. */ -#define ADDOP_LOAD_CONST_NEW(C, O) { \ +#define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \ PyObject *__new_const = (O); \ if (__new_const == NULL) { \ return 0; \ } \ - if (!compiler_addop_load_const((C), __new_const)) { \ + if (!compiler_addop_load_const((C), (LOC), __new_const)) { \ Py_DECREF(__new_const); \ return 0; \ } \ Py_DECREF(__new_const); \ } -#define ADDOP_N(C, OP, O, TYPE) { \ +#define ADDOP_N(C, LOC, OP, O, TYPE) { \ assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \ - if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) { \ + if (!compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) { \ Py_DECREF((O)); \ return 0; \ } \ Py_DECREF((O)); \ } -#define ADDOP_NAME(C, OP, O, TYPE) { \ - if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \ +#define ADDOP_NAME(C, LOC, OP, O, TYPE) { \ + if (!compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) \ return 0; \ } -#define ADDOP_I(C, OP, O) { \ - if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), COMPILER_LOC(C))) \ +#define ADDOP_I(C, LOC, OP, O) { \ + if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC))) \ return 0; \ } -#define ADDOP_I_NOLINE(C, OP, O) { \ - if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), NO_LOCATION)) \ +#define ADDOP_JUMP(C, LOC, OP, O) { \ + if (!cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O))) \ return 0; \ } -#define ADDOP_JUMP(C, OP, O) { \ - if (!cfg_builder_addop_j(CFG_BUILDER(C), (OP), (O), COMPILER_LOC(C))) \ +#define ADDOP_COMPARE(C, LOC, CMP) { \ + if (!compiler_addcompare((C), (LOC), (cmpop_ty)(CMP))) \ return 0; \ } -/* Add a jump with no line number. - * Used for artificial jumps that have no corresponding - * token in the source code. 
*/ -#define ADDOP_JUMP_NOLINE(C, OP, O) { \ - if (!cfg_builder_addop_j(CFG_BUILDER(C), (OP), (O), NO_LOCATION)) \ - return 0; \ -} +#define ADDOP_BINARY(C, LOC, BINOP) \ + RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), false)) -#define ADDOP_COMPARE(C, CMP) { \ - if (!compiler_addcompare((C), (cmpop_ty)(CMP))) \ - return 0; \ -} +#define ADDOP_INPLACE(C, LOC, BINOP) \ + RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), true)) + +#define ADD_YIELD_FROM(C, LOC, await) \ + RETURN_IF_FALSE(compiler_add_yield_from((C), (LOC), (await))) -#define ADDOP_BINARY(C, BINOP) \ - RETURN_IF_FALSE(addop_binary((C), (BINOP), false)) +#define POP_EXCEPT_AND_RERAISE(C, LOC) \ + RETURN_IF_FALSE(compiler_pop_except_and_reraise((C), (LOC))) -#define ADDOP_INPLACE(C, BINOP) \ - RETURN_IF_FALSE(addop_binary((C), (BINOP), true)) +#define ADDOP_YIELD(C, LOC) \ + RETURN_IF_FALSE(addop_yield((C), (LOC))) /* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use the ASDL name to synthesize the name of the C type and the visit function. */ -#define ADD_YIELD_FROM(C, await) \ - RETURN_IF_FALSE(compiler_add_yield_from((C), (await))) - -#define POP_EXCEPT_AND_RERAISE(C) \ - RETURN_IF_FALSE(compiler_pop_except_and_reraise((C))) - -#define ADDOP_YIELD(C) \ - RETURN_IF_FALSE(addop_yield(C)) - #define VISIT(C, TYPE, V) {\ if (!compiler_visit_ ## TYPE((C), (V))) \ return 0; \ @@ -1711,6 +1690,8 @@ static int compiler_enter_scope(struct compiler *c, identifier name, int scope_type, void *key, int lineno) { + location loc = LOCATION(lineno, lineno, 0, 0); + struct compiler_unit *u; u = (struct compiler_unit *)PyObject_Calloc(1, sizeof( @@ -1728,8 +1709,7 @@ compiler_enter_scope(struct compiler *c, identifier name, compiler_unit_free(u); return 0; } - Py_INCREF(name); - u->u_name = name; + u->u_name = Py_NewRef(name); u->u_varnames = list2dict(u->u_ste->ste_varnames); u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0); if (!u->u_varnames || !u->u_cellvars) { @@ -1758,7 +1738,6 @@ compiler_enter_scope(struct compiler *c, identifier name, u->u_nfblocks = 0; u->u_firstlineno = lineno; - u->u_loc = LOCATION(lineno, lineno, 0, 0); u->u_consts = PyDict_New(); if (!u->u_consts) { compiler_unit_free(u); @@ -1781,8 +1760,7 @@ compiler_enter_scope(struct compiler *c, identifier name, return 0; } Py_DECREF(capsule); - u->u_private = c->u->u_private; - Py_XINCREF(u->u_private); + u->u_private = Py_XNewRef(c->u->u_private); } c->u = u; @@ -1794,16 +1772,16 @@ compiler_enter_scope(struct compiler *c, identifier name, } if (u->u_scope_type == COMPILER_SCOPE_MODULE) { - c->u->u_loc.lineno = 0; + loc.lineno = 0; } else { if (!compiler_set_qualname(c)) return 0; } - ADDOP_I(c, RESUME, 0); + ADDOP_I(c, loc, RESUME, 0); if (u->u_scope_type == COMPILER_SCOPE_MODULE) { - c->u->u_loc.lineno = -1; + loc.lineno = -1; } return 1; } @@ -1911,12 +1889,13 @@ find_ann(asdl_stmt_seq *stmts) */ static int -compiler_push_fblock(struct compiler *c, enum fblocktype t, jump_target_label block_label, +compiler_push_fblock(struct compiler *c, location loc, + enum fblocktype t, jump_target_label block_label, jump_target_label exit, void *datum) { struct fblockinfo *f; if (c->u->u_nfblocks >= CO_MAXBLOCKS) { - return compiler_error(c, "too many statically nested blocks"); + return compiler_error(c, loc, "too many statically nested blocks"); } f = &c->u->u_fblock[c->u->u_nfblocks++]; f->fb_type = t; @@ -1937,40 +1916,41 @@ compiler_pop_fblock(struct compiler *c, enum fblocktype t, jump_target_label blo } static int 
-compiler_call_exit_with_nones(struct compiler *c) { - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_I(c, CALL, 2); +compiler_call_exit_with_nones(struct compiler *c, location loc) +{ + ADDOP_LOAD_CONST(c, loc, Py_None); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADDOP_I(c, loc, CALL, 2); return 1; } static int -compiler_add_yield_from(struct compiler *c, int await) +compiler_add_yield_from(struct compiler *c, location loc, int await) { NEW_JUMP_TARGET_LABEL(c, send); NEW_JUMP_TARGET_LABEL(c, fail); NEW_JUMP_TARGET_LABEL(c, exit); USE_LABEL(c, send); - ADDOP_JUMP(c, SEND, exit); + ADDOP_JUMP(c, loc, SEND, exit); // Set up a virtual try/except to handle when StopIteration is raised during // a close or throw call. The only way YIELD_VALUE raises if they do! - ADDOP_JUMP(c, SETUP_FINALLY, fail); - ADDOP_I(c, YIELD_VALUE, 0); - ADDOP_NOLINE(c, POP_BLOCK); - ADDOP_I(c, RESUME, await ? 3 : 2); - ADDOP_JUMP(c, JUMP_NO_INTERRUPT, send); + ADDOP_JUMP(c, loc, SETUP_FINALLY, fail); + ADDOP_I(c, loc, YIELD_VALUE, 0); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP_I(c, loc, RESUME, await ? 3 : 2); + ADDOP_JUMP(c, loc, JUMP_NO_INTERRUPT, send); USE_LABEL(c, fail); - ADDOP(c, CLEANUP_THROW); + ADDOP(c, loc, CLEANUP_THROW); USE_LABEL(c, exit); return 1; } static int -compiler_pop_except_and_reraise(struct compiler *c) +compiler_pop_except_and_reraise(struct compiler *c, location loc) { /* Stack contents * [exc_info, lasti, exc] COPY 3 @@ -1979,9 +1959,9 @@ compiler_pop_except_and_reraise(struct compiler *c) * (exception_unwind clears the stack) */ - ADDOP_I(c, COPY, 3); - ADDOP(c, POP_EXCEPT); - ADDOP_I(c, RERAISE, 1); + ADDOP_I(c, loc, COPY, 3); + ADDOP(c, loc, POP_EXCEPT); + ADDOP_I(c, loc, RERAISE, 1); return 1; } @@ -1991,8 +1971,8 @@ compiler_pop_except_and_reraise(struct compiler *c) * be popped. 
*/ static int -compiler_unwind_fblock(struct compiler *c, struct fblockinfo *info, - int preserve_tos) +compiler_unwind_fblock(struct compiler *c, location *ploc, + struct fblockinfo *info, int preserve_tos) { switch (info->fb_type) { case WHILE_LOOP: @@ -2004,20 +1984,20 @@ compiler_unwind_fblock(struct compiler *c, struct fblockinfo *info, case FOR_LOOP: /* Pop the iterator */ if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - ADDOP(c, POP_TOP); + ADDOP(c, *ploc, POP_TOP); return 1; case TRY_EXCEPT: - ADDOP(c, POP_BLOCK); + ADDOP(c, *ploc, POP_BLOCK); return 1; case FINALLY_TRY: /* This POP_BLOCK gets the line number of the unwinding statement */ - ADDOP(c, POP_BLOCK); + ADDOP(c, *ploc, POP_BLOCK); if (preserve_tos) { - if (!compiler_push_fblock(c, POP_VALUE, NO_LABEL, NO_LABEL, NULL)) { + if (!compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL)) { return 0; } } @@ -2029,79 +2009,82 @@ compiler_unwind_fblock(struct compiler *c, struct fblockinfo *info, /* The finally block should appear to execute after the * statement causing the unwinding, so make the unwinding * instruction artificial */ - UNSET_LOC(c); + *ploc = NO_LOCATION; return 1; case FINALLY_END: if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - ADDOP(c, POP_TOP); /* exc_value */ + ADDOP(c, *ploc, POP_TOP); /* exc_value */ if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); + ADDOP(c, *ploc, POP_BLOCK); + ADDOP(c, *ploc, POP_EXCEPT); return 1; case WITH: case ASYNC_WITH: - SET_LOC(c, (stmt_ty)info->fb_datum); - ADDOP(c, POP_BLOCK); + *ploc = LOC((stmt_ty)info->fb_datum); + ADDOP(c, *ploc, POP_BLOCK); if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - if(!compiler_call_exit_with_nones(c)) { + if(!compiler_call_exit_with_nones(c, *ploc)) { return 0; } if (info->fb_type == ASYNC_WITH) { - ADDOP_I(c, GET_AWAITABLE, 2); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP_I(c, *ploc, GET_AWAITABLE, 2); + ADDOP_LOAD_CONST(c, *ploc, Py_None); + ADD_YIELD_FROM(c, *ploc, 1); } - ADDOP(c, POP_TOP); + ADDOP(c, *ploc, POP_TOP); /* The exit block should appear to execute after the * statement causing the unwinding, so make the unwinding * instruction artificial */ - UNSET_LOC(c); + *ploc = NO_LOCATION; return 1; - case HANDLER_CLEANUP: + case HANDLER_CLEANUP: { if (info->fb_datum) { - ADDOP(c, POP_BLOCK); + ADDOP(c, *ploc, POP_BLOCK); } if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); + ADDOP(c, *ploc, POP_BLOCK); + ADDOP(c, *ploc, POP_EXCEPT); if (info->fb_datum) { - ADDOP_LOAD_CONST(c, Py_None); - compiler_nameop(c, info->fb_datum, Store); - compiler_nameop(c, info->fb_datum, Del); + ADDOP_LOAD_CONST(c, *ploc, Py_None); + compiler_nameop(c, *ploc, info->fb_datum, Store); + compiler_nameop(c, *ploc, info->fb_datum, Del); } return 1; - - case POP_VALUE: + } + case POP_VALUE: { if (preserve_tos) { - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, *ploc, SWAP, 2); } - ADDOP(c, POP_TOP); + ADDOP(c, *ploc, POP_TOP); return 1; + } } Py_UNREACHABLE(); } /** Unwind block stack. If loop is not NULL, then stop when the first loop is encountered. 
*/ static int -compiler_unwind_fblock_stack(struct compiler *c, int preserve_tos, struct fblockinfo **loop) { +compiler_unwind_fblock_stack(struct compiler *c, location *ploc, + int preserve_tos, struct fblockinfo **loop) +{ if (c->u->u_nfblocks == 0) { return 1; } struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1]; if (top->fb_type == EXCEPTION_GROUP_HANDLER) { return compiler_error( - c, "'break', 'continue' and 'return' cannot appear in an except* block"); + c, *ploc, "'break', 'continue' and 'return' cannot appear in an except* block"); } if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) { *loop = top; @@ -2109,10 +2092,10 @@ compiler_unwind_fblock_stack(struct compiler *c, int preserve_tos, struct fblock } struct fblockinfo copy = *top; c->u->u_nfblocks--; - if (!compiler_unwind_fblock(c, &copy, preserve_tos)) { + if (!compiler_unwind_fblock(c, ploc, &copy, preserve_tos)) { return 0; } - if (!compiler_unwind_fblock_stack(c, preserve_tos, loop)) { + if (!compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop)) { return 0; } c->u->u_fblock[c->u->u_nfblocks] = copy; @@ -2124,7 +2107,7 @@ compiler_unwind_fblock_stack(struct compiler *c, int preserve_tos, struct fblock and for annotations. */ static int -compiler_body(struct compiler *c, asdl_stmt_seq *stmts) +compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) { int i = 0; stmt_ty st; @@ -2136,11 +2119,11 @@ compiler_body(struct compiler *c, asdl_stmt_seq *stmts) If body is empty, then lineno will be set later in assemble. */ if (c->u->u_scope_type == COMPILER_SCOPE_MODULE && asdl_seq_LEN(stmts)) { st = (stmt_ty)asdl_seq_GET(stmts, 0); - SET_LOC(c, st); + loc = LOC(st); } /* Every annotated class and module should have __annotations__. */ if (find_ann(stmts)) { - ADDOP(c, SETUP_ANNOTATIONS); + ADDOP(c, loc, SETUP_ANNOTATIONS); } if (!asdl_seq_LEN(stmts)) return 1; @@ -2152,8 +2135,7 @@ compiler_body(struct compiler *c, asdl_stmt_seq *stmts) st = (stmt_ty)asdl_seq_GET(stmts, 0); assert(st->kind == Expr_kind); VISIT(c, expr, st->v.Expr.value); - UNSET_LOC(c); - if (!compiler_nameop(c, &_Py_ID(__doc__), Store)) + if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store)) return 0; } } @@ -2162,34 +2144,31 @@ compiler_body(struct compiler *c, asdl_stmt_seq *stmts) return 1; } -static PyCodeObject * -compiler_mod(struct compiler *c, mod_ty mod) +static int +compiler_codegen(struct compiler *c, mod_ty mod) { - PyCodeObject *co; - int addNone = 1; _Py_DECLARE_STR(anon_module, "<module>"); if (!compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE, mod, 1)) { - return NULL; + return 0; } - c->u->u_loc.lineno = 1; + location loc = LOCATION(1, 1, 0, 0); switch (mod->kind) { case Module_kind: - if (!compiler_body(c, mod->v.Module.body)) { + if (!compiler_body(c, loc, mod->v.Module.body)) { compiler_exit_scope(c); return 0; } break; case Interactive_kind: if (find_ann(mod->v.Interactive.body)) { - ADDOP(c, SETUP_ANNOTATIONS); + ADDOP(c, loc, SETUP_ANNOTATIONS); } c->c_interactive = 1; VISIT_SEQ_IN_SCOPE(c, stmt, mod->v.Interactive.body); break; case Expression_kind: VISIT_IN_SCOPE(c, expr, mod->v.Expression.body); - addNone = 0; break; default: PyErr_Format(PyExc_SystemError, @@ -2197,7 +2176,17 @@ mod->kind); return 0; } - co = assemble(c, addNone); + return 1; +} + +static PyCodeObject * +compiler_mod(struct compiler *c, mod_ty mod) +{ + int addNone = mod->kind != Expression_kind; + if (!compiler_codegen(c, mod)) { + return NULL; + } +
PyCodeObject *co = assemble(c, addNone); compiler_exit_scope(c); return co; } @@ -2239,8 +2228,8 @@ compiler_lookup_arg(PyObject *dict, PyObject *name) } static int -compiler_make_closure(struct compiler *c, PyCodeObject *co, Py_ssize_t flags, - PyObject *qualname) +compiler_make_closure(struct compiler *c, location loc, + PyCodeObject *co, Py_ssize_t flags, PyObject *qualname) { if (qualname == NULL) qualname = co->co_name; @@ -2286,13 +2275,13 @@ compiler_make_closure(struct compiler *c, PyCodeObject *co, Py_ssize_t flags, Py_DECREF(freevars); return 0; } - ADDOP_I(c, LOAD_CLOSURE, arg); + ADDOP_I(c, loc, LOAD_CLOSURE, arg); } flags |= 0x08; - ADDOP_I(c, BUILD_TUPLE, co->co_nfreevars); + ADDOP_I(c, loc, BUILD_TUPLE, co->co_nfreevars); } - ADDOP_LOAD_CONST(c, (PyObject*)co); - ADDOP_I(c, MAKE_FUNCTION, flags); + ADDOP_LOAD_CONST(c, loc, (PyObject*)co); + ADDOP_I(c, loc, MAKE_FUNCTION, flags); return 1; } @@ -2316,18 +2305,16 @@ compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos) if (!decos) return 1; - struct location old_loc = c->u->u_loc; for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) { - SET_LOC(c, (expr_ty)asdl_seq_GET(decos, i)); - ADDOP_I(c, CALL, 0); + location loc = LOC((expr_ty)asdl_seq_GET(decos, i)); + ADDOP_I(c, loc, CALL, 0); } - c->u->u_loc = old_loc; return 1; } static int -compiler_visit_kwonlydefaults(struct compiler *c, asdl_arg_seq *kwonlyargs, - asdl_expr_seq *kw_defaults) +compiler_visit_kwonlydefaults(struct compiler *c, location loc, + asdl_arg_seq *kwonlyargs, asdl_expr_seq *kw_defaults) { /* Push a dict of keyword-only default values. @@ -2368,8 +2355,8 @@ compiler_visit_kwonlydefaults(struct compiler *c, asdl_arg_seq *kwonlyargs, Py_ssize_t default_count = PyList_GET_SIZE(keys); PyObject *keys_tuple = PyList_AsTuple(keys); Py_DECREF(keys); - ADDOP_LOAD_CONST_NEW(c, keys_tuple); - ADDOP_I(c, BUILD_CONST_KEY_MAP, default_count); + ADDOP_LOAD_CONST_NEW(c, loc, keys_tuple); + ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, default_count); assert(default_count > 0); return 1; } @@ -2385,26 +2372,26 @@ compiler_visit_kwonlydefaults(struct compiler *c, asdl_arg_seq *kwonlyargs, static int compiler_visit_annexpr(struct compiler *c, expr_ty annotation) { - ADDOP_LOAD_CONST_NEW(c, _PyAST_ExprAsUnicode(annotation)); + location loc = LOC(annotation); + ADDOP_LOAD_CONST_NEW(c, loc, _PyAST_ExprAsUnicode(annotation)); return 1; } static int compiler_visit_argannotation(struct compiler *c, identifier id, - expr_ty annotation, Py_ssize_t *annotations_len) + expr_ty annotation, Py_ssize_t *annotations_len, location loc) { if (!annotation) { return 1; } - PyObject *mangled = _Py_Mangle(c->u->u_private, id); if (!mangled) { return 0; } - ADDOP_LOAD_CONST(c, mangled); + ADDOP_LOAD_CONST(c, loc, mangled); Py_DECREF(mangled); - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, annotation); } else { @@ -2414,7 +2401,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id, // (Note that in theory we could end up here even for an argument // other than *args, but in practice the grammar doesn't allow it.) 
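/*
 * A minimal standalone sketch (not part of this patch) of the refcounting
 * idiom these compile.c and codecs.c hunks converge on: the two-step
 * "Py_INCREF(v); return v;" pattern is collapsed into Py_NewRef(), the C API
 * helper that increments the reference count and returns its argument.  The
 * function names below are illustrative only.
 */
#include <Python.h>

static PyObject *
first_item_old_style(PyObject *tuple)
{
    PyObject *v = PyTuple_GET_ITEM(tuple, 0);
    Py_INCREF(v);
    return v;
}

static PyObject *
first_item_new_style(PyObject *tuple)
{
    /* Same behaviour in a single expression: bump the refcount and
       return the borrowed item as a new strong reference. */
    return Py_NewRef(PyTuple_GET_ITEM(tuple, 0));
}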
VISIT(c, expr, annotation->v.Starred.value); - ADDOP_I(c, UNPACK_SEQUENCE, (Py_ssize_t) 1); + ADDOP_I(c, loc, UNPACK_SEQUENCE, (Py_ssize_t) 1); } else { VISIT(c, expr, annotation); @@ -2426,7 +2413,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id, static int compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args, - Py_ssize_t *annotations_len) + Py_ssize_t *annotations_len, location loc) { int i; for (i = 0; i < asdl_seq_LEN(args); i++) { @@ -2435,15 +2422,16 @@ compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args, c, arg->arg, arg->annotation, - annotations_len)) + annotations_len, + loc)) return 0; } return 1; } static int -compiler_visit_annotations(struct compiler *c, arguments_ty args, - expr_ty returns) +compiler_visit_annotations(struct compiler *c, location loc, + arguments_ty args, expr_ty returns) { /* Push arg annotation names and values. The expressions are evaluated out-of-order wrt the source code. @@ -2452,28 +2440,28 @@ compiler_visit_annotations(struct compiler *c, arguments_ty args, */ Py_ssize_t annotations_len = 0; - if (!compiler_visit_argannotations(c, args->args, &annotations_len)) + if (!compiler_visit_argannotations(c, args->args, &annotations_len, loc)) return 0; - if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len)) + if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc)) return 0; if (args->vararg && args->vararg->annotation && !compiler_visit_argannotation(c, args->vararg->arg, - args->vararg->annotation, &annotations_len)) + args->vararg->annotation, &annotations_len, loc)) return 0; - if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len)) + if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc)) return 0; if (args->kwarg && args->kwarg->annotation && !compiler_visit_argannotation(c, args->kwarg->arg, - args->kwarg->annotation, &annotations_len)) + args->kwarg->annotation, &annotations_len, loc)) return 0; if (!compiler_visit_argannotation(c, &_Py_ID(return), returns, - &annotations_len)) { + &annotations_len, loc)) { return 0; } if (annotations_len) { - ADDOP_I(c, BUILD_TUPLE, annotations_len); + ADDOP_I(c, loc, BUILD_TUPLE, annotations_len); return 1; } @@ -2481,24 +2469,27 @@ compiler_visit_annotations(struct compiler *c, arguments_ty args, } static int -compiler_visit_defaults(struct compiler *c, arguments_ty args) +compiler_visit_defaults(struct compiler *c, arguments_ty args, + location loc) { VISIT_SEQ(c, expr, args->defaults); - ADDOP_I(c, BUILD_TUPLE, asdl_seq_LEN(args->defaults)); + ADDOP_I(c, loc, BUILD_TUPLE, asdl_seq_LEN(args->defaults)); return 1; } static Py_ssize_t -compiler_default_arguments(struct compiler *c, arguments_ty args) +compiler_default_arguments(struct compiler *c, location loc, + arguments_ty args) { Py_ssize_t funcflags = 0; if (args->defaults && asdl_seq_LEN(args->defaults) > 0) { - if (!compiler_visit_defaults(c, args)) + if (!compiler_visit_defaults(c, args, loc)) return -1; funcflags |= 0x01; } if (args->kwonlyargs) { - int res = compiler_visit_kwonlydefaults(c, args->kwonlyargs, + int res = compiler_visit_kwonlydefaults(c, loc, + args->kwonlyargs, args->kw_defaults); if (res == 0) { return -1; @@ -2511,15 +2502,15 @@ compiler_default_arguments(struct compiler *c, arguments_ty args) } static int -forbidden_name(struct compiler *c, identifier name, expr_context_ty ctx) +forbidden_name(struct compiler *c, location loc, identifier name, + expr_context_ty ctx) { - if (ctx == Store && 
_PyUnicode_EqualToASCIIString(name, "__debug__")) { - compiler_error(c, "cannot assign to __debug__"); + compiler_error(c, loc, "cannot assign to __debug__"); return 1; } if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) { - compiler_error(c, "cannot delete __debug__"); + compiler_error(c, loc, "cannot delete __debug__"); return 1; } return 0; @@ -2529,7 +2520,7 @@ static int compiler_check_debug_one_arg(struct compiler *c, arg_ty arg) { if (arg != NULL) { - if (forbidden_name(c, arg->arg, Store)) + if (forbidden_name(c, LOC(arg), arg->arg, Store)) return 0; } return 1; @@ -2563,6 +2554,42 @@ compiler_check_debug_args(struct compiler *c, arguments_ty args) return 1; } +static inline int +insert_instruction(basicblock *block, int pos, struct instr *instr) { + if (basicblock_next_instr(block) < 0) { + return -1; + } + for (int i = block->b_iused - 1; i > pos; i--) { + block->b_instr[i] = block->b_instr[i-1]; + } + block->b_instr[pos] = *instr; + return 0; +} + +static int +wrap_in_stopiteration_handler(struct compiler *c) +{ + NEW_JUMP_TARGET_LABEL(c, handler); + + /* Insert SETUP_CLEANUP at start */ + struct instr setup = { + .i_opcode = SETUP_CLEANUP, + .i_oparg = handler.id, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + if (insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup)) { + return 0; + } + + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + ADDOP(c, NO_LOCATION, RETURN_VALUE); + USE_LABEL(c, handler); + ADDOP(c, NO_LOCATION, STOPITERATION_ERROR); + ADDOP_I(c, NO_LOCATION, RERAISE, 1); + return 1; +} + static int compiler_function(struct compiler *c, stmt_ty s, int is_async) { @@ -2611,12 +2638,12 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) firstlineno = ((expr_ty)asdl_seq_GET(decos, 0))->lineno; } - funcflags = compiler_default_arguments(c, args); + location loc = LOC(s); + funcflags = compiler_default_arguments(c, loc, args); if (funcflags == -1) { return 0; } - - annotations = compiler_visit_annotations(c, args, returns); + annotations = compiler_visit_annotations(c, loc, args, returns); if (annotations == 0) { return 0; } @@ -2643,17 +2670,21 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) for (i = docstring ? 1 : 0; i < asdl_seq_LEN(body); i++) { VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i)); } + if (c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator) { + if (!wrap_in_stopiteration_handler(c)) { + compiler_exit_scope(c); + return 0; + } + } co = assemble(c, 1); - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (co == NULL) { Py_XDECREF(qualname); Py_XDECREF(co); return 0; } - - if (!compiler_make_closure(c, co, funcflags, qualname)) { + if (!compiler_make_closure(c, loc, co, funcflags, qualname)) { Py_DECREF(qualname); Py_DECREF(co); return 0; @@ -2663,7 +2694,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) if (!compiler_apply_decorators(c, decos)) return 0; - return compiler_nameop(c, name, Store); + return compiler_nameop(c, loc, name, Store); } static int @@ -2691,7 +2722,6 @@ compiler_class(struct compiler *c, stmt_ty s) is the keyword arguments and **kwds argument This borrows from compiler_call. */ - /* 1. 
compile the class body into a code object */ if (!compiler_enter_scope(c, s->v.ClassDef.name, COMPILER_SCOPE_CLASS, (void *)s, firstlineno)) { @@ -2699,32 +2729,31 @@ compiler_class(struct compiler *c, stmt_ty s) } /* this block represents what we do in the new scope */ { + location loc = LOCATION(firstlineno, firstlineno, 0, 0); /* use the class name for name mangling */ - Py_INCREF(s->v.ClassDef.name); - Py_XSETREF(c->u->u_private, s->v.ClassDef.name); + Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); /* load (global) __name__ ... */ - if (!compiler_nameop(c, &_Py_ID(__name__), Load)) { + if (!compiler_nameop(c, loc, &_Py_ID(__name__), Load)) { compiler_exit_scope(c); return 0; } /* ... and store it as __module__ */ - if (!compiler_nameop(c, &_Py_ID(__module__), Store)) { + if (!compiler_nameop(c, loc, &_Py_ID(__module__), Store)) { compiler_exit_scope(c); return 0; } assert(c->u->u_qualname); - ADDOP_LOAD_CONST(c, c->u->u_qualname); - if (!compiler_nameop(c, &_Py_ID(__qualname__), Store)) { + ADDOP_LOAD_CONST(c, loc, c->u->u_qualname); + if (!compiler_nameop(c, loc, &_Py_ID(__qualname__), Store)) { compiler_exit_scope(c); return 0; } /* compile the body proper */ - if (!compiler_body(c, s->v.ClassDef.body)) { + if (!compiler_body(c, loc, s->v.ClassDef.body)) { compiler_exit_scope(c); return 0; } /* The following code is artificial */ - UNSET_LOC(c); /* Return __classcell__ if it is referenced, otherwise return None */ if (c->u->u_ste->ste_needs_class_closure) { /* Store __classcell__ into class namespace & return it */ @@ -2734,10 +2763,9 @@ compiler_class(struct compiler *c, stmt_ty s) return 0; } assert(i == 0); - - ADDOP_I(c, LOAD_CLOSURE, i); - ADDOP_I(c, COPY, 1); - if (!compiler_nameop(c, &_Py_ID(__classcell__), Store)) { + ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i); + ADDOP_I(c, NO_LOCATION, COPY, 1); + if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store)) { compiler_exit_scope(c); return 0; } @@ -2745,9 +2773,9 @@ compiler_class(struct compiler *c, stmt_ty s) else { /* No methods referenced __class__, so just return None */ assert(PyDict_GET_SIZE(c->u->u_cellvars) == 0); - ADDOP_LOAD_CONST(c, Py_None); + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); } - ADDOP_IN_SCOPE(c, RETURN_VALUE); + ADDOP_IN_SCOPE(c, NO_LOCATION, RETURN_VALUE); /* create the code object */ co = assemble(c, 1); } @@ -2756,29 +2784,32 @@ compiler_class(struct compiler *c, stmt_ty s) if (co == NULL) return 0; + location loc = LOC(s); /* 2. load the 'build_class' function */ - ADDOP(c, PUSH_NULL); - ADDOP(c, LOAD_BUILD_CLASS); + ADDOP(c, loc, PUSH_NULL); + ADDOP(c, loc, LOAD_BUILD_CLASS); /* 3. load a function (or closure) made from the code object */ - if (!compiler_make_closure(c, co, 0, NULL)) { + if (!compiler_make_closure(c, loc, co, 0, NULL)) { Py_DECREF(co); return 0; } Py_DECREF(co); /* 4. load class name */ - ADDOP_LOAD_CONST(c, s->v.ClassDef.name); + ADDOP_LOAD_CONST(c, loc, s->v.ClassDef.name); /* 5. generate the rest of the code for the call */ - if (!compiler_call_helper(c, 2, s->v.ClassDef.bases, s->v.ClassDef.keywords)) + if (!compiler_call_helper(c, loc, 2, + s->v.ClassDef.bases, + s->v.ClassDef.keywords)) return 0; /* 6. apply decorators */ if (!compiler_apply_decorators(c, decos)) return 0; /* 7. store into */ - if (!compiler_nameop(c, s->v.ClassDef.name, Store)) + if (!compiler_nameop(c, loc, s->v.ClassDef.name, Store)) return 0; return 1; } @@ -2816,7 +2847,7 @@ check_compare(struct compiler *c, expr_ty e) const char *msg = (op == Is) ? "\"is\" with a literal. 
Did you mean \"==\"?" : "\"is not\" with a literal. Did you mean \"!=\"?"; - return compiler_warn(c, msg); + return compiler_warn(c, LOC(e), msg); } } left = right; @@ -2824,7 +2855,8 @@ check_compare(struct compiler *c, expr_ty e) return 1; } -static int compiler_addcompare(struct compiler *c, cmpop_ty op) +static int compiler_addcompare(struct compiler *c, location loc, + cmpop_ty op) { int cmp; switch (op) { @@ -2847,33 +2879,35 @@ static int compiler_addcompare(struct compiler *c, cmpop_ty op) cmp = Py_GE; break; case Is: - ADDOP_I(c, IS_OP, 0); + ADDOP_I(c, loc, IS_OP, 0); return 1; case IsNot: - ADDOP_I(c, IS_OP, 1); + ADDOP_I(c, loc, IS_OP, 1); return 1; case In: - ADDOP_I(c, CONTAINS_OP, 0); + ADDOP_I(c, loc, CONTAINS_OP, 0); return 1; case NotIn: - ADDOP_I(c, CONTAINS_OP, 1); + ADDOP_I(c, loc, CONTAINS_OP, 1); return 1; default: Py_UNREACHABLE(); } - ADDOP_I(c, COMPARE_OP, cmp); + ADDOP_I(c, loc, COMPARE_OP, cmp); return 1; } static int -compiler_jump_if(struct compiler *c, expr_ty e, jump_target_label next, int cond) +compiler_jump_if(struct compiler *c, location loc, + expr_ty e, jump_target_label next, int cond) { switch (e->kind) { case UnaryOp_kind: - if (e->v.UnaryOp.op == Not) - return compiler_jump_if(c, e->v.UnaryOp.operand, next, !cond); + if (e->v.UnaryOp.op == Not) { + return compiler_jump_if(c, loc, e->v.UnaryOp.operand, next, !cond); + } /* fallback to general implementation */ break; case BoolOp_kind: { @@ -2887,11 +2921,13 @@ compiler_jump_if(struct compiler *c, expr_ty e, jump_target_label next, int cond next2 = new_next2; } for (i = 0; i < n; ++i) { - if (!compiler_jump_if(c, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) + if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) { return 0; + } } - if (!compiler_jump_if(c, (expr_ty)asdl_seq_GET(s, n), next, cond)) + if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) { return 0; + } if (!SAME_LABEL(next2, next)) { USE_LABEL(c, next2); } @@ -2900,46 +2936,48 @@ compiler_jump_if(struct compiler *c, expr_ty e, jump_target_label next, int cond case IfExp_kind: { NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, next2); - if (!compiler_jump_if(c, e->v.IfExp.test, next2, 0)) + if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) { return 0; - if (!compiler_jump_if(c, e->v.IfExp.body, next, cond)) + } + if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) { return 0; - ADDOP_JUMP_NOLINE(c, JUMP, end); + } + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, next2); - if (!compiler_jump_if(c, e->v.IfExp.orelse, next, cond)) + if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) { return 0; + } USE_LABEL(c, end); return 1; } case Compare_kind: { - SET_LOC(c, e); - Py_ssize_t i, n = asdl_seq_LEN(e->v.Compare.ops) - 1; + Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1; if (n > 0) { if (!check_compare(c, e)) { return 0; } NEW_JUMP_TARGET_LABEL(c, cleanup); VISIT(c, expr, e->v.Compare.left); - for (i = 0; i < n; i++) { + for (Py_ssize_t i = 0; i < n; i++) { VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); - ADDOP_I(c, SWAP, 2); - ADDOP_I(c, COPY, 2); - ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, i)); - ADDOP_JUMP(c, POP_JUMP_IF_FALSE, cleanup); + ADDOP_I(c, LOC(e), SWAP, 2); + ADDOP_I(c, LOC(e), COPY, 2); + ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, i)); + ADDOP_JUMP(c, LOC(e), POP_JUMP_IF_FALSE, cleanup); } VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n)); - ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, 
n)); - ADDOP_JUMP(c, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); + ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, n)); + ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); NEW_JUMP_TARGET_LABEL(c, end); - ADDOP_JUMP_NOLINE(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, cleanup); - ADDOP(c, POP_TOP); + ADDOP(c, LOC(e), POP_TOP); if (!cond) { - ADDOP_JUMP_NOLINE(c, JUMP, next); + ADDOP_JUMP(c, NO_LOCATION, JUMP, next); } USE_LABEL(c, end); @@ -2955,7 +2993,7 @@ compiler_jump_if(struct compiler *c, expr_ty e, jump_target_label next, int cond /* general implementation */ VISIT(c, expr, e); - ADDOP_JUMP(c, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); + ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); return 1; } @@ -2966,10 +3004,11 @@ compiler_ifexp(struct compiler *c, expr_ty e) NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, next); - if (!compiler_jump_if(c, e->v.IfExp.test, next, 0)) + if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) { return 0; + } VISIT(c, expr, e->v.IfExp.body); - ADDOP_JUMP_NOLINE(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, next); VISIT(c, expr, e->v.IfExp.orelse); @@ -2990,7 +3029,8 @@ compiler_lambda(struct compiler *c, expr_ty e) if (!compiler_check_debug_args(c, args)) return 0; - funcflags = compiler_default_arguments(c, args); + location loc = LOC(e); + funcflags = compiler_default_arguments(c, loc, args); if (funcflags == -1) { return 0; } @@ -3013,18 +3053,18 @@ compiler_lambda(struct compiler *c, expr_ty e) co = assemble(c, 0); } else { - ADDOP_IN_SCOPE(c, RETURN_VALUE); + location loc = LOCATION(e->lineno, e->lineno, 0, 0); + ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); co = assemble(c, 1); } - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (co == NULL) { Py_DECREF(qualname); return 0; } - if (!compiler_make_closure(c, co, funcflags, qualname)) { + if (!compiler_make_closure(c, loc, co, funcflags, qualname)) { Py_DECREF(qualname); Py_DECREF(co); return 0; @@ -3048,12 +3088,12 @@ compiler_if(struct compiler *c, stmt_ty s) else { next = end; } - if (!compiler_jump_if(c, s->v.If.test, next, 0)) { + if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) { return 0; } VISIT_SEQ(c, stmt, s->v.If.body); if (asdl_seq_LEN(s->v.If.orelse)) { - ADDOP_JUMP_NOLINE(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, next); VISIT_SEQ(c, stmt, s->v.If.orelse); @@ -3066,28 +3106,29 @@ compiler_if(struct compiler *c, stmt_ty s) static int compiler_for(struct compiler *c, stmt_ty s) { + location loc = LOC(s); NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, body); NEW_JUMP_TARGET_LABEL(c, cleanup); NEW_JUMP_TARGET_LABEL(c, end); - if (!compiler_push_fblock(c, FOR_LOOP, start, end, NULL)) { + if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) { return 0; } VISIT(c, expr, s->v.For.iter); - ADDOP(c, GET_ITER); + ADDOP(c, loc, GET_ITER); USE_LABEL(c, start); - ADDOP_JUMP(c, FOR_ITER, cleanup); + ADDOP_JUMP(c, loc, FOR_ITER, cleanup); USE_LABEL(c, body); VISIT(c, expr, s->v.For.target); VISIT_SEQ(c, stmt, s->v.For.body); /* Mark jump as artificial */ - UNSET_LOC(c); - ADDOP_JUMP(c, JUMP, start); + ADDOP_JUMP(c, NO_LOCATION, JUMP, start); USE_LABEL(c, cleanup); + ADDOP(c, NO_LOCATION, END_FOR); compiler_pop_fblock(c, FOR_LOOP, start); @@ -3101,10 +3142,11 @@ compiler_for(struct compiler *c, stmt_ty s) static int compiler_async_for(struct 
compiler *c, stmt_ty s) { + location loc = LOC(s); if (IS_TOP_LEVEL_AWAIT(c)){ c->u->u_ste->ste_coroutine = 1; } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION) { - return compiler_error(c, "'async for' outside async function"); + return compiler_error(c, loc, "'async for' outside async function"); } NEW_JUMP_TARGET_LABEL(c, start); @@ -3112,25 +3154,24 @@ compiler_async_for(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, end); VISIT(c, expr, s->v.AsyncFor.iter); - ADDOP(c, GET_AITER); + ADDOP(c, loc, GET_AITER); USE_LABEL(c, start); - if (!compiler_push_fblock(c, FOR_LOOP, start, end, NULL)) { + if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) { return 0; } /* SETUP_FINALLY to guard the __anext__ call */ - ADDOP_JUMP(c, SETUP_FINALLY, except); - ADDOP(c, GET_ANEXT); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); - ADDOP(c, POP_BLOCK); /* for SETUP_FINALLY */ + ADDOP_JUMP(c, loc, SETUP_FINALLY, except); + ADDOP(c, loc, GET_ANEXT); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); + ADDOP(c, loc, POP_BLOCK); /* for SETUP_FINALLY */ /* Success block for __anext__ */ VISIT(c, expr, s->v.AsyncFor.target); VISIT_SEQ(c, stmt, s->v.AsyncFor.body); /* Mark jump as artificial */ - UNSET_LOC(c); - ADDOP_JUMP(c, JUMP, start); + ADDOP_JUMP(c, NO_LOCATION, JUMP, start); compiler_pop_fblock(c, FOR_LOOP, start); @@ -3139,8 +3180,8 @@ compiler_async_for(struct compiler *c, stmt_ty s) /* Use same line number as the iterator, * as the END_ASYNC_FOR succeeds the `for`, not the body. */ - SET_LOC(c, s->v.AsyncFor.iter); - ADDOP(c, END_ASYNC_FOR); + loc = LOC(s->v.AsyncFor.iter); + ADDOP(c, loc, END_ASYNC_FOR); /* `else` block */ VISIT_SEQ(c, stmt, s->v.For.orelse); @@ -3158,17 +3199,16 @@ compiler_while(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, anchor); USE_LABEL(c, loop); - if (!compiler_push_fblock(c, WHILE_LOOP, loop, end, NULL)) { + if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) { return 0; } - if (!compiler_jump_if(c, s->v.While.test, anchor, 0)) { + if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) { return 0; } USE_LABEL(c, body); VISIT_SEQ(c, stmt, s->v.While.body); - SET_LOC(c, s); - if (!compiler_jump_if(c, s->v.While.test, body, 1)) { + if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) { return 0; } @@ -3186,79 +3226,93 @@ compiler_while(struct compiler *c, stmt_ty s) static int compiler_return(struct compiler *c, stmt_ty s) { + location loc = LOC(s); int preserve_tos = ((s->v.Return.value != NULL) && (s->v.Return.value->kind != Constant_kind)); if (c->u->u_ste->ste_type != FunctionBlock) - return compiler_error(c, "'return' outside function"); + return compiler_error(c, loc, "'return' outside function"); if (s->v.Return.value != NULL && c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator) { return compiler_error( - c, "'return' with value in async generator"); + c, loc, "'return' with value in async generator"); } + if (preserve_tos) { VISIT(c, expr, s->v.Return.value); } else { /* Emit instruction with line number for return value */ if (s->v.Return.value != NULL) { - SET_LOC(c, s->v.Return.value); - ADDOP(c, NOP); + loc = LOC(s->v.Return.value); + ADDOP(c, loc, NOP); } } if (s->v.Return.value == NULL || s->v.Return.value->lineno != s->lineno) { - SET_LOC(c, s); - ADDOP(c, NOP); + loc = LOC(s); + ADDOP(c, loc, NOP); } - if (!compiler_unwind_fblock_stack(c, preserve_tos, NULL)) + if (!compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL)) return 0; if (s->v.Return.value == 
NULL) { - ADDOP_LOAD_CONST(c, Py_None); + ADDOP_LOAD_CONST(c, loc, Py_None); } else if (!preserve_tos) { - ADDOP_LOAD_CONST(c, s->v.Return.value->v.Constant.value); + ADDOP_LOAD_CONST(c, loc, s->v.Return.value->v.Constant.value); } - ADDOP(c, RETURN_VALUE); + ADDOP(c, loc, RETURN_VALUE); return 1; } static int -compiler_break(struct compiler *c) +compiler_break(struct compiler *c, location loc) { struct fblockinfo *loop = NULL; /* Emit instruction with line number */ - ADDOP(c, NOP); - if (!compiler_unwind_fblock_stack(c, 0, &loop)) { + ADDOP(c, loc, NOP); + if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) { return 0; } if (loop == NULL) { - return compiler_error(c, "'break' outside loop"); + return compiler_error(c, loc, "'break' outside loop"); } - if (!compiler_unwind_fblock(c, loop, 0)) { + if (!compiler_unwind_fblock(c, &loc, loop, 0)) { return 0; } - ADDOP_JUMP(c, JUMP, loop->fb_exit); + ADDOP_JUMP(c, loc, JUMP, loop->fb_exit); return 1; } static int -compiler_continue(struct compiler *c) +compiler_continue(struct compiler *c, location loc) { struct fblockinfo *loop = NULL; /* Emit instruction with line number */ - ADDOP(c, NOP); - if (!compiler_unwind_fblock_stack(c, 0, &loop)) { + ADDOP(c, loc, NOP); + if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) { return 0; } if (loop == NULL) { - return compiler_error(c, "'continue' not properly in loop"); + return compiler_error(c, loc, "'continue' not properly in loop"); } - ADDOP_JUMP(c, JUMP, loop->fb_block); + ADDOP_JUMP(c, loc, JUMP, loop->fb_block); return 1; } +static location +location_of_last_executing_statement(asdl_stmt_seq *stmts) +{ + for (Py_ssize_t i = asdl_seq_LEN(stmts) - 1; i >= 0; i--) { + location loc = LOC((stmt_ty)asdl_seq_GET(stmts, i)); + if (loc.lineno > 0) { + return loc; + } + } + return NO_LOCATION; +} + /* Code generated for "try: <code> finally: <code>" is as follows: SETUP_FINALLY L @@ -3291,16 +3345,18 @@ compiler_continue(struct compiler *c) static int compiler_try_finally(struct compiler *c, stmt_ty s) { + location loc = LOC(s); + NEW_JUMP_TARGET_LABEL(c, body); NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, exit); NEW_JUMP_TARGET_LABEL(c, cleanup); /* `try` block */ - ADDOP_JUMP(c, SETUP_FINALLY, end); + ADDOP_JUMP(c, loc, SETUP_FINALLY, end); USE_LABEL(c, body); - if (!compiler_push_fblock(c, FINALLY_TRY, body, end, s->v.Try.finalbody)) + if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.Try.finalbody)) return 0; if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) { if (!compiler_try_except(c, s)) @@ -3309,25 +3365,28 @@ compiler_try_finally(struct compiler *c, stmt_ty s) else { VISIT_SEQ(c, stmt, s->v.Try.body); } - ADDOP_NOLINE(c, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_BLOCK); compiler_pop_fblock(c, FINALLY_TRY, body); VISIT_SEQ(c, stmt, s->v.Try.finalbody); - ADDOP_JUMP_NOLINE(c, JUMP, exit); + + ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); /* `finally` block */ USE_LABEL(c, end); - UNSET_LOC(c); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); - if (!compiler_push_fblock(c, FINALLY_END, end, NO_LABEL, NULL)) + loc = NO_LOCATION; + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); + ADDOP(c, loc, PUSH_EXC_INFO); + if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) return 0; VISIT_SEQ(c, stmt, s->v.Try.finalbody); + loc = location_of_last_executing_statement(s->v.Try.finalbody); compiler_pop_fblock(c, FINALLY_END, end); - ADDOP_I(c, RERAISE, 0); + + ADDOP_I(c, loc, RERAISE, 0); USE_LABEL(c, cleanup); - POP_EXCEPT_AND_RERAISE(c); +
POP_EXCEPT_AND_RERAISE(c, loc); USE_LABEL(c, exit); return 1; @@ -3336,15 +3395,17 @@ compiler_try_finally(struct compiler *c, stmt_ty s) static int compiler_try_star_finally(struct compiler *c, stmt_ty s) { + location loc = LOC(s); + NEW_JUMP_TARGET_LABEL(c, body); NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, exit); NEW_JUMP_TARGET_LABEL(c, cleanup); /* `try` block */ - ADDOP_JUMP(c, SETUP_FINALLY, end); + ADDOP_JUMP(c, loc, SETUP_FINALLY, end); USE_LABEL(c, body); - if (!compiler_push_fblock(c, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) { + if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) { return 0; } if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) { @@ -3355,26 +3416,29 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) else { VISIT_SEQ(c, stmt, s->v.TryStar.body); } - ADDOP_NOLINE(c, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_BLOCK); compiler_pop_fblock(c, FINALLY_TRY, body); VISIT_SEQ(c, stmt, s->v.TryStar.finalbody); - ADDOP_JUMP_NOLINE(c, JUMP, exit); + + ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); /* `finally` block */ USE_LABEL(c, end); - UNSET_LOC(c); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); - if (!compiler_push_fblock(c, FINALLY_END, end, NO_LABEL, NULL)) { + loc = NO_LOCATION; + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); + ADDOP(c, loc, PUSH_EXC_INFO); + if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) { return 0; } VISIT_SEQ(c, stmt, s->v.TryStar.finalbody); + loc = location_of_last_executing_statement(s->v.Try.finalbody); + compiler_pop_fblock(c, FINALLY_END, end); - ADDOP_I(c, RERAISE, 0); + ADDOP_I(c, loc, RERAISE, 0); USE_LABEL(c, cleanup); - POP_EXCEPT_AND_RERAISE(c); + POP_EXCEPT_AND_RERAISE(c, loc); USE_LABEL(c, exit); return 1; @@ -3412,6 +3476,7 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) static int compiler_try_except(struct compiler *c, stmt_ty s) { + location loc = LOC(s); Py_ssize_t i, n; NEW_JUMP_TARGET_LABEL(c, body); @@ -3419,47 +3484,46 @@ compiler_try_except(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, end); NEW_JUMP_TARGET_LABEL(c, cleanup); - ADDOP_JUMP(c, SETUP_FINALLY, except); + ADDOP_JUMP(c, loc, SETUP_FINALLY, except); USE_LABEL(c, body); - if (!compiler_push_fblock(c, TRY_EXCEPT, body, NO_LABEL, NULL)) + if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) return 0; VISIT_SEQ(c, stmt, s->v.Try.body); compiler_pop_fblock(c, TRY_EXCEPT, body); - ADDOP_NOLINE(c, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_BLOCK); if (s->v.Try.orelse && asdl_seq_LEN(s->v.Try.orelse)) { VISIT_SEQ(c, stmt, s->v.Try.orelse); } - ADDOP_JUMP_NOLINE(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); n = asdl_seq_LEN(s->v.Try.handlers); USE_LABEL(c, except); - UNSET_LOC(c); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); + ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup); + ADDOP(c, NO_LOCATION, PUSH_EXC_INFO); /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL)) + if (!compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL)) return 0; for (i = 0; i < n; i++) { excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( s->v.Try.handlers, i); - SET_LOC(c, handler); + location loc = LOC(handler); if (!handler->v.ExceptHandler.type && i < n-1) { - return compiler_error(c, "default 'except:' must be last"); + return compiler_error(c, loc, "default 'except:' must be last"); } 
NEW_JUMP_TARGET_LABEL(c, next_except); except = next_except; if (handler->v.ExceptHandler.type) { VISIT(c, expr, handler->v.ExceptHandler.type); - ADDOP(c, CHECK_EXC_MATCH); - ADDOP_JUMP(c, POP_JUMP_IF_FALSE, except); + ADDOP(c, loc, CHECK_EXC_MATCH); + ADDOP_JUMP(c, loc, POP_JUMP_IF_FALSE, except); } if (handler->v.ExceptHandler.name) { NEW_JUMP_TARGET_LABEL(c, cleanup_end); NEW_JUMP_TARGET_LABEL(c, cleanup_body); - compiler_nameop(c, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store); /* try: @@ -3473,62 +3537,59 @@ compiler_try_except(struct compiler *c, stmt_ty s) */ /* second try: */ - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup_end); + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end); USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) + if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, + NO_LABEL, handler->v.ExceptHandler.name)) { return 0; + } /* second # body */ VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); - ADDOP_LOAD_CONST(c, Py_None); - compiler_nameop(c, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, handler->v.ExceptHandler.name, Del); - ADDOP_JUMP(c, JUMP, end); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_EXCEPT); + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); /* except: */ USE_LABEL(c, cleanup_end); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - - ADDOP_LOAD_CONST(c, Py_None); - compiler_nameop(c, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, handler->v.ExceptHandler.name, Del); + /* name = None; del name; # artificial */ + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); - ADDOP_I(c, RERAISE, 1); + ADDOP_I(c, NO_LOCATION, RERAISE, 1); } else { NEW_JUMP_TARGET_LABEL(c, cleanup_body); - ADDOP(c, POP_TOP); /* exc_value */ + ADDOP(c, loc, POP_TOP); /* exc_value */ USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NO_LABEL, NULL)) + if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, NULL)) return 0; VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); - UNSET_LOC(c); - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); - ADDOP_JUMP(c, JUMP, end); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_EXCEPT); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); } USE_LABEL(c, except); } - /* Mark as artificial */ - UNSET_LOC(c); + /* artificial */ compiler_pop_fblock(c, EXCEPTION_HANDLER, NO_LABEL); - ADDOP_I(c, RERAISE, 0); + ADDOP_I(c, NO_LOCATION, RERAISE, 0); USE_LABEL(c, cleanup); - POP_EXCEPT_AND_RERAISE(c); + POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); USE_LABEL(c, end); return 1; @@ -3586,6 +3647,8 @@ compiler_try_except(struct compiler *c, stmt_ty s) static int compiler_try_star_except(struct compiler *c, stmt_ty s) { + location loc = LOC(s); + NEW_JUMP_TARGET_LABEL(c, body); NEW_JUMP_TARGET_LABEL(c, except); 
NEW_JUMP_TARGET_LABEL(c, orelse); @@ -3593,32 +3656,31 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, cleanup); NEW_JUMP_TARGET_LABEL(c, reraise_star); - ADDOP_JUMP(c, SETUP_FINALLY, except); + ADDOP_JUMP(c, loc, SETUP_FINALLY, except); USE_LABEL(c, body); - if (!compiler_push_fblock(c, TRY_EXCEPT, body, NO_LABEL, NULL)) { + if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) { return 0; } VISIT_SEQ(c, stmt, s->v.TryStar.body); compiler_pop_fblock(c, TRY_EXCEPT, body); - ADDOP_NOLINE(c, POP_BLOCK); - ADDOP_JUMP_NOLINE(c, JUMP, orelse); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP_JUMP(c, NO_LOCATION, JUMP, orelse); Py_ssize_t n = asdl_seq_LEN(s->v.TryStar.handlers); USE_LABEL(c, except); - UNSET_LOC(c); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); + ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup); + ADDOP(c, NO_LOCATION, PUSH_EXC_INFO); /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, EXCEPTION_GROUP_HANDLER, + if (!compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER, NO_LABEL, NO_LABEL, "except handler")) { return 0; } for (Py_ssize_t i = 0; i < n; i++) { excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( s->v.TryStar.handlers, i); - SET_LOC(c, handler); + location loc = LOC(handler); NEW_JUMP_TARGET_LABEL(c, next_except); except = next_except; NEW_JUMP_TARGET_LABEL(c, handle_match); @@ -3628,7 +3690,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) [exc] COPY 1 [orig, exc] */ - ADDOP_I(c, COPY, 1); + ADDOP_I(c, loc, COPY, 1); /* create empty list for exceptions raised/reraise in the except* blocks */ /* @@ -3636,16 +3698,16 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) [orig, exc, []] SWAP 2 [orig, [], exc] */ - ADDOP_I(c, BUILD_LIST, 0); - ADDOP_I(c, SWAP, 2); + ADDOP_I(c, loc, BUILD_LIST, 0); + ADDOP_I(c, loc, SWAP, 2); } if (handler->v.ExceptHandler.type) { VISIT(c, expr, handler->v.ExceptHandler.type); - ADDOP(c, CHECK_EG_MATCH); - ADDOP_I(c, COPY, 1); - ADDOP_JUMP(c, POP_JUMP_IF_NOT_NONE, handle_match); - ADDOP(c, POP_TOP); // match - ADDOP_JUMP(c, JUMP, except); + ADDOP(c, loc, CHECK_EG_MATCH); + ADDOP_I(c, loc, COPY, 1); + ADDOP_JUMP(c, loc, POP_JUMP_IF_NOT_NONE, handle_match); + ADDOP(c, loc, POP_TOP); // match + ADDOP_JUMP(c, loc, JUMP, except); } USE_LABEL(c, handle_match); @@ -3654,10 +3716,10 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, cleanup_body); if (handler->v.ExceptHandler.name) { - compiler_nameop(c, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store); } else { - ADDOP(c, POP_TOP); // match + ADDOP(c, loc, POP_TOP); // match } /* @@ -3671,74 +3733,71 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) del name */ /* second try: */ - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup_end); + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end); USE_LABEL(c, cleanup_body); - if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) + if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) { return 0; + } /* second # body */ VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - ADDOP(c, POP_BLOCK); + /* name = None; del name; # artificial */ + ADDOP(c, NO_LOCATION, POP_BLOCK); if (handler->v.ExceptHandler.name) { - 
ADDOP_LOAD_CONST(c, Py_None); - compiler_nameop(c, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, handler->v.ExceptHandler.name, Del); + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); } - ADDOP_JUMP(c, JUMP, except); + ADDOP_JUMP(c, NO_LOCATION, JUMP, except); /* except: */ USE_LABEL(c, cleanup_end); - /* name = None; del name; # Mark as artificial */ - UNSET_LOC(c); - + /* name = None; del name; # artificial */ if (handler->v.ExceptHandler.name) { - ADDOP_LOAD_CONST(c, Py_None); - compiler_nameop(c, handler->v.ExceptHandler.name, Store); - compiler_nameop(c, handler->v.ExceptHandler.name, Del); + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store); + compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del); } /* add exception raised to the res list */ - ADDOP_I(c, LIST_APPEND, 3); // exc - ADDOP(c, POP_TOP); // lasti - ADDOP_JUMP(c, JUMP, except); + ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc + ADDOP(c, NO_LOCATION, POP_TOP); // lasti + ADDOP_JUMP(c, NO_LOCATION, JUMP, except); USE_LABEL(c, except); if (i == n - 1) { /* Add exc to the list (if not None it's the unhandled part of the EG) */ - ADDOP_I(c, LIST_APPEND, 1); - ADDOP_JUMP(c, JUMP, reraise_star); + ADDOP_I(c, NO_LOCATION, LIST_APPEND, 1); + ADDOP_JUMP(c, NO_LOCATION, JUMP, reraise_star); } } - /* Mark as artificial */ - UNSET_LOC(c); + /* artificial */ compiler_pop_fblock(c, EXCEPTION_GROUP_HANDLER, NO_LABEL); NEW_JUMP_TARGET_LABEL(c, reraise); USE_LABEL(c, reraise_star); - ADDOP(c, PREP_RERAISE_STAR); - ADDOP_I(c, COPY, 1); - ADDOP_JUMP(c, POP_JUMP_IF_NOT_NONE, reraise); + ADDOP(c, NO_LOCATION, PREP_RERAISE_STAR); + ADDOP_I(c, NO_LOCATION, COPY, 1); + ADDOP_JUMP(c, NO_LOCATION, POP_JUMP_IF_NOT_NONE, reraise); /* Nothing to reraise */ - ADDOP(c, POP_TOP); - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); - ADDOP_JUMP(c, JUMP, end); + ADDOP(c, NO_LOCATION, POP_TOP); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_EXCEPT); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, reraise); - ADDOP(c, POP_BLOCK); - ADDOP_I(c, SWAP, 2); - ADDOP(c, POP_EXCEPT); - ADDOP_I(c, RERAISE, 0); + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP_I(c, NO_LOCATION, SWAP, 2); + ADDOP(c, NO_LOCATION, POP_EXCEPT); + ADDOP_I(c, NO_LOCATION, RERAISE, 0); USE_LABEL(c, cleanup); - POP_EXCEPT_AND_RERAISE(c); + POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); USE_LABEL(c, orelse); VISIT_SEQ(c, stmt, s->v.TryStar.orelse); @@ -3767,7 +3826,8 @@ compiler_try_star(struct compiler *c, stmt_ty s) } static int -compiler_import_as(struct compiler *c, identifier name, identifier asname) +compiler_import_as(struct compiler *c, location loc, + identifier name, identifier asname) { /* The IMPORT_NAME opcode was already generated. This function merely needs to bind the result to a name. @@ -3790,25 +3850,26 @@ compiler_import_as(struct compiler *c, identifier name, identifier asname) attr = PyUnicode_Substring(name, pos, (dot != -1) ? 
dot : len); if (!attr) return 0; - ADDOP_N(c, IMPORT_FROM, attr, names); + ADDOP_N(c, loc, IMPORT_FROM, attr, names); if (dot == -1) { break; } - ADDOP_I(c, SWAP, 2); - ADDOP(c, POP_TOP); + ADDOP_I(c, loc, SWAP, 2); + ADDOP(c, loc, POP_TOP); } - if (!compiler_nameop(c, asname, Store)) { + if (!compiler_nameop(c, loc, asname, Store)) { return 0; } - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); return 1; } - return compiler_nameop(c, asname, Store); + return compiler_nameop(c, loc, asname, Store); } static int compiler_import(struct compiler *c, stmt_ty s) { + location loc = LOC(s); /* The Import node stores a module name like a.b.c as a single string. This is convenient for all cases except import a.b.c as d @@ -3823,12 +3884,12 @@ compiler_import(struct compiler *c, stmt_ty s) alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i); int r; - ADDOP_LOAD_CONST(c, zero); - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_NAME(c, IMPORT_NAME, alias->name, names); + ADDOP_LOAD_CONST(c, loc, zero); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADDOP_NAME(c, loc, IMPORT_NAME, alias->name, names); if (alias->asname) { - r = compiler_import_as(c, alias->name, alias->asname); + r = compiler_import_as(c, loc, alias->name, alias->asname); if (!r) return r; } @@ -3841,7 +3902,7 @@ compiler_import(struct compiler *c, stmt_ty s) if (tmp == NULL) return 0; } - r = compiler_nameop(c, tmp, Store); + r = compiler_nameop(c, loc, tmp, Store); if (dot != -1) { Py_DECREF(tmp); } @@ -3855,58 +3916,60 @@ compiler_import(struct compiler *c, stmt_ty s) static int compiler_from_import(struct compiler *c, stmt_ty s) { - Py_ssize_t i, n = asdl_seq_LEN(s->v.ImportFrom.names); - PyObject *names; + Py_ssize_t n = asdl_seq_LEN(s->v.ImportFrom.names); - ADDOP_LOAD_CONST_NEW(c, PyLong_FromLong(s->v.ImportFrom.level)); + ADDOP_LOAD_CONST_NEW(c, LOC(s), PyLong_FromLong(s->v.ImportFrom.level)); - names = PyTuple_New(n); - if (!names) + PyObject *names = PyTuple_New(n); + if (!names) { return 0; + } /* build up the names */ - for (i = 0; i < n; i++) { + for (Py_ssize_t i = 0; i < n; i++) { alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); - Py_INCREF(alias->name); - PyTuple_SET_ITEM(names, i, alias->name); + PyTuple_SET_ITEM(names, i, Py_NewRef(alias->name)); } - if (s->lineno > c->c_future->ff_lineno && s->v.ImportFrom.module && - _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__")) { + if (location_is_after(LOC(s), c->c_future.ff_location) && + s->v.ImportFrom.module && + _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__")) + { Py_DECREF(names); - return compiler_error(c, "from __future__ imports must occur " + return compiler_error(c, LOC(s), "from __future__ imports must occur " "at the beginning of the file"); } - ADDOP_LOAD_CONST_NEW(c, names); + ADDOP_LOAD_CONST_NEW(c, LOC(s), names); if (s->v.ImportFrom.module) { - ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names); + ADDOP_NAME(c, LOC(s), IMPORT_NAME, s->v.ImportFrom.module, names); } else { _Py_DECLARE_STR(empty, ""); - ADDOP_NAME(c, IMPORT_NAME, &_Py_STR(empty), names); + ADDOP_NAME(c, LOC(s), IMPORT_NAME, &_Py_STR(empty), names); } - for (i = 0; i < n; i++) { + for (Py_ssize_t i = 0; i < n; i++) { alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); identifier store_name; if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') { assert(n == 1); - ADDOP(c, IMPORT_STAR); + ADDOP(c, LOC(s), IMPORT_STAR); return 1; } - ADDOP_NAME(c, IMPORT_FROM, alias->name, names); + ADDOP_NAME(c, LOC(s), IMPORT_FROM, 
alias->name, names); store_name = alias->name; - if (alias->asname) + if (alias->asname) { store_name = alias->asname; + } - if (!compiler_nameop(c, store_name, Store)) { + if (!compiler_nameop(c, LOC(s), store_name, Store)) { return 0; } } /* remove imported module */ - ADDOP(c, POP_TOP); + ADDOP(c, LOC(s), POP_TOP); return 1; } @@ -3920,57 +3983,53 @@ compiler_assert(struct compiler *c, stmt_ty s) PyTuple_Check(s->v.Assert.test->v.Constant.value) && PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0)) { - if (!compiler_warn(c, "assertion is always true, " - "perhaps remove parentheses?")) + if (!compiler_warn(c, LOC(s), "assertion is always true, " + "perhaps remove parentheses?")) { return 0; } } - if (c->c_optimize) + if (c->c_optimize) { return 1; + } NEW_JUMP_TARGET_LABEL(c, end); - if (!compiler_jump_if(c, s->v.Assert.test, end, 1)) + if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) { return 0; - ADDOP(c, LOAD_ASSERTION_ERROR); + } + ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR); if (s->v.Assert.msg) { VISIT(c, expr, s->v.Assert.msg); - ADDOP_I(c, CALL, 0); + ADDOP_I(c, LOC(s), CALL, 0); } - ADDOP_I(c, RAISE_VARARGS, 1); + ADDOP_I(c, LOC(s), RAISE_VARARGS, 1); USE_LABEL(c, end); return 1; } static int -compiler_visit_stmt_expr(struct compiler *c, expr_ty value) +compiler_stmt_expr(struct compiler *c, location loc, expr_ty value) { if (c->c_interactive && c->c_nestlevel <= 1) { VISIT(c, expr, value); - ADDOP(c, PRINT_EXPR); + ADDOP(c, loc, PRINT_EXPR); return 1; } if (value->kind == Constant_kind) { /* ignore constant statement */ - ADDOP(c, NOP); + ADDOP(c, loc, NOP); return 1; } VISIT(c, expr, value); - /* Mark POP_TOP as artificial */ - UNSET_LOC(c); - ADDOP(c, POP_TOP); + ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */ return 1; } static int compiler_visit_stmt(struct compiler *c, stmt_ty s) { - Py_ssize_t i, n; - - /* Always assign a lineno to the next instruction for a stmt. 
*/ - SET_LOC(c, s); switch (s->kind) { case FunctionDef_kind: @@ -3983,16 +4042,18 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s) VISIT_SEQ(c, expr, s->v.Delete.targets) break; case Assign_kind: - n = asdl_seq_LEN(s->v.Assign.targets); + { + Py_ssize_t n = asdl_seq_LEN(s->v.Assign.targets); VISIT(c, expr, s->v.Assign.value); - for (i = 0; i < n; i++) { + for (Py_ssize_t i = 0; i < n; i++) { if (i < n - 1) { - ADDOP_I(c, COPY, 1); + ADDOP_I(c, LOC(s), COPY, 1); } VISIT(c, expr, (expr_ty)asdl_seq_GET(s->v.Assign.targets, i)); } break; + } case AugAssign_kind: return compiler_augassign(c, s); case AnnAssign_kind: @@ -4006,7 +4067,8 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s) case Match_kind: return compiler_match(c, s); case Raise_kind: - n = 0; + { + Py_ssize_t n = 0; if (s->v.Raise.exc) { VISIT(c, expr, s->v.Raise.exc); n++; @@ -4015,8 +4077,9 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s) n++; } } - ADDOP_I(c, RAISE_VARARGS, (int)n); + ADDOP_I(c, LOC(s), RAISE_VARARGS, (int)n); break; + } case Try_kind: return compiler_try(c, s); case TryStar_kind: @@ -4031,14 +4094,22 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s) case Nonlocal_kind: break; case Expr_kind: - return compiler_visit_stmt_expr(c, s->v.Expr.value); + { + return compiler_stmt_expr(c, LOC(s), s->v.Expr.value); + } case Pass_kind: - ADDOP(c, NOP); + { + ADDOP(c, LOC(s), NOP); break; + } case Break_kind: - return compiler_break(c); + { + return compiler_break(c, LOC(s)); + } case Continue_kind: - return compiler_continue(c); + { + return compiler_continue(c, LOC(s)); + } case With_kind: return compiler_with(c, s, 0); case AsyncFunctionDef_kind: @@ -4072,7 +4143,8 @@ unaryop(unaryop_ty op) } static int -addop_binary(struct compiler *c, operator_ty binop, bool inplace) +addop_binary(struct compiler *c, location loc, operator_ty binop, + bool inplace) { int oparg; switch (binop) { @@ -4120,23 +4192,24 @@ addop_binary(struct compiler *c, operator_ty binop, bool inplace) inplace ? 
"inplace" : "binary", binop); return 0; } - ADDOP_I(c, BINARY_OP, oparg); + ADDOP_I(c, loc, BINARY_OP, oparg); return 1; } static int -addop_yield(struct compiler *c) { +addop_yield(struct compiler *c, location loc) { if (c->u->u_ste->ste_generator && c->u->u_ste->ste_coroutine) { - ADDOP(c, ASYNC_GEN_WRAP); + ADDOP(c, loc, ASYNC_GEN_WRAP); } - ADDOP_I(c, YIELD_VALUE, 0); - ADDOP_I(c, RESUME, 1); + ADDOP_I(c, loc, YIELD_VALUE, 0); + ADDOP_I(c, loc, RESUME, 1); return 1; } static int -compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx) +compiler_nameop(struct compiler *c, location loc, + identifier name, expr_context_ty ctx) { int op, scope; Py_ssize_t arg; @@ -4149,7 +4222,7 @@ compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx) !_PyUnicode_EqualToASCIIString(name, "True") && !_PyUnicode_EqualToASCIIString(name, "False")); - if (forbidden_name(c, name, ctx)) + if (forbidden_name(c, loc, name, ctx)) return 0; mangled = _Py_Mangle(c->u->u_private, name); @@ -4203,7 +4276,7 @@ compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx) case Store: op = STORE_FAST; break; case Del: op = DELETE_FAST; break; } - ADDOP_N(c, op, mangled, varnames); + ADDOP_N(c, loc, op, mangled, varnames); return 1; case OP_GLOBAL: switch (ctx) { @@ -4230,7 +4303,7 @@ compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx) if (op == LOAD_GLOBAL) { arg <<= 1; } - return cfg_builder_addop_i(CFG_BUILDER(c), op, arg, COMPILER_LOC(c)); + return cfg_builder_addop_i(CFG_BUILDER(c), op, arg, loc); } static int @@ -4240,6 +4313,7 @@ compiler_boolop(struct compiler *c, expr_ty e) Py_ssize_t i, n; asdl_expr_seq *s; + location loc = LOC(e); assert(e->kind == BoolOp_kind); if (e->v.BoolOp.op == And) jumpi = JUMP_IF_FALSE_OR_POP; @@ -4251,7 +4325,7 @@ compiler_boolop(struct compiler *c, expr_ty e) assert(n >= 0); for (i = 0; i < n; ++i) { VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i)); - ADDOP_JUMP(c, jumpi, end); + ADDOP_JUMP(c, loc, jumpi, end); NEW_JUMP_TARGET_LABEL(c, next); USE_LABEL(c, next); @@ -4263,7 +4337,8 @@ compiler_boolop(struct compiler *c, expr_ty e) } static int -starunpack_helper(struct compiler *c, asdl_expr_seq *elts, int pushed, +starunpack_helper(struct compiler *c, location loc, + asdl_expr_seq *elts, int pushed, int build, int add, int extend, int tuple) { Py_ssize_t n = asdl_seq_LEN(elts); @@ -4275,11 +4350,10 @@ starunpack_helper(struct compiler *c, asdl_expr_seq *elts, int pushed, PyObject *val; for (Py_ssize_t i = 0; i < n; i++) { val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value; - Py_INCREF(val); - PyTuple_SET_ITEM(folded, i, val); + PyTuple_SET_ITEM(folded, i, Py_NewRef(val)); } if (tuple && !pushed) { - ADDOP_LOAD_CONST_NEW(c, folded); + ADDOP_LOAD_CONST_NEW(c, loc, folded); } else { if (add == SET_ADD) { Py_SETREF(folded, PyFrozenSet_New(folded)); @@ -4287,11 +4361,11 @@ starunpack_helper(struct compiler *c, asdl_expr_seq *elts, int pushed, return 0; } } - ADDOP_I(c, build, pushed); - ADDOP_LOAD_CONST_NEW(c, folded); - ADDOP_I(c, extend, 1); + ADDOP_I(c, loc, build, pushed); + ADDOP_LOAD_CONST_NEW(c, loc, folded); + ADDOP_I(c, loc, extend, 1); if (tuple) { - ADDOP(c, LIST_TO_TUPLE); + ADDOP(c, loc, LIST_TO_TUPLE); } } return 1; @@ -4312,43 +4386,43 @@ starunpack_helper(struct compiler *c, asdl_expr_seq *elts, int pushed, VISIT(c, expr, elt); } if (tuple) { - ADDOP_I(c, BUILD_TUPLE, n+pushed); + ADDOP_I(c, loc, BUILD_TUPLE, n+pushed); } else { - ADDOP_I(c, build, n+pushed); + ADDOP_I(c, loc, build, n+pushed); } return 
1; } int sequence_built = 0; if (big) { - ADDOP_I(c, build, pushed); + ADDOP_I(c, loc, build, pushed); sequence_built = 1; } for (Py_ssize_t i = 0; i < n; i++) { expr_ty elt = asdl_seq_GET(elts, i); if (elt->kind == Starred_kind) { if (sequence_built == 0) { - ADDOP_I(c, build, i+pushed); + ADDOP_I(c, loc, build, i+pushed); sequence_built = 1; } VISIT(c, expr, elt->v.Starred.value); - ADDOP_I(c, extend, 1); + ADDOP_I(c, loc, extend, 1); } else { VISIT(c, expr, elt); if (sequence_built) { - ADDOP_I(c, add, 1); + ADDOP_I(c, loc, add, 1); } } } assert(sequence_built); if (tuple) { - ADDOP(c, LIST_TO_TUPLE); + ADDOP(c, loc, LIST_TO_TUPLE); } return 1; } static int -unpack_helper(struct compiler *c, asdl_expr_seq *elts) +unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts) { Py_ssize_t n = asdl_seq_LEN(elts); int seen_star = 0; @@ -4357,28 +4431,28 @@ unpack_helper(struct compiler *c, asdl_expr_seq *elts) if (elt->kind == Starred_kind && !seen_star) { if ((i >= (1 << 8)) || (n-i-1 >= (INT_MAX >> 8))) - return compiler_error(c, + return compiler_error(c, loc, "too many expressions in " "star-unpacking assignment"); - ADDOP_I(c, UNPACK_EX, (i + ((n-i-1) << 8))); + ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8))); seen_star = 1; } else if (elt->kind == Starred_kind) { - return compiler_error(c, + return compiler_error(c, loc, "multiple starred expressions in assignment"); } } if (!seen_star) { - ADDOP_I(c, UNPACK_SEQUENCE, n); + ADDOP_I(c, loc, UNPACK_SEQUENCE, n); } return 1; } static int -assignment_helper(struct compiler *c, asdl_expr_seq *elts) +assignment_helper(struct compiler *c, location loc, asdl_expr_seq *elts) { Py_ssize_t n = asdl_seq_LEN(elts); - RETURN_IF_FALSE(unpack_helper(c, elts)); + RETURN_IF_FALSE(unpack_helper(c, loc, elts)); for (Py_ssize_t i = 0; i < n; i++) { expr_ty elt = asdl_seq_GET(elts, i); VISIT(c, expr, elt->kind != Starred_kind ? 
elt : elt->v.Starred.value); @@ -4389,13 +4463,14 @@ assignment_helper(struct compiler *c, asdl_expr_seq *elts) static int compiler_list(struct compiler *c, expr_ty e) { + location loc = LOC(e); asdl_expr_seq *elts = e->v.List.elts; if (e->v.List.ctx == Store) { - return assignment_helper(c, elts); + return assignment_helper(c, loc, elts); } else if (e->v.List.ctx == Load) { - return starunpack_helper(c, elts, 0, BUILD_LIST, - LIST_APPEND, LIST_EXTEND, 0); + return starunpack_helper(c, loc, elts, 0, + BUILD_LIST, LIST_APPEND, LIST_EXTEND, 0); } else VISIT_SEQ(c, expr, elts); @@ -4405,13 +4480,14 @@ compiler_list(struct compiler *c, expr_ty e) static int compiler_tuple(struct compiler *c, expr_ty e) { + location loc = LOC(e); asdl_expr_seq *elts = e->v.Tuple.elts; if (e->v.Tuple.ctx == Store) { - return assignment_helper(c, elts); + return assignment_helper(c, loc, elts); } else if (e->v.Tuple.ctx == Load) { - return starunpack_helper(c, elts, 0, BUILD_LIST, - LIST_APPEND, LIST_EXTEND, 1); + return starunpack_helper(c, loc, elts, 0, + BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1); } else VISIT_SEQ(c, expr, elts); @@ -4421,8 +4497,9 @@ compiler_tuple(struct compiler *c, expr_ty e) static int compiler_set(struct compiler *c, expr_ty e) { - return starunpack_helper(c, e->v.Set.elts, 0, BUILD_SET, - SET_ADD, SET_UPDATE, 0); + location loc = LOC(e); + return starunpack_helper(c, loc, e->v.Set.elts, 0, + BUILD_SET, SET_ADD, SET_UPDATE, 0); } static int @@ -4443,6 +4520,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end Py_ssize_t i, n = end - begin; PyObject *keys, *key; int big = n*2 > STACK_USE_GUIDELINE; + location loc = LOC(e); if (n > 1 && !big && are_all_items_const(e->v.Dict.keys, begin, end)) { for (i = begin; i < end; i++) { VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i)); @@ -4453,25 +4531,24 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end } for (i = begin; i < end; i++) { key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value; - Py_INCREF(key); - PyTuple_SET_ITEM(keys, i - begin, key); + PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key)); } - ADDOP_LOAD_CONST_NEW(c, keys); - ADDOP_I(c, BUILD_CONST_KEY_MAP, n); + ADDOP_LOAD_CONST_NEW(c, loc, keys); + ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n); return 1; } if (big) { - ADDOP_I(c, BUILD_MAP, 0); + ADDOP_I(c, loc, BUILD_MAP, 0); } for (i = begin; i < end; i++) { VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.keys, i)); VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i)); if (big) { - ADDOP_I(c, MAP_ADD, 1); + ADDOP_I(c, loc, MAP_ADD, 1); } } if (!big) { - ADDOP_I(c, BUILD_MAP, n); + ADDOP_I(c, loc, BUILD_MAP, n); } return 1; } @@ -4479,6 +4556,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end static int compiler_dict(struct compiler *c, expr_ty e) { + location loc = LOC(e); Py_ssize_t i, n, elements; int have_dict; int is_unpacking = 0; @@ -4493,17 +4571,17 @@ compiler_dict(struct compiler *c, expr_ty e) return 0; } if (have_dict) { - ADDOP_I(c, DICT_UPDATE, 1); + ADDOP_I(c, loc, DICT_UPDATE, 1); } have_dict = 1; elements = 0; } if (have_dict == 0) { - ADDOP_I(c, BUILD_MAP, 0); + ADDOP_I(c, loc, BUILD_MAP, 0); have_dict = 1; } VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i)); - ADDOP_I(c, DICT_UPDATE, 1); + ADDOP_I(c, loc, DICT_UPDATE, 1); } else { if (elements*2 > STACK_USE_GUIDELINE) { @@ -4511,7 +4589,7 @@ compiler_dict(struct compiler *c, expr_ty e) return 0; } if (have_dict) { - ADDOP_I(c, 
DICT_UPDATE, 1); + ADDOP_I(c, loc, DICT_UPDATE, 1); } have_dict = 1; elements = 0; @@ -4526,12 +4604,12 @@ compiler_dict(struct compiler *c, expr_ty e) return 0; } if (have_dict) { - ADDOP_I(c, DICT_UPDATE, 1); + ADDOP_I(c, loc, DICT_UPDATE, 1); } have_dict = 1; } if (!have_dict) { - ADDOP_I(c, BUILD_MAP, 0); + ADDOP_I(c, loc, BUILD_MAP, 0); } return 1; } @@ -4539,6 +4617,7 @@ compiler_dict(struct compiler *c, expr_ty e) static int compiler_compare(struct compiler *c, expr_ty e) { + location loc = LOC(e); Py_ssize_t i, n; if (!check_compare(c, e)) { @@ -4549,26 +4628,26 @@ compiler_compare(struct compiler *c, expr_ty e) n = asdl_seq_LEN(e->v.Compare.ops) - 1; if (n == 0) { VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0)); - ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, 0)); + ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, 0)); } else { NEW_JUMP_TARGET_LABEL(c, cleanup); for (i = 0; i < n; i++) { VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); - ADDOP_I(c, SWAP, 2); - ADDOP_I(c, COPY, 2); - ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, i)); - ADDOP_JUMP(c, JUMP_IF_FALSE_OR_POP, cleanup); + ADDOP_I(c, loc, SWAP, 2); + ADDOP_I(c, loc, COPY, 2); + ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, i)); + ADDOP_JUMP(c, loc, JUMP_IF_FALSE_OR_POP, cleanup); } VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n)); - ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, n)); + ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, n)); NEW_JUMP_TARGET_LABEL(c, end); - ADDOP_JUMP_NOLINE(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); USE_LABEL(c, cleanup); - ADDOP_I(c, SWAP, 2); - ADDOP(c, POP_TOP); + ADDOP_I(c, loc, SWAP, 2); + ADDOP(c, loc, POP_TOP); USE_LABEL(c, end); } @@ -4618,10 +4697,12 @@ check_caller(struct compiler *c, expr_ty e) case SetComp_kind: case GeneratorExp_kind: case JoinedStr_kind: - case FormattedValue_kind: - return compiler_warn(c, "'%.200s' object is not callable; " - "perhaps you missed a comma?", - infer_type(e)->tp_name); + case FormattedValue_kind: { + location loc = LOC(e); + return compiler_warn(c, loc, "'%.200s' object is not callable; " + "perhaps you missed a comma?", + infer_type(e)->tp_name); + } default: return 1; } @@ -4645,10 +4726,12 @@ check_subscripter(struct compiler *c, expr_ty e) case Set_kind: case SetComp_kind: case GeneratorExp_kind: - case Lambda_kind: - return compiler_warn(c, "'%.200s' object is not subscriptable; " - "perhaps you missed a comma?", - infer_type(e)->tp_name); + case Lambda_kind: { + location loc = LOC(e); + return compiler_warn(c, loc, "'%.200s' object is not subscriptable; " + "perhaps you missed a comma?", + infer_type(e)->tp_name); + } default: return 1; } @@ -4677,12 +4760,14 @@ check_index(struct compiler *c, expr_ty e, expr_ty s) case List_kind: case ListComp_kind: case JoinedStr_kind: - case FormattedValue_kind: - return compiler_warn(c, "%.200s indices must be integers or slices, " - "not %.200s; " - "perhaps you missed a comma?", - infer_type(e)->tp_name, - index_type->tp_name); + case FormattedValue_kind: { + location loc = LOC(e); + return compiler_warn(c, loc, "%.200s indices must be integers " + "or slices, not %.200s; " + "perhaps you missed a comma?", + infer_type(e)->tp_name, + index_type->tp_name); + } default: return 1; } @@ -4707,29 +4792,30 @@ is_import_originated(struct compiler *c, expr_ty e) // If an attribute access spans multiple lines, update the current start // location to point to the attribute name. 
-static void -update_start_location_to_match_attr(struct compiler *c, expr_ty attr) +static location +update_start_location_to_match_attr(struct compiler *c, location loc, + expr_ty attr) { assert(attr->kind == Attribute_kind); - struct location *loc = &c->u->u_loc; - if (loc->lineno != attr->end_lineno) { - loc->lineno = attr->end_lineno; + if (loc.lineno != attr->end_lineno) { + loc.lineno = attr->end_lineno; int len = (int)PyUnicode_GET_LENGTH(attr->v.Attribute.attr); if (len <= attr->end_col_offset) { - loc->col_offset = attr->end_col_offset - len; + loc.col_offset = attr->end_col_offset - len; } else { // GH-94694: Somebody's compiling weird ASTs. Just drop the columns: - loc->col_offset = -1; - loc->end_col_offset = -1; + loc.col_offset = -1; + loc.end_col_offset = -1; } // Make sure the end position still follows the start position, even for // weird ASTs: - loc->end_lineno = Py_MAX(loc->lineno, loc->end_lineno); - if (loc->lineno == loc->end_lineno) { - loc->end_col_offset = Py_MAX(loc->col_offset, loc->end_col_offset); + loc.end_lineno = Py_MAX(loc.lineno, loc.end_lineno); + if (loc.lineno == loc.end_lineno) { + loc.end_col_offset = Py_MAX(loc.col_offset, loc.end_col_offset); } } + return loc; } // Return 1 if the method call was optimized, -1 if not, and 0 on error. @@ -4773,20 +4859,19 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e) } /* Alright, we can optimize the code. */ VISIT(c, expr, meth->v.Attribute.value); - SET_LOC(c, meth); - update_start_location_to_match_attr(c, meth); - ADDOP_NAME(c, LOAD_METHOD, meth->v.Attribute.attr, names); + location loc = LOC(meth); + loc = update_start_location_to_match_attr(c, loc, meth); + ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names); VISIT_SEQ(c, expr, e->v.Call.args); if (kwdsl) { VISIT_SEQ(c, keyword, kwds); - if (!compiler_call_simple_kw_helper(c, kwds, kwdsl)) { + if (!compiler_call_simple_kw_helper(c, loc, kwds, kwdsl)) { return 0; }; } - SET_LOC(c, e); - update_start_location_to_match_attr(c, meth); - ADDOP_I(c, CALL, argsl + kwdsl); + loc = update_start_location_to_match_attr(c, LOC(e), meth); + ADDOP_I(c, loc, CALL, argsl + kwdsl); return 1; } @@ -4799,14 +4884,14 @@ validate_keywords(struct compiler *c, asdl_keyword_seq *keywords) if (key->arg == NULL) { continue; } - if (forbidden_name(c, key->arg, Store)) { + location loc = LOC(key); + if (forbidden_name(c, loc, key->arg, Store)) { return -1; } for (Py_ssize_t j = i + 1; j < nkeywords; j++) { keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j)); if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) { - SET_LOC(c, other); - compiler_error(c, "keyword argument repeated: %U", key->arg); + compiler_error(c, LOC(other), "keyword argument repeated: %U", key->arg); return -1; } } @@ -4827,11 +4912,11 @@ compiler_call(struct compiler *c, expr_ty e) if (!check_caller(c, e->v.Call.func)) { return 0; } - SET_LOC(c, e->v.Call.func); - ADDOP(c, PUSH_NULL); - SET_LOC(c, e); + location loc = LOC(e->v.Call.func); + ADDOP(c, loc, PUSH_NULL); VISIT(c, expr, e->v.Call.func); - return compiler_call_helper(c, 0, + loc = LOC(e); + return compiler_call_helper(c, loc, 0, e->v.Call.args, e->v.Call.keywords); } @@ -4839,23 +4924,23 @@ compiler_call(struct compiler *c, expr_ty e) static int compiler_joined_str(struct compiler *c, expr_ty e) { - + location loc = LOC(e); Py_ssize_t value_count = asdl_seq_LEN(e->v.JoinedStr.values); if (value_count > STACK_USE_GUIDELINE) { _Py_DECLARE_STR(empty, ""); - ADDOP_LOAD_CONST_NEW(c, Py_NewRef(&_Py_STR(empty))); - 
ADDOP_NAME(c, LOAD_METHOD, &_Py_ID(join), names); - ADDOP_I(c, BUILD_LIST, 0); + ADDOP_LOAD_CONST_NEW(c, loc, Py_NewRef(&_Py_STR(empty))); + ADDOP_NAME(c, loc, LOAD_METHOD, &_Py_ID(join), names); + ADDOP_I(c, loc, BUILD_LIST, 0); for (Py_ssize_t i = 0; i < asdl_seq_LEN(e->v.JoinedStr.values); i++) { VISIT(c, expr, asdl_seq_GET(e->v.JoinedStr.values, i)); - ADDOP_I(c, LIST_APPEND, 1); + ADDOP_I(c, loc, LIST_APPEND, 1); } - ADDOP_I(c, CALL, 1); + ADDOP_I(c, loc, CALL, 1); } else { VISIT_SEQ(c, expr, e->v.JoinedStr.values); if (asdl_seq_LEN(e->v.JoinedStr.values) != 1) { - ADDOP_I(c, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values)); + ADDOP_I(c, loc, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values)); } } return 1; @@ -4902,13 +4987,16 @@ compiler_formatted_value(struct compiler *c, expr_ty e) } /* And push our opcode and oparg */ - ADDOP_I(c, FORMAT_VALUE, oparg); + location loc = LOC(e); + ADDOP_I(c, loc, FORMAT_VALUE, oparg); return 1; } static int -compiler_subkwargs(struct compiler *c, asdl_keyword_seq *keywords, Py_ssize_t begin, Py_ssize_t end) +compiler_subkwargs(struct compiler *c, location loc, + asdl_keyword_seq *keywords, + Py_ssize_t begin, Py_ssize_t end) { Py_ssize_t i, n = end - begin; keyword_ty kw; @@ -4926,26 +5014,25 @@ compiler_subkwargs(struct compiler *c, asdl_keyword_seq *keywords, Py_ssize_t be } for (i = begin; i < end; i++) { key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg; - Py_INCREF(key); - PyTuple_SET_ITEM(keys, i - begin, key); + PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key)); } - ADDOP_LOAD_CONST_NEW(c, keys); - ADDOP_I(c, BUILD_CONST_KEY_MAP, n); + ADDOP_LOAD_CONST_NEW(c, loc, keys); + ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n); return 1; } if (big) { - ADDOP_I_NOLINE(c, BUILD_MAP, 0); + ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0); } for (i = begin; i < end; i++) { kw = asdl_seq_GET(keywords, i); - ADDOP_LOAD_CONST(c, kw->arg); + ADDOP_LOAD_CONST(c, loc, kw->arg); VISIT(c, expr, kw->value); if (big) { - ADDOP_I_NOLINE(c, MAP_ADD, 1); + ADDOP_I(c, NO_LOCATION, MAP_ADD, 1); } } if (!big) { - ADDOP_I(c, BUILD_MAP, n); + ADDOP_I(c, loc, BUILD_MAP, n); } return 1; } @@ -4955,9 +5042,8 @@ compiler_subkwargs(struct compiler *c, asdl_keyword_seq *keywords, Py_ssize_t be * Returns 1 on success, 0 on error. 
*/ static int -compiler_call_simple_kw_helper(struct compiler *c, - asdl_keyword_seq *keywords, - Py_ssize_t nkwelts) +compiler_call_simple_kw_helper(struct compiler *c, location loc, + asdl_keyword_seq *keywords, Py_ssize_t nkwelts) { PyObject *names; names = PyTuple_New(nkwelts); @@ -4966,22 +5052,21 @@ compiler_call_simple_kw_helper(struct compiler *c, } for (int i = 0; i < nkwelts; i++) { keyword_ty kw = asdl_seq_GET(keywords, i); - Py_INCREF(kw->arg); - PyTuple_SET_ITEM(names, i, kw->arg); + PyTuple_SET_ITEM(names, i, Py_NewRef(kw->arg)); } Py_ssize_t arg = compiler_add_const(c, names); if (arg < 0) { return 0; } Py_DECREF(names); - ADDOP_I(c, KW_NAMES, arg); + ADDOP_I(c, loc, KW_NAMES, arg); return 1; } /* shared code between compiler_call and compiler_class */ static int -compiler_call_helper(struct compiler *c, +compiler_call_helper(struct compiler *c, location loc, int n, /* Args already pushed */ asdl_expr_seq *args, asdl_keyword_seq *keywords) @@ -5019,11 +5104,11 @@ compiler_call_helper(struct compiler *c, } if (nkwelts) { VISIT_SEQ(c, keyword, keywords); - if (!compiler_call_simple_kw_helper(c, keywords, nkwelts)) { + if (!compiler_call_simple_kw_helper(c, loc, keywords, nkwelts)) { return 0; }; } - ADDOP_I(c, CALL, n + nelts + nkwelts); + ADDOP_I(c, loc, CALL, n + nelts + nkwelts); return 1; ex_call: @@ -5032,7 +5117,7 @@ compiler_call_helper(struct compiler *c, if (n ==0 && nelts == 1 && ((expr_ty)asdl_seq_GET(args, 0))->kind == Starred_kind) { VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value); } - else if (starunpack_helper(c, args, n, BUILD_LIST, + else if (starunpack_helper(c, loc, args, n, BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1) == 0) { return 0; } @@ -5047,21 +5132,21 @@ compiler_call_helper(struct compiler *c, if (kw->arg == NULL) { /* A keyword argument unpacking. */ if (nseen) { - if (!compiler_subkwargs(c, keywords, i - nseen, i)) { + if (!compiler_subkwargs(c, loc, keywords, i - nseen, i)) { return 0; } if (have_dict) { - ADDOP_I(c, DICT_MERGE, 1); + ADDOP_I(c, loc, DICT_MERGE, 1); } have_dict = 1; nseen = 0; } if (!have_dict) { - ADDOP_I(c, BUILD_MAP, 0); + ADDOP_I(c, loc, BUILD_MAP, 0); have_dict = 1; } VISIT(c, expr, kw->value); - ADDOP_I(c, DICT_MERGE, 1); + ADDOP_I(c, loc, DICT_MERGE, 1); } else { nseen++; @@ -5069,17 +5154,17 @@ compiler_call_helper(struct compiler *c, } if (nseen) { /* Pack up any trailing keyword arguments. 
*/ - if (!compiler_subkwargs(c, keywords, nkwelts - nseen, nkwelts)) { + if (!compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts)) { return 0; } if (have_dict) { - ADDOP_I(c, DICT_MERGE, 1); + ADDOP_I(c, loc, DICT_MERGE, 1); } have_dict = 1; } assert(have_dict); } - ADDOP_I(c, CALL_FUNCTION_EX, nkwelts > 0); + ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0); return 1; } @@ -5099,7 +5184,7 @@ compiler_call_helper(struct compiler *c, static int -compiler_comprehension_generator(struct compiler *c, +compiler_comprehension_generator(struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, expr_ty elt, expr_ty val, int type) @@ -5108,35 +5193,33 @@ compiler_comprehension_generator(struct compiler *c, gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); if (gen->is_async) { return compiler_async_comprehension_generator( - c, generators, gen_index, depth, elt, val, type); + c, loc, generators, gen_index, depth, elt, val, type); } else { return compiler_sync_comprehension_generator( - c, generators, gen_index, depth, elt, val, type); + c, loc, generators, gen_index, depth, elt, val, type); } } static int -compiler_sync_comprehension_generator(struct compiler *c, - asdl_comprehension_seq *generators, int gen_index, - int depth, +compiler_sync_comprehension_generator(struct compiler *c, location loc, + asdl_comprehension_seq *generators, + int gen_index, int depth, expr_ty elt, expr_ty val, int type) { /* generate code for the iterator, then each of the ifs, and then write to the element */ - comprehension_ty gen; - Py_ssize_t i, n; - NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, if_cleanup); NEW_JUMP_TARGET_LABEL(c, anchor); - gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); + comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators, + gen_index); if (gen_index == 0) { /* Receive outermost iter as an implicit argument */ c->u->u_argcount = 1; - ADDOP_I(c, LOAD_FAST, 0); + ADDOP_I(c, loc, LOAD_FAST, 0); } else { /* Sub-iter - calculate on the fly */ @@ -5163,29 +5246,34 @@ compiler_sync_comprehension_generator(struct compiler *c, } if (IS_LABEL(start)) { VISIT(c, expr, gen->iter); - ADDOP(c, GET_ITER); + ADDOP(c, loc, GET_ITER); } } if (IS_LABEL(start)) { depth++; USE_LABEL(c, start); - ADDOP_JUMP(c, FOR_ITER, anchor); + ADDOP_JUMP(c, loc, FOR_ITER, anchor); } VISIT(c, expr, gen->target); /* XXX this needs to be cleaned up...a lot! 
*/ - n = asdl_seq_LEN(gen->ifs); - for (i = 0; i < n; i++) { + Py_ssize_t n = asdl_seq_LEN(gen->ifs); + for (Py_ssize_t i = 0; i < n; i++) { expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i); - if (!compiler_jump_if(c, e, if_cleanup, 0)) + if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) { return 0; + } } - if (++gen_index < asdl_seq_LEN(generators)) - if (!compiler_comprehension_generator(c, + if (++gen_index < asdl_seq_LEN(generators)) { + if (!compiler_comprehension_generator(c, loc, generators, gen_index, depth, - elt, val, type)) - return 0; + elt, val, type)) { + return 0; + } + } + + location elt_loc = LOC(elt); /* only append after the last for generator */ if (gen_index >= asdl_seq_LEN(generators)) { @@ -5193,23 +5281,27 @@ compiler_sync_comprehension_generator(struct compiler *c, switch (type) { case COMP_GENEXP: VISIT(c, expr, elt); - ADDOP_YIELD(c); - ADDOP(c, POP_TOP); + ADDOP_YIELD(c, elt_loc); + ADDOP(c, elt_loc, POP_TOP); break; case COMP_LISTCOMP: VISIT(c, expr, elt); - ADDOP_I(c, LIST_APPEND, depth + 1); + ADDOP_I(c, elt_loc, LIST_APPEND, depth + 1); break; case COMP_SETCOMP: VISIT(c, expr, elt); - ADDOP_I(c, SET_ADD, depth + 1); + ADDOP_I(c, elt_loc, SET_ADD, depth + 1); break; case COMP_DICTCOMP: /* With '{k: v}', k is evaluated before v, so we do the same. */ VISIT(c, expr, elt); VISIT(c, expr, val); - ADDOP_I(c, MAP_ADD, depth + 1); + elt_loc = LOCATION(elt->lineno, + val->end_lineno, + elt->col_offset, + val->end_col_offset); + ADDOP_I(c, elt_loc, MAP_ADD, depth + 1); break; default: return 0; @@ -5218,90 +5310,98 @@ compiler_sync_comprehension_generator(struct compiler *c, USE_LABEL(c, if_cleanup); if (IS_LABEL(start)) { - ADDOP_JUMP(c, JUMP, start); + ADDOP_JUMP(c, elt_loc, JUMP, start); USE_LABEL(c, anchor); + ADDOP(c, NO_LOCATION, END_FOR); } return 1; } static int -compiler_async_comprehension_generator(struct compiler *c, - asdl_comprehension_seq *generators, int gen_index, - int depth, +compiler_async_comprehension_generator(struct compiler *c, location loc, + asdl_comprehension_seq *generators, + int gen_index, int depth, expr_ty elt, expr_ty val, int type) { - comprehension_ty gen; - Py_ssize_t i, n; NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, except); NEW_JUMP_TARGET_LABEL(c, if_cleanup); - gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); + comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators, + gen_index); if (gen_index == 0) { /* Receive outermost iter as an implicit argument */ c->u->u_argcount = 1; - ADDOP_I(c, LOAD_FAST, 0); + ADDOP_I(c, loc, LOAD_FAST, 0); } else { /* Sub-iter - calculate on the fly */ VISIT(c, expr, gen->iter); - ADDOP(c, GET_AITER); + ADDOP(c, loc, GET_AITER); } USE_LABEL(c, start); /* Runtime will push a block here, so we need to account for that */ - if (!compiler_push_fblock(c, ASYNC_COMPREHENSION_GENERATOR, start, - NO_LABEL, NULL)) { + if (!compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR, + start, NO_LABEL, NULL)) { return 0; } - ADDOP_JUMP(c, SETUP_FINALLY, except); - ADDOP(c, GET_ANEXT); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); - ADDOP(c, POP_BLOCK); + ADDOP_JUMP(c, loc, SETUP_FINALLY, except); + ADDOP(c, loc, GET_ANEXT); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); + ADDOP(c, loc, POP_BLOCK); VISIT(c, expr, gen->target); - n = asdl_seq_LEN(gen->ifs); - for (i = 0; i < n; i++) { + Py_ssize_t n = asdl_seq_LEN(gen->ifs); + for (Py_ssize_t i = 0; i < n; i++) { expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i); - if (!compiler_jump_if(c, e, if_cleanup, 
0)) + if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) { return 0; + } } depth++; - if (++gen_index < asdl_seq_LEN(generators)) - if (!compiler_comprehension_generator(c, + if (++gen_index < asdl_seq_LEN(generators)) { + if (!compiler_comprehension_generator(c, loc, generators, gen_index, depth, - elt, val, type)) - return 0; + elt, val, type)) { + return 0; + } + } + location elt_loc = LOC(elt); /* only append after the last for generator */ if (gen_index >= asdl_seq_LEN(generators)) { /* comprehension specific code */ switch (type) { case COMP_GENEXP: VISIT(c, expr, elt); - ADDOP_YIELD(c); - ADDOP(c, POP_TOP); + ADDOP_YIELD(c, elt_loc); + ADDOP(c, elt_loc, POP_TOP); break; case COMP_LISTCOMP: VISIT(c, expr, elt); - ADDOP_I(c, LIST_APPEND, depth + 1); + ADDOP_I(c, elt_loc, LIST_APPEND, depth + 1); break; case COMP_SETCOMP: VISIT(c, expr, elt); - ADDOP_I(c, SET_ADD, depth + 1); + ADDOP_I(c, elt_loc, SET_ADD, depth + 1); break; case COMP_DICTCOMP: /* With '{k: v}', k is evaluated before v, so we do the same. */ VISIT(c, expr, elt); VISIT(c, expr, val); - ADDOP_I(c, MAP_ADD, depth + 1); + elt_loc = LOCATION(elt->lineno, + val->end_lineno, + elt->col_offset, + val->end_col_offset); + ADDOP_I(c, elt_loc, MAP_ADD, depth + 1); break; default: return 0; @@ -5309,14 +5409,13 @@ compiler_async_comprehension_generator(struct compiler *c, } USE_LABEL(c, if_cleanup); - ADDOP_JUMP(c, JUMP, start); + ADDOP_JUMP(c, elt_loc, JUMP, start); compiler_pop_fblock(c, ASYNC_COMPREHENSION_GENERATOR, start); USE_LABEL(c, except); - //UNSET_LOC(c); - ADDOP(c, END_ASYNC_FOR); + ADDOP(c, loc, END_ASYNC_FOR); return 1; } @@ -5339,7 +5438,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, { goto error; } - SET_LOC(c, e); + location loc = LOC(e); is_async_generator = c->u->u_ste->ste_coroutine; @@ -5348,8 +5447,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, scope_type != COMPILER_SCOPE_COMPREHENSION && !is_top_level_await) { - compiler_error(c, "asynchronous comprehension outside of " - "an asynchronous function"); + compiler_error(c, loc, "asynchronous comprehension outside of " + "an asynchronous function"); goto error_in_scope; } @@ -5371,20 +5470,25 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, goto error_in_scope; } - ADDOP_I(c, op, 0); + ADDOP_I(c, loc, op, 0); } - if (!compiler_comprehension_generator(c, generators, 0, 0, elt, - val, type)) + if (!compiler_comprehension_generator(c, loc, generators, 0, 0, + elt, val, type)) { goto error_in_scope; + } if (type != COMP_GENEXP) { - ADDOP(c, RETURN_VALUE); + ADDOP(c, LOC(e), RETURN_VALUE); + } + if (type == COMP_GENEXP) { + if (!wrap_in_stopiteration_handler(c)) { + goto error_in_scope; + } } co = assemble(c, 1); - qualname = c->u->u_qualname; - Py_INCREF(qualname); + qualname = Py_NewRef(c->u->u_qualname); compiler_exit_scope(c); if (is_top_level_await && is_async_generator){ c->u->u_ste->ste_coroutine = 1; @@ -5392,7 +5496,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, if (co == NULL) goto error; - if (!compiler_make_closure(c, co, 0, qualname)) { + loc = LOC(e); + if (!compiler_make_closure(c, loc, co, 0, qualname)) { goto error; } Py_DECREF(qualname); @@ -5400,18 +5505,19 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, VISIT(c, expr, outermost->iter); + loc = LOC(e); if (outermost->is_async) { - ADDOP(c, GET_AITER); + ADDOP(c, loc, GET_AITER); } else { - ADDOP(c, GET_ITER); + ADDOP(c, loc, GET_ITER); } - ADDOP_I(c, CALL, 0); + ADDOP_I(c, loc, CALL, 0); if 
(is_async_generator && type != COMP_GENEXP) { - ADDOP_I(c, GET_AWAITABLE, 0); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP_I(c, loc, GET_AWAITABLE, 0); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); } return 1; @@ -5475,22 +5581,21 @@ compiler_visit_keyword(struct compiler *c, keyword_ty k) static int compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) { - UNSET_LOC(c); NEW_JUMP_TARGET_LABEL(c, suppress); - ADDOP_JUMP(c, POP_JUMP_IF_TRUE, suppress); - ADDOP_I(c, RERAISE, 2); + ADDOP_JUMP(c, NO_LOCATION, POP_JUMP_IF_TRUE, suppress); + ADDOP_I(c, NO_LOCATION, RERAISE, 2); USE_LABEL(c, suppress); - ADDOP(c, POP_TOP); /* exc_value */ - ADDOP(c, POP_BLOCK); - ADDOP(c, POP_EXCEPT); - ADDOP(c, POP_TOP); - ADDOP(c, POP_TOP); + ADDOP(c, NO_LOCATION, POP_TOP); /* exc_value */ + ADDOP(c, NO_LOCATION, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_EXCEPT); + ADDOP(c, NO_LOCATION, POP_TOP); + ADDOP(c, NO_LOCATION, POP_TOP); NEW_JUMP_TARGET_LABEL(c, exit); - ADDOP_JUMP(c, JUMP, exit); + ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); USE_LABEL(c, cleanup); - POP_EXCEPT_AND_RERAISE(c); + POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); USE_LABEL(c, exit); return 1; @@ -5523,13 +5628,14 @@ compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) { static int compiler_async_with(struct compiler *c, stmt_ty s, int pos) { + location loc = LOC(s); withitem_ty item = asdl_seq_GET(s->v.AsyncWith.items, pos); assert(s->kind == AsyncWith_kind); if (IS_TOP_LEVEL_AWAIT(c)){ c->u->u_ste->ste_coroutine = 1; } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION){ - return compiler_error(c, "'async with' outside async function"); + return compiler_error(c, loc, "'async with' outside async function"); } NEW_JUMP_TARGET_LABEL(c, block); @@ -5540,16 +5646,16 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) /* Evaluate EXPR */ VISIT(c, expr, item->context_expr); - ADDOP(c, BEFORE_ASYNC_WITH); - ADDOP_I(c, GET_AWAITABLE, 1); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP(c, loc, BEFORE_ASYNC_WITH); + ADDOP_I(c, loc, GET_AWAITABLE, 1); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); - ADDOP_JUMP(c, SETUP_WITH, final); + ADDOP_JUMP(c, loc, SETUP_WITH, final); /* SETUP_WITH pushes a finally block. 
*/ USE_LABEL(c, block); - if (!compiler_push_fblock(c, ASYNC_WITH, block, final, s)) { + if (!compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s)) { return 0; } @@ -5558,43 +5664,45 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) } else { /* Discard result from context.__aenter__() */ - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); } pos++; - if (pos == asdl_seq_LEN(s->v.AsyncWith.items)) + if (pos == asdl_seq_LEN(s->v.AsyncWith.items)) { /* BLOCK code */ VISIT_SEQ(c, stmt, s->v.AsyncWith.body) - else if (!compiler_async_with(c, s, pos)) + } + else if (!compiler_async_with(c, s, pos)) { return 0; + } compiler_pop_fblock(c, ASYNC_WITH, block); - ADDOP(c, POP_BLOCK); + + ADDOP(c, loc, POP_BLOCK); /* End of body; start the cleanup */ /* For successful outcome: * call __exit__(None, None, None) */ - SET_LOC(c, s); - if(!compiler_call_exit_with_nones(c)) + if(!compiler_call_exit_with_nones(c, loc)) return 0; - ADDOP_I(c, GET_AWAITABLE, 2); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP_I(c, loc, GET_AWAITABLE, 2); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); - ADDOP_JUMP(c, JUMP, exit); + ADDOP_JUMP(c, loc, JUMP, exit); /* For exceptional outcome: */ USE_LABEL(c, final); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); - ADDOP(c, WITH_EXCEPT_START); - ADDOP_I(c, GET_AWAITABLE, 2); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); + ADDOP(c, loc, PUSH_EXC_INFO); + ADDOP(c, loc, WITH_EXCEPT_START); + ADDOP_I(c, loc, GET_AWAITABLE, 2); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); compiler_with_except_finish(c, cleanup); USE_LABEL(c, exit); @@ -5638,12 +5746,13 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* Evaluate EXPR */ VISIT(c, expr, item->context_expr); /* Will push bound __exit__ */ - ADDOP(c, BEFORE_WITH); - ADDOP_JUMP(c, SETUP_WITH, final); + location loc = LOC(s); + ADDOP(c, loc, BEFORE_WITH); + ADDOP_JUMP(c, loc, SETUP_WITH, final); /* SETUP_WITH pushes a finally block. */ USE_LABEL(c, block); - if (!compiler_push_fblock(c, WITH, block, final, s)) { + if (!compiler_push_fblock(c, loc, WITH, block, final, s)) { return 0; } @@ -5652,7 +5761,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) } else { /* Discard result from context.__enter__() */ - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); } pos++; @@ -5662,10 +5771,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) else if (!compiler_with(c, s, pos)) return 0; - - /* Mark all following code as artificial */ - UNSET_LOC(c); - ADDOP(c, POP_BLOCK); + ADDOP(c, NO_LOCATION, POP_BLOCK); compiler_pop_fblock(c, WITH, block); /* End of body; start the cleanup. 
*/ @@ -5673,18 +5779,18 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* For successful outcome: * call __exit__(None, None, None) */ - SET_LOC(c, s); - if (!compiler_call_exit_with_nones(c)) + loc = LOC(s); + if (!compiler_call_exit_with_nones(c, loc)) return 0; - ADDOP(c, POP_TOP); - ADDOP_JUMP(c, JUMP, exit); + ADDOP(c, loc, POP_TOP); + ADDOP_JUMP(c, loc, JUMP, exit); /* For exceptional outcome: */ USE_LABEL(c, final); - ADDOP_JUMP(c, SETUP_CLEANUP, cleanup); - ADDOP(c, PUSH_EXC_INFO); - ADDOP(c, WITH_EXCEPT_START); + ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup); + ADDOP(c, loc, PUSH_EXC_INFO); + ADDOP(c, loc, WITH_EXCEPT_START); compiler_with_except_finish(c, cleanup); USE_LABEL(c, exit); @@ -5694,10 +5800,11 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) static int compiler_visit_expr1(struct compiler *c, expr_ty e) { + location loc = LOC(e); switch (e->kind) { case NamedExpr_kind: VISIT(c, expr, e->v.NamedExpr.value); - ADDOP_I(c, COPY, 1); + ADDOP_I(c, loc, COPY, 1); VISIT(c, expr, e->v.NamedExpr.target); break; case BoolOp_kind: @@ -5705,11 +5812,11 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) case BinOp_kind: VISIT(c, expr, e->v.BinOp.left); VISIT(c, expr, e->v.BinOp.right); - ADDOP_BINARY(c, e->v.BinOp.op); + ADDOP_BINARY(c, loc, e->v.BinOp.op); break; case UnaryOp_kind: VISIT(c, expr, e->v.UnaryOp.operand); - ADDOP(c, unaryop(e->v.UnaryOp.op)); + ADDOP(c, loc, unaryop(e->v.UnaryOp.op)); break; case Lambda_kind: return compiler_lambda(c, e); @@ -5729,50 +5836,50 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) return compiler_dictcomp(c, e); case Yield_kind: if (c->u->u_ste->ste_type != FunctionBlock) - return compiler_error(c, "'yield' outside function"); + return compiler_error(c, loc, "'yield' outside function"); if (e->v.Yield.value) { VISIT(c, expr, e->v.Yield.value); } else { - ADDOP_LOAD_CONST(c, Py_None); + ADDOP_LOAD_CONST(c, loc, Py_None); } - ADDOP_YIELD(c); + ADDOP_YIELD(c, loc); break; case YieldFrom_kind: if (c->u->u_ste->ste_type != FunctionBlock) - return compiler_error(c, "'yield' outside function"); + return compiler_error(c, loc, "'yield' outside function"); if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) - return compiler_error(c, "'yield from' inside async function"); + return compiler_error(c, loc, "'yield from' inside async function"); VISIT(c, expr, e->v.YieldFrom.value); - ADDOP(c, GET_YIELD_FROM_ITER); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 0); + ADDOP(c, loc, GET_YIELD_FROM_ITER); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 0); break; case Await_kind: if (!IS_TOP_LEVEL_AWAIT(c)){ if (c->u->u_ste->ste_type != FunctionBlock){ - return compiler_error(c, "'await' outside function"); + return compiler_error(c, loc, "'await' outside function"); } if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION && c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION){ - return compiler_error(c, "'await' outside async function"); + return compiler_error(c, loc, "'await' outside async function"); } } VISIT(c, expr, e->v.Await.value); - ADDOP_I(c, GET_AWAITABLE, 0); - ADDOP_LOAD_CONST(c, Py_None); - ADD_YIELD_FROM(c, 1); + ADDOP_I(c, loc, GET_AWAITABLE, 0); + ADDOP_LOAD_CONST(c, loc, Py_None); + ADD_YIELD_FROM(c, loc, 1); break; case Compare_kind: return compiler_compare(c, e); case Call_kind: return compiler_call(c, e); case Constant_kind: - ADDOP_LOAD_CONST(c, e->v.Constant.value); + ADDOP_LOAD_CONST(c, loc, e->v.Constant.value); break; case JoinedStr_kind: return compiler_joined_str(c, e); @@ 
-5781,21 +5888,20 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) /* The following exprs can be assignment targets. */ case Attribute_kind: VISIT(c, expr, e->v.Attribute.value); - update_start_location_to_match_attr(c, e); + loc = LOC(e); + loc = update_start_location_to_match_attr(c, loc, e); switch (e->v.Attribute.ctx) { case Load: - { - ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names); + ADDOP_NAME(c, loc, LOAD_ATTR, e->v.Attribute.attr, names); break; - } case Store: - if (forbidden_name(c, e->v.Attribute.attr, e->v.Attribute.ctx)) { + if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) { return 0; } - ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names); + ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names); break; case Del: - ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names); + ADDOP_NAME(c, loc, DELETE_ATTR, e->v.Attribute.attr, names); break; } break; @@ -5806,10 +5912,10 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) case Store: /* In all legitimate cases, the Starred node was already replaced * by compiler_list/compiler_tuple. XXX: is that okay? */ - return compiler_error(c, + return compiler_error(c, loc, "starred assignment target must be in a list or tuple"); default: - return compiler_error(c, + return compiler_error(c, loc, "can't use starred expression here"); } break; @@ -5819,11 +5925,11 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) if (n == 0) { return 0; } - ADDOP_I(c, BUILD_SLICE, n); + ADDOP_I(c, loc, BUILD_SLICE, n); break; } case Name_kind: - return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx); + return compiler_nameop(c, loc, e->v.Name.id, e->v.Name.ctx); /* child nodes of List and Tuple will have expr_context set */ case List_kind: return compiler_list(c, e); @@ -5836,10 +5942,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) static int compiler_visit_expr(struct compiler *c, expr_ty e) { - struct location old_loc = c->u->u_loc; - SET_LOC(c, e); int res = compiler_visit_expr1(c, e); - c->u->u_loc = old_loc; return res; } @@ -5856,15 +5959,14 @@ compiler_augassign(struct compiler *c, stmt_ty s) assert(s->kind == AugAssign_kind); expr_ty e = s->v.AugAssign.target; - struct location old_loc = c->u->u_loc; - SET_LOC(c, e); + location loc = LOC(e); switch (e->kind) { case Attribute_kind: VISIT(c, expr, e->v.Attribute.value); - ADDOP_I(c, COPY, 1); - update_start_location_to_match_attr(c, e); - ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names); + ADDOP_I(c, loc, COPY, 1); + loc = update_start_location_to_match_attr(c, loc, e); + ADDOP_NAME(c, loc, LOAD_ATTR, e->v.Attribute.attr, names); break; case Subscript_kind: VISIT(c, expr, e->v.Subscript.value); @@ -5872,20 +5974,20 @@ compiler_augassign(struct compiler *c, stmt_ty s) if (!compiler_slice(c, e->v.Subscript.slice)) { return 0; } - ADDOP_I(c, COPY, 3); - ADDOP_I(c, COPY, 3); - ADDOP_I(c, COPY, 3); - ADDOP(c, BINARY_SLICE); + ADDOP_I(c, loc, COPY, 3); + ADDOP_I(c, loc, COPY, 3); + ADDOP_I(c, loc, COPY, 3); + ADDOP(c, loc, BINARY_SLICE); } else { VISIT(c, expr, e->v.Subscript.slice); - ADDOP_I(c, COPY, 2); - ADDOP_I(c, COPY, 2); - ADDOP(c, BINARY_SUBSCR); + ADDOP_I(c, loc, COPY, 2); + ADDOP_I(c, loc, COPY, 2); + ADDOP(c, loc, BINARY_SUBSCR); } break; case Name_kind: - if (!compiler_nameop(c, e->v.Name.id, Load)) + if (!compiler_nameop(c, loc, e->v.Name.id, Load)) return 0; break; default: @@ -5895,34 +5997,34 @@ compiler_augassign(struct compiler *c, stmt_ty s) return 0; } - c->u->u_loc = old_loc; + loc = LOC(s); VISIT(c, expr, 
s->v.AugAssign.value); - ADDOP_INPLACE(c, s->v.AugAssign.op); + ADDOP_INPLACE(c, loc, s->v.AugAssign.op); - SET_LOC(c, e); + loc = LOC(e); switch (e->kind) { case Attribute_kind: - update_start_location_to_match_attr(c, e); - ADDOP_I(c, SWAP, 2); - ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names); + loc = update_start_location_to_match_attr(c, loc, e); + ADDOP_I(c, loc, SWAP, 2); + ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names); break; case Subscript_kind: if (is_two_element_slice(e->v.Subscript.slice)) { - ADDOP_I(c, SWAP, 4); - ADDOP_I(c, SWAP, 3); - ADDOP_I(c, SWAP, 2); - ADDOP(c, STORE_SLICE); + ADDOP_I(c, loc, SWAP, 4); + ADDOP_I(c, loc, SWAP, 3); + ADDOP_I(c, loc, SWAP, 2); + ADDOP(c, loc, STORE_SLICE); } else { - ADDOP_I(c, SWAP, 3); - ADDOP_I(c, SWAP, 2); - ADDOP(c, STORE_SUBSCR); + ADDOP_I(c, loc, SWAP, 3); + ADDOP_I(c, loc, SWAP, 2); + ADDOP(c, loc, STORE_SUBSCR); } break; case Name_kind: - return compiler_nameop(c, e->v.Name.id, Store); + return compiler_nameop(c, loc, e->v.Name.id, Store); default: Py_UNREACHABLE(); } @@ -5933,7 +6035,7 @@ static int check_ann_expr(struct compiler *c, expr_ty e) { VISIT(c, expr, e); - ADDOP(c, POP_TOP); + ADDOP(c, LOC(e), POP_TOP); return 1; } @@ -5942,7 +6044,7 @@ check_annotation(struct compiler *c, stmt_ty s) { /* Annotations of complex targets does not produce anything under annotations future */ - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { return 1; } @@ -5989,6 +6091,7 @@ check_ann_subscr(struct compiler *c, expr_ty e) static int compiler_annassign(struct compiler *c, stmt_ty s) { + location loc = LOC(s); expr_ty targ = s->v.AnnAssign.target; PyObject* mangled; @@ -6001,26 +6104,26 @@ compiler_annassign(struct compiler *c, stmt_ty s) } switch (targ->kind) { case Name_kind: - if (forbidden_name(c, targ->v.Name.id, Store)) + if (forbidden_name(c, loc, targ->v.Name.id, Store)) return 0; /* If we have a simple name in a module or class, store annotation. */ if (s->v.AnnAssign.simple && (c->u->u_scope_type == COMPILER_SCOPE_MODULE || c->u->u_scope_type == COMPILER_SCOPE_CLASS)) { - if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) { + if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, s->v.AnnAssign.annotation) } else { VISIT(c, expr, s->v.AnnAssign.annotation); } - ADDOP_NAME(c, LOAD_NAME, &_Py_ID(__annotations__), names); + ADDOP_NAME(c, loc, LOAD_NAME, &_Py_ID(__annotations__), names); mangled = _Py_Mangle(c->u->u_private, targ->v.Name.id); - ADDOP_LOAD_CONST_NEW(c, mangled); - ADDOP(c, STORE_SUBSCR); + ADDOP_LOAD_CONST_NEW(c, loc, mangled); + ADDOP(c, loc, STORE_SUBSCR); } break; case Attribute_kind: - if (forbidden_name(c, targ->v.Attribute.attr, Store)) + if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) return 0; if (!s->v.AnnAssign.value && !check_ann_expr(c, targ->v.Attribute.value)) { @@ -6052,7 +6155,8 @@ compiler_annassign(struct compiler *c, stmt_ty s) */ static int -compiler_error(struct compiler *c, const char *format, ...) +compiler_error(struct compiler *c, location loc, + const char *format, ...) { va_list vargs; va_start(vargs, format); @@ -6061,22 +6165,20 @@ compiler_error(struct compiler *c, const char *format, ...) 
if (msg == NULL) { return 0; } - PyObject *loc = PyErr_ProgramTextObject(c->c_filename, c->u->u_loc.lineno); - if (loc == NULL) { - Py_INCREF(Py_None); - loc = Py_None; + PyObject *loc_obj = PyErr_ProgramTextObject(c->c_filename, loc.lineno); + if (loc_obj == NULL) { + loc_obj = Py_NewRef(Py_None); } - struct location u_loc = c->u->u_loc; PyObject *args = Py_BuildValue("O(OiiOii)", msg, c->c_filename, - u_loc.lineno, u_loc.col_offset + 1, loc, - u_loc.end_lineno, u_loc.end_col_offset + 1); + loc.lineno, loc.col_offset + 1, loc_obj, + loc.end_lineno, loc.end_col_offset + 1); Py_DECREF(msg); if (args == NULL) { goto exit; } PyErr_SetObject(PyExc_SyntaxError, args); exit: - Py_DECREF(loc); + Py_DECREF(loc_obj); Py_XDECREF(args); return 0; } @@ -6086,7 +6188,8 @@ compiler_error(struct compiler *c, const char *format, ...) and returns 0. */ static int -compiler_warn(struct compiler *c, const char *format, ...) +compiler_warn(struct compiler *c, location loc, + const char *format, ...) { va_list vargs; va_start(vargs, format); @@ -6096,14 +6199,14 @@ compiler_warn(struct compiler *c, const char *format, ...) return 0; } if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename, - c->u->u_loc.lineno, NULL, NULL) < 0) + loc.lineno, NULL, NULL) < 0) { if (PyErr_ExceptionMatches(PyExc_SyntaxWarning)) { /* Replace the SyntaxWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); assert(PyUnicode_AsUTF8(msg) != NULL); - compiler_error(c, PyUnicode_AsUTF8(msg)); + compiler_error(c, loc, PyUnicode_AsUTF8(msg)); } Py_DECREF(msg); return 0; @@ -6115,6 +6218,7 @@ compiler_warn(struct compiler *c, const char *format, ...) static int compiler_subscript(struct compiler *c, expr_ty e) { + location loc = LOC(e); expr_context_ty ctx = e->v.Subscript.ctx; int op = 0; @@ -6133,11 +6237,11 @@ compiler_subscript(struct compiler *c, expr_ty e) return 0; } if (ctx == Load) { - ADDOP(c, BINARY_SLICE); + ADDOP(c, loc, BINARY_SLICE); } else { assert(ctx == Store); - ADDOP(c, STORE_SLICE); + ADDOP(c, loc, STORE_SLICE); } } else { @@ -6148,7 +6252,7 @@ compiler_subscript(struct compiler *c, expr_ty e) case Del: op = DELETE_SUBSCR; break; } assert(op); - ADDOP(c, op); + ADDOP(c, loc, op); } return 1; } @@ -6166,14 +6270,14 @@ compiler_slice(struct compiler *c, expr_ty s) VISIT(c, expr, s->v.Slice.lower); } else { - ADDOP_LOAD_CONST(c, Py_None); + ADDOP_LOAD_CONST(c, LOC(s), Py_None); } if (s->v.Slice.upper) { VISIT(c, expr, s->v.Slice.upper); } else { - ADDOP_LOAD_CONST(c, Py_None); + ADDOP_LOAD_CONST(c, LOC(s), Py_None); } if (s->v.Slice.step) { @@ -6230,19 +6334,21 @@ ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n) // Use op to jump to the correct fail_pop block. static int -jump_to_fail_pop(struct compiler *c, pattern_context *pc, int op) +jump_to_fail_pop(struct compiler *c, location loc, + pattern_context *pc, int op) { // Pop any items on the top of the stack, plus any objects we were going to // capture on success: Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores); RETURN_IF_FALSE(ensure_fail_pop(c, pc, pops)); - ADDOP_JUMP(c, op, pc->fail_pop[pops]); + ADDOP_JUMP(c, loc, op, pc->fail_pop[pops]); return 1; } // Build all of the fail_pop blocks and reset fail_pop. 
static int -emit_and_reset_fail_pop(struct compiler *c, pattern_context *pc) +emit_and_reset_fail_pop(struct compiler *c, location loc, + pattern_context *pc) { if (!pc->fail_pop_size) { assert(pc->fail_pop == NULL); @@ -6250,7 +6356,7 @@ emit_and_reset_fail_pop(struct compiler *c, pattern_context *pc) } while (--pc->fail_pop_size) { USE_LABEL(c, pc->fail_pop[pc->fail_pop_size]); - if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, COMPILER_LOC(c))) { + if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc)) { pc->fail_pop_size = 0; PyObject_Free(pc->fail_pop); pc->fail_pop = NULL; @@ -6264,29 +6370,31 @@ emit_and_reset_fail_pop(struct compiler *c, pattern_context *pc) } static int -compiler_error_duplicate_store(struct compiler *c, identifier n) +compiler_error_duplicate_store(struct compiler *c, location loc, identifier n) { - return compiler_error(c, "multiple assignments to name %R in pattern", n); + return compiler_error(c, loc, + "multiple assignments to name %R in pattern", n); } // Duplicate the effect of 3.10's ROT_* instructions using SWAPs. static int -pattern_helper_rotate(struct compiler *c, Py_ssize_t count) +pattern_helper_rotate(struct compiler *c, location loc, Py_ssize_t count) { while (1 < count) { - ADDOP_I(c, SWAP, count--); + ADDOP_I(c, loc, SWAP, count--); } return 1; } static int -pattern_helper_store_name(struct compiler *c, identifier n, pattern_context *pc) +pattern_helper_store_name(struct compiler *c, location loc, + identifier n, pattern_context *pc) { if (n == NULL) { - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); return 1; } - if (forbidden_name(c, n, Store)) { + if (forbidden_name(c, loc, n, Store)) { return 0; } // Can't assign to the same name twice: @@ -6295,17 +6403,18 @@ pattern_helper_store_name(struct compiler *c, identifier n, pattern_context *pc) return 0; } if (duplicate) { - return compiler_error_duplicate_store(c, n); + return compiler_error_duplicate_store(c, loc, n); } // Rotate this object underneath any items we need to preserve: Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1; - RETURN_IF_FALSE(pattern_helper_rotate(c, rotations)); + RETURN_IF_FALSE(pattern_helper_rotate(c, loc, rotations)); return !PyList_Append(pc->stores, n); } static int -pattern_unpack_helper(struct compiler *c, asdl_pattern_seq *elts) +pattern_unpack_helper(struct compiler *c, location loc, + asdl_pattern_seq *elts) { Py_ssize_t n = asdl_seq_LEN(elts); int seen_star = 0; @@ -6314,28 +6423,29 @@ pattern_unpack_helper(struct compiler *c, asdl_pattern_seq *elts) if (elt->kind == MatchStar_kind && !seen_star) { if ((i >= (1 << 8)) || (n-i-1 >= (INT_MAX >> 8))) - return compiler_error(c, + return compiler_error(c, loc, "too many expressions in " "star-unpacking sequence pattern"); - ADDOP_I(c, UNPACK_EX, (i + ((n-i-1) << 8))); + ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8))); seen_star = 1; } else if (elt->kind == MatchStar_kind) { - return compiler_error(c, + return compiler_error(c, loc, "multiple starred expressions in sequence pattern"); } } if (!seen_star) { - ADDOP_I(c, UNPACK_SEQUENCE, n); + ADDOP_I(c, loc, UNPACK_SEQUENCE, n); } return 1; } static int -pattern_helper_sequence_unpack(struct compiler *c, asdl_pattern_seq *patterns, - Py_ssize_t star, pattern_context *pc) +pattern_helper_sequence_unpack(struct compiler *c, location loc, + asdl_pattern_seq *patterns, Py_ssize_t star, + pattern_context *pc) { - RETURN_IF_FALSE(pattern_unpack_helper(c, patterns)); + RETURN_IF_FALSE(pattern_unpack_helper(c, loc, patterns)); Py_ssize_t size = 
asdl_seq_LEN(patterns); // We've now got a bunch of new subjects on the stack. They need to remain // there after each subpattern match: @@ -6353,8 +6463,9 @@ pattern_helper_sequence_unpack(struct compiler *c, asdl_pattern_seq *patterns, // UNPACK_SEQUENCE / UNPACK_EX. This is more efficient for patterns with a // starred wildcard like [first, *_] / [first, *_, last] / [*_, last] / etc. static int -pattern_helper_sequence_subscr(struct compiler *c, asdl_pattern_seq *patterns, - Py_ssize_t star, pattern_context *pc) +pattern_helper_sequence_subscr(struct compiler *c, location loc, + asdl_pattern_seq *patterns, Py_ssize_t star, + pattern_context *pc) { // We need to keep the subject around for extracting elements: pc->on_top++; @@ -6368,29 +6479,30 @@ pattern_helper_sequence_subscr(struct compiler *c, asdl_pattern_seq *patterns, assert(WILDCARD_STAR_CHECK(pattern)); continue; } - ADDOP_I(c, COPY, 1); + ADDOP_I(c, loc, COPY, 1); if (i < star) { - ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(i)); + ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(i)); } else { // The subject may not support negative indexing! Compute a // nonnegative index: - ADDOP(c, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size - i)); - ADDOP_BINARY(c, Sub); + ADDOP(c, loc, GET_LEN); + ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(size - i)); + ADDOP_BINARY(c, loc, Sub); } - ADDOP(c, BINARY_SUBSCR); + ADDOP(c, loc, BINARY_SUBSCR); RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc)); } // Pop the subject, we're done with it: pc->on_top--; - ADDOP(c, POP_TOP); + ADDOP(c, loc, POP_TOP); return 1; } // Like compiler_pattern, but turn off checks for irrefutability. static int -compiler_pattern_subpattern(struct compiler *c, pattern_ty p, pattern_context *pc) +compiler_pattern_subpattern(struct compiler *c, + pattern_ty p, pattern_context *pc) { int allow_irrefutable = pc->allow_irrefutable; pc->allow_irrefutable = 1; @@ -6408,20 +6520,20 @@ compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc) if (!pc->allow_irrefutable) { if (p->v.MatchAs.name) { const char *e = "name capture %R makes remaining patterns unreachable"; - return compiler_error(c, e, p->v.MatchAs.name); + return compiler_error(c, LOC(p), e, p->v.MatchAs.name); } const char *e = "wildcard makes remaining patterns unreachable"; - return compiler_error(c, e); + return compiler_error(c, LOC(p), e); } - return pattern_helper_store_name(c, p->v.MatchAs.name, pc); + return pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc); } // Need to make a copy for (possibly) storing later: pc->on_top++; - ADDOP_I(c, COPY, 1); + ADDOP_I(c, LOC(p), COPY, 1); RETURN_IF_FALSE(compiler_pattern(c, p->v.MatchAs.pattern, pc)); // Success! 
Store it: pc->on_top--; - RETURN_IF_FALSE(pattern_helper_store_name(c, p->v.MatchAs.name, pc)); + RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc)); return 1; } @@ -6429,7 +6541,7 @@ static int compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchStar_kind); - RETURN_IF_FALSE(pattern_helper_store_name(c, p->v.MatchStar.name, pc)); + RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc)); return 1; } @@ -6441,15 +6553,15 @@ validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_ Py_ssize_t nattrs = asdl_seq_LEN(attrs); for (Py_ssize_t i = 0; i < nattrs; i++) { identifier attr = ((identifier)asdl_seq_GET(attrs, i)); - SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, i))); - if (forbidden_name(c, attr, Store)) { + location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i)); + if (forbidden_name(c, loc, attr, Store)) { return -1; } for (Py_ssize_t j = i + 1; j < nattrs; j++) { identifier other = ((identifier)asdl_seq_GET(attrs, j)); if (!PyUnicode_Compare(attr, other)) { - SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, j))); - compiler_error(c, "attribute name repeated in class pattern: %U", attr); + location loc = LOC((pattern_ty) asdl_seq_GET(patterns, j)); + compiler_error(c, loc, "attribute name repeated in class pattern: %U", attr); return -1; } } @@ -6471,15 +6583,14 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc) // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char * e = "kwd_attrs (%d) / kwd_patterns (%d) length mismatch in class pattern"; - return compiler_error(c, e, nattrs, nkwd_patterns); + return compiler_error(c, LOC(p), e, nattrs, nkwd_patterns); } if (INT_MAX < nargs || INT_MAX < nargs + nattrs - 1) { const char *e = "too many sub-patterns in class pattern %R"; - return compiler_error(c, e, p->v.MatchClass.cls); + return compiler_error(c, LOC(p), e, p->v.MatchClass.cls); } if (nattrs) { RETURN_IF_FALSE(!validate_kwd_attrs(c, kwd_attrs, kwd_patterns)); - SET_LOC(c, p); } VISIT(c, expr, p->v.MatchClass.cls); PyObject *attr_names; @@ -6487,18 +6598,17 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc) Py_ssize_t i; for (i = 0; i < nattrs; i++) { PyObject *name = asdl_seq_GET(kwd_attrs, i); - Py_INCREF(name); - PyTuple_SET_ITEM(attr_names, i, name); - } - ADDOP_LOAD_CONST_NEW(c, attr_names); - ADDOP_I(c, MATCH_CLASS, nargs); - ADDOP_I(c, COPY, 1); - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_I(c, IS_OP, 1); + PyTuple_SET_ITEM(attr_names, i, Py_NewRef(name)); + } + ADDOP_LOAD_CONST_NEW(c, LOC(p), attr_names); + ADDOP_I(c, LOC(p), MATCH_CLASS, nargs); + ADDOP_I(c, LOC(p), COPY, 1); + ADDOP_LOAD_CONST(c, LOC(p), Py_None); + ADDOP_I(c, LOC(p), IS_OP, 1); // TOS is now a tuple of (nargs + nattrs) attributes (or None): pc->on_top++; - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); - ADDOP_I(c, UNPACK_SEQUENCE, nargs + nattrs); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, nargs + nattrs); pc->on_top += nargs + nattrs - 1; for (i = 0; i < nargs + nattrs; i++) { pc->on_top--; @@ -6512,7 +6622,7 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc) pattern = asdl_seq_GET(kwd_patterns, i - nargs); } if (WILDCARD_CHECK(pattern)) { - ADDOP(c, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); continue; } RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, 
pc)); @@ -6522,7 +6632,8 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc) } static int -compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) +compiler_pattern_mapping(struct compiler *c, pattern_ty p, + pattern_context *pc) { assert(p->kind == MatchMapping_kind); asdl_expr_seq *keys = p->v.MatchMapping.keys; @@ -6533,29 +6644,29 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char * e = "keys (%d) / patterns (%d) length mismatch in mapping pattern"; - return compiler_error(c, e, size, npatterns); + return compiler_error(c, LOC(p), e, size, npatterns); } // We have a double-star target if "rest" is set PyObject *star_target = p->v.MatchMapping.rest; // We need to keep the subject on top during the mapping and length checks: pc->on_top++; - ADDOP(c, MATCH_MAPPING); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), MATCH_MAPPING); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); if (!size && !star_target) { // If the pattern is just "{}", we're done! Pop the subject: pc->on_top--; - ADDOP(c, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); return 1; } if (size) { // If the pattern has any keys in it, perform a length check: - ADDOP(c, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size)); - ADDOP_COMPARE(c, GtE); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size)); + ADDOP_COMPARE(c, LOC(p), GtE); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } if (INT_MAX < size - 1) { - return compiler_error(c, "too many sub-patterns in mapping pattern"); + return compiler_error(c, LOC(p), "too many sub-patterns in mapping pattern"); } // Collect all of the keys into a tuple for MATCH_KEYS and // **rest. 
They can either be dotted names or literals: @@ -6573,8 +6684,8 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) if (key == NULL) { const char *e = "can't use NULL keys in MatchMapping " "(set 'rest' parameter instead)"; - SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, i))); - compiler_error(c, e); + location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i)); + compiler_error(c, loc, e); goto error; } @@ -6585,7 +6696,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) } if (in_seen) { const char *e = "mapping pattern checks duplicate key (%R)"; - compiler_error(c, e, key->v.Constant.value); + compiler_error(c, LOC(p), e, key->v.Constant.value); goto error; } if (PySet_Add(seen, key->v.Constant.value)) { @@ -6595,7 +6706,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) else if (key->kind != Attribute_kind) { const char *e = "mapping pattern keys may only match literals and attribute lookups"; - compiler_error(c, e); + compiler_error(c, LOC(p), e); goto error; } if (!compiler_visit_expr(c, key)) { @@ -6606,17 +6717,17 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) // all keys have been checked; there are no duplicates Py_DECREF(seen); - ADDOP_I(c, BUILD_TUPLE, size); - ADDOP(c, MATCH_KEYS); + ADDOP_I(c, LOC(p), BUILD_TUPLE, size); + ADDOP(c, LOC(p), MATCH_KEYS); // There's now a tuple of keys and a tuple of values on top of the subject: pc->on_top += 2; - ADDOP_I(c, COPY, 1); - ADDOP_LOAD_CONST(c, Py_None); - ADDOP_I(c, IS_OP, 1); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP_I(c, LOC(p), COPY, 1); + ADDOP_LOAD_CONST(c, LOC(p), Py_None); + ADDOP_I(c, LOC(p), IS_OP, 1); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); // So far so good. Use that tuple of values on the stack to match // sub-patterns against: - ADDOP_I(c, UNPACK_SEQUENCE, size); + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size); pc->on_top += size - 1; for (Py_ssize_t i = 0; i < size; i++) { pc->on_top--; @@ -6633,20 +6744,20 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc) // rest = dict(TOS1) // for key in TOS: // del rest[key] - ADDOP_I(c, BUILD_MAP, 0); // [subject, keys, empty] - ADDOP_I(c, SWAP, 3); // [empty, keys, subject] - ADDOP_I(c, DICT_UPDATE, 2); // [copy, keys] - ADDOP_I(c, UNPACK_SEQUENCE, size); // [copy, keys...] + ADDOP_I(c, LOC(p), BUILD_MAP, 0); // [subject, keys, empty] + ADDOP_I(c, LOC(p), SWAP, 3); // [empty, keys, subject] + ADDOP_I(c, LOC(p), DICT_UPDATE, 2); // [copy, keys] + ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size); // [copy, keys...] while (size) { - ADDOP_I(c, COPY, 1 + size--); // [copy, keys..., copy] - ADDOP_I(c, SWAP, 2); // [copy, keys..., copy, key] - ADDOP(c, DELETE_SUBSCR); // [copy, keys...] + ADDOP_I(c, LOC(p), COPY, 1 + size--); // [copy, keys..., copy] + ADDOP_I(c, LOC(p), SWAP, 2); // [copy, keys..., copy, key] + ADDOP(c, LOC(p), DELETE_SUBSCR); // [copy, keys...] } - RETURN_IF_FALSE(pattern_helper_store_name(c, star_target, pc)); + RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), star_target, pc)); } else { - ADDOP(c, POP_TOP); // Tuple of keys. - ADDOP(c, POP_TOP); // Subject. + ADDOP(c, LOC(p), POP_TOP); // Tuple of keys. + ADDOP(c, LOC(p), POP_TOP); // Subject. } return 1; @@ -6672,7 +6783,6 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) // NOTE: We can't use returning macros anymore! goto error on error. 
for (Py_ssize_t i = 0; i < size; i++) { pattern_ty alt = asdl_seq_GET(p->v.MatchOr.patterns, i); - SET_LOC(c, alt); PyObject *pc_stores = PyList_New(0); if (pc_stores == NULL) { goto error; @@ -6683,7 +6793,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) pc->fail_pop = NULL; pc->fail_pop_size = 0; pc->on_top = 0; - if (!cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, COMPILER_LOC(c)) || + if (!cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) || !compiler_pattern(c, alt, pc)) { goto error; } @@ -6694,8 +6804,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) // for the others (they can't bind a different set of names, and // might need to be reordered): assert(control == NULL); - control = pc->stores; - Py_INCREF(control); + control = Py_NewRef(pc->stores); } else if (nstores != PyList_GET_SIZE(control)) { goto diff; @@ -6739,7 +6848,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) // Do the same thing to the stack, using several // rotations: while (rotations--) { - if (!pattern_helper_rotate(c, icontrol + 1)){ + if (!pattern_helper_rotate(c, LOC(alt), icontrol + 1)){ goto error; } } @@ -6747,8 +6856,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) } } assert(control); - if (!cfg_builder_addop_j(CFG_BUILDER(c), JUMP, end, COMPILER_LOC(c)) || - !emit_and_reset_fail_pop(c, pc)) + if (!cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) || + !emit_and_reset_fail_pop(c, LOC(alt), pc)) { goto error; } @@ -6759,7 +6868,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) // Need to NULL this for the PyObject_Free call in the error block. old_pc.fail_pop = NULL; // No match. Pop the remaining copy of the subject and fail: - if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, COMPILER_LOC(c)) || !jump_to_fail_pop(c, pc, JUMP)) { + if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) || + !jump_to_fail_pop(c, LOC(p), pc, JUMP)) { goto error; } @@ -6774,7 +6884,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores); for (Py_ssize_t i = 0; i < nstores; i++) { // Rotate this capture to its proper place on the stack: - if (!pattern_helper_rotate(c, nrots)) { + if (!pattern_helper_rotate(c, LOC(p), nrots)) { goto error; } // Update the list of previous stores with this new name, checking for @@ -6785,7 +6895,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) goto error; } if (dupe) { - compiler_error_duplicate_store(c, name); + compiler_error_duplicate_store(c, LOC(p), name); goto error; } if (PyList_Append(pc->stores, name)) { @@ -6796,10 +6906,10 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) Py_DECREF(control); // NOTE: Returning macros are safe again. 
// Pop the copy of the subject: - ADDOP(c, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); return 1; diff: - compiler_error(c, "alternative patterns bind different names"); + compiler_error(c, LOC(p), "alternative patterns bind different names"); error: PyObject_Free(old_pc.fail_pop); Py_DECREF(old_pc.stores); @@ -6809,7 +6919,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc) static int -compiler_pattern_sequence(struct compiler *c, pattern_ty p, pattern_context *pc) +compiler_pattern_sequence(struct compiler *c, pattern_ty p, + pattern_context *pc) { assert(p->kind == MatchSequence_kind); asdl_pattern_seq *patterns = p->v.MatchSequence.patterns; @@ -6823,7 +6934,7 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p, pattern_context *pc) if (pattern->kind == MatchStar_kind) { if (star >= 0) { const char *e = "multiple starred names in sequence pattern"; - return compiler_error(c, e); + return compiler_error(c, LOC(p), e); } star_wildcard = WILDCARD_STAR_CHECK(pattern); only_wildcard &= star_wildcard; @@ -6834,33 +6945,33 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p, pattern_context *pc) } // We need to keep the subject on top during the sequence and length checks: pc->on_top++; - ADDOP(c, MATCH_SEQUENCE); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), MATCH_SEQUENCE); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); if (star < 0) { // No star: len(subject) == size - ADDOP(c, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size)); - ADDOP_COMPARE(c, Eq); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size)); + ADDOP_COMPARE(c, LOC(p), Eq); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } else if (size > 1) { // Star: len(subject) >= size - 1 - ADDOP(c, GET_LEN); - ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size - 1)); - ADDOP_COMPARE(c, GtE); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP(c, LOC(p), GET_LEN); + ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size - 1)); + ADDOP_COMPARE(c, LOC(p), GtE); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); } // Whatever comes next should consume the subject: pc->on_top--; if (only_wildcard) { // Patterns like: [] / [_] / [_, _] / [*_] / [_, *_] / [_, _, *_] / etc. 
- ADDOP(c, POP_TOP); + ADDOP(c, LOC(p), POP_TOP); } else if (star_wildcard) { - RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, patterns, star, pc)); + RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc)); } else { - RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, patterns, star, pc)); + RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc)); } return 1; } @@ -6872,11 +6983,11 @@ compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc) expr_ty value = p->v.MatchValue.value; if (!MATCH_VALUE_EXPR(value)) { const char *e = "patterns may only match literals and attribute lookups"; - return compiler_error(c, e); + return compiler_error(c, LOC(p), e); } VISIT(c, expr, value); - ADDOP_COMPARE(c, Eq); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP_COMPARE(c, LOC(p), Eq); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); return 1; } @@ -6884,16 +6995,15 @@ static int compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc) { assert(p->kind == MatchSingleton_kind); - ADDOP_LOAD_CONST(c, p->v.MatchSingleton.value); - ADDOP_COMPARE(c, Is); - RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE)); + ADDOP_LOAD_CONST(c, LOC(p), p->v.MatchSingleton.value); + ADDOP_COMPARE(c, LOC(p), Is); + RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE)); return 1; } static int compiler_pattern(struct compiler *c, pattern_ty p, pattern_context *pc) { - SET_LOC(c, p); switch (p->kind) { case MatchValue_kind: return compiler_pattern_value(c, p, pc); @@ -6915,7 +7025,7 @@ compiler_pattern(struct compiler *c, pattern_ty p, pattern_context *pc) // AST validator shouldn't let this happen, but if it does, // just fail, don't crash out of the interpreter const char *e = "invalid match pattern node in AST (kind=%d)"; - return compiler_error(c, e, p->kind); + return compiler_error(c, LOC(p), e, p->kind); } static int @@ -6929,10 +7039,9 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) int has_default = WILDCARD_CHECK(m->pattern) && 1 < cases; for (Py_ssize_t i = 0; i < cases - has_default; i++) { m = asdl_seq_GET(s->v.Match.cases, i); - SET_LOC(c, m->pattern); // Only copy the subject if we're *not* on the last case: if (i != cases - has_default - 1) { - ADDOP_I(c, COPY, 1); + ADDOP_I(c, LOC(m->pattern), COPY, 1); } RETURN_IF_FALSE(pc->stores = PyList_New(0)); // Irrefutable cases must be either guarded, last, or both: @@ -6950,7 +7059,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) Py_ssize_t nstores = PyList_GET_SIZE(pc->stores); for (Py_ssize_t n = 0; n < nstores; n++) { PyObject *name = PyList_GET_ITEM(pc->stores, n); - if (!compiler_nameop(c, name, Store)) { + if (!compiler_nameop(c, LOC(m->pattern), name, Store)) { Py_DECREF(pc->stores); return 0; } @@ -6959,35 +7068,33 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) // NOTE: Returning macros are safe again. if (m->guard) { RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0)); - RETURN_IF_FALSE(compiler_jump_if(c, m->guard, pc->fail_pop[0], 0)); + RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0)); } // Success! 
Pop the subject off, we're done with it: if (i != cases - has_default - 1) { - ADDOP(c, POP_TOP); + ADDOP(c, LOC(m->pattern), POP_TOP); } VISIT_SEQ(c, stmt, m->body); - ADDOP_JUMP(c, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); // If the pattern fails to match, we want the line number of the // cleanup to be associated with the failed pattern, not the last line // of the body - SET_LOC(c, m->pattern); - RETURN_IF_FALSE(emit_and_reset_fail_pop(c, pc)); + RETURN_IF_FALSE(emit_and_reset_fail_pop(c, LOC(m->pattern), pc)); } if (has_default) { // A trailing "case _" is common, and lets us save a bit of redundant // pushing and popping in the loop above: m = asdl_seq_GET(s->v.Match.cases, cases - 1); - SET_LOC(c, m->pattern); if (cases == 1) { // No matches. Done with the subject: - ADDOP(c, POP_TOP); + ADDOP(c, LOC(m->pattern), POP_TOP); } else { // Show line coverage for default case (it doesn't create bytecode) - ADDOP(c, NOP); + ADDOP(c, LOC(m->pattern), NOP); } if (m->guard) { - RETURN_IF_FALSE(compiler_jump_if(c, m->guard, end, 0)); + RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0)); } VISIT_SEQ(c, stmt, m->body); } @@ -7910,103 +8017,163 @@ assemble_jump_offsets(basicblock *entryblock) } -// Ensure each basicblock is only put onto the stack once. -#define MAYBE_PUSH(B) do { \ - if ((B)->b_visited == 0) { \ - *(*stack_top)++ = (B); \ - (B)->b_visited = 1; \ - } \ - } while (0) +// helper functions for add_checks_for_loads_of_unknown_variables +static inline void +maybe_push(basicblock *b, uint64_t unsafe_mask, basicblock ***sp) +{ + // Push b if the unsafe mask is giving us any new information. + // To avoid overflowing the stack, only allow each block once. + // Use b->b_visited=1 to mean that b is currently on the stack. + uint64_t both = b->b_unsafe_locals_mask | unsafe_mask; + if (b->b_unsafe_locals_mask != both) { + b->b_unsafe_locals_mask = both; + // More work left to do. + if (!b->b_visited) { + // not on the stack, so push it. + *(*sp)++ = b; + b->b_visited = 1; + } + } +} static void -scan_block_for_local(int target, basicblock *b, bool unsafe_to_start, - basicblock ***stack_top) +scan_block_for_locals(basicblock *b, basicblock ***sp) { - bool unsafe = unsafe_to_start; + // bit i is set if local i is potentially uninitialized + uint64_t unsafe_mask = b->b_unsafe_locals_mask; for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; assert(instr->i_opcode != EXTENDED_ARG); - assert(instr->i_opcode != EXTENDED_ARG_QUICK); - assert(instr->i_opcode != LOAD_FAST__LOAD_FAST); - assert(instr->i_opcode != STORE_FAST__LOAD_FAST); - assert(instr->i_opcode != LOAD_CONST__LOAD_FAST); - assert(instr->i_opcode != STORE_FAST__STORE_FAST); - assert(instr->i_opcode != LOAD_FAST__LOAD_CONST); - if (unsafe && instr->i_except != NULL) { - MAYBE_PUSH(instr->i_except); - } - if (instr->i_oparg != target) { + assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); + if (instr->i_except != NULL) { + maybe_push(instr->i_except, unsafe_mask, sp); + } + if (instr->i_oparg >= 64) { continue; } + assert(instr->i_oparg >= 0); + uint64_t bit = (uint64_t)1 << instr->i_oparg; switch (instr->i_opcode) { + case DELETE_FAST: + unsafe_mask |= bit; + break; + case STORE_FAST: + unsafe_mask &= ~bit; + break; case LOAD_FAST_CHECK: - // if this doesn't raise, then var is defined - unsafe = false; + // If this doesn't raise, then the local is defined. 
+ unsafe_mask &= ~bit; break; case LOAD_FAST: - if (unsafe) { + if (unsafe_mask & bit) { instr->i_opcode = LOAD_FAST_CHECK; } - unsafe = false; - break; - case STORE_FAST: - unsafe = false; - break; - case DELETE_FAST: - unsafe = true; + unsafe_mask &= ~bit; break; } } - if (unsafe) { - // unsafe at end of this block, - // so unsafe at start of next blocks - if (b->b_next && BB_HAS_FALLTHROUGH(b)) { - MAYBE_PUSH(b->b_next); - } - struct instr *last = basicblock_last_instr(b); - if (last != NULL) { - if (is_jump(last)) { - assert(last->i_target != NULL); - MAYBE_PUSH(last->i_target); + if (b->b_next && BB_HAS_FALLTHROUGH(b)) { + maybe_push(b->b_next, unsafe_mask, sp); + } + struct instr *last = basicblock_last_instr(b); + if (last && is_jump(last)) { + assert(last->i_target != NULL); + maybe_push(last->i_target, unsafe_mask, sp); + } +} + +static int +fast_scan_many_locals(basicblock *entryblock, int nlocals) +{ + assert(nlocals > 64); + Py_ssize_t *states = PyMem_Calloc(nlocals - 64, sizeof(Py_ssize_t)); + if (states == NULL) { + PyErr_NoMemory(); + return -1; + } + Py_ssize_t blocknum = 0; + // state[i - 64] == blocknum if local i is guaranteed to + // be initialized, i.e., if it has had a previous LOAD_FAST or + // STORE_FAST within that basicblock (not followed by DELETE_FAST). + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + blocknum++; + for (int i = 0; i < b->b_iused; i++) { + struct instr *instr = &b->b_instr[i]; + assert(instr->i_opcode != EXTENDED_ARG); + assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); + int arg = instr->i_oparg; + if (arg < 64) { + continue; + } + assert(arg >= 0); + switch (instr->i_opcode) { + case DELETE_FAST: + states[arg - 64] = blocknum - 1; + break; + case STORE_FAST: + states[arg - 64] = blocknum; + break; + case LOAD_FAST: + if (states[arg - 64] != blocknum) { + instr->i_opcode = LOAD_FAST_CHECK; + } + states[arg - 64] = blocknum; + break; + case LOAD_FAST_CHECK: + Py_UNREACHABLE(); } } } + PyMem_Free(states); + return 0; } -#undef MAYBE_PUSH static int add_checks_for_loads_of_uninitialized_variables(basicblock *entryblock, struct compiler *c) { + int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames); + if (nlocals == 0) { + return 0; + } + if (nlocals > 64) { + // To avoid O(nlocals**2) compilation, locals beyond the first + // 64 are only analyzed one basicblock at a time: initialization + // info is not passed between basicblocks. + if (fast_scan_many_locals(entryblock, nlocals) < 0) { + return -1; + } + nlocals = 64; + } basicblock **stack = make_cfg_traversal_stack(entryblock); if (stack == NULL) { return -1; } - Py_ssize_t nparams = PyList_GET_SIZE(c->u->u_ste->ste_varnames); - int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames); - for (int target = 0; target < nlocals; target++) { - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - b->b_visited = 0; - } - basicblock **stack_top = stack; + basicblock **sp = stack; - // First pass: find the relevant DFS starting points: - // the places where "being uninitialized" originates, - // which are the entry block and any DELETE_FAST statements. - if (target >= nparams) { - // only non-parameter locals start out uninitialized. - *(stack_top++) = entryblock; - entryblock->b_visited = 1; - } - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - scan_block_for_local(target, b, false, &stack_top); - } + // First origin of being uninitialized: + // The non-parameter locals in the entry block. 
+ int nparams = (int)PyList_GET_SIZE(c->u->u_ste->ste_varnames); + uint64_t start_mask = 0; + for (int i = nparams; i < nlocals; i++) { + start_mask |= (uint64_t)1 << i; + } + maybe_push(entryblock, start_mask, &sp); - // Second pass: Depth-first search to propagate uncertainty - while (stack_top > stack) { - basicblock *b = *--stack_top; - scan_block_for_local(target, b, true, &stack_top); - } + // Second origin of being uninitialized: + // There could be DELETE_FAST somewhere, so + // be sure to scan each basicblock at least once. + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + scan_block_for_locals(b, &sp); + } + + // Now propagate the uncertainty from the origins we found: Use + // LOAD_FAST_CHECK for any LOAD_FAST where the local could be undefined. + while (sp > stack) { + basicblock *b = *--sp; + // mark as no longer on stack + b->b_visited = 0; + scan_block_for_locals(b, &sp); } PyMem_Free(stack); return 0; @@ -8023,10 +8190,9 @@ dict_keys_inorder(PyObject *dict, Py_ssize_t offset) return NULL; while (PyDict_Next(dict, &pos, &k, &v)) { i = PyLong_AS_LONG(v); - Py_INCREF(k); assert((i - offset) < size); assert((i - offset) >= 0); - PyTuple_SET_ITEM(tuple, i - offset, k); + PyTuple_SET_ITEM(tuple, i - offset, Py_NewRef(k)); } return tuple; } @@ -8048,10 +8214,9 @@ consts_dict_keys_inorder(PyObject *dict) if (PyTuple_CheckExact(k)) { k = PyTuple_GET_ITEM(k, 1); } - Py_INCREF(k); assert(i < size); assert(i >= 0); - PyList_SET_ITEM(consts, i, k); + PyList_SET_ITEM(consts, i, Py_NewRef(k)); } return consts; } @@ -8078,7 +8243,7 @@ compute_code_flags(struct compiler *c) } /* (Only) inherit compilerflags in PyCF_MASK */ - flags |= (c->c_flags->cf_flags & PyCF_MASK); + flags |= (c->c_flags.cf_flags & PyCF_MASK); if ((IS_TOP_LEVEL_AWAIT(c)) && ste->ste_coroutine && @@ -8115,9 +8280,7 @@ merge_const_one(PyObject *const_cache, PyObject **obj) t = PyTuple_GET_ITEM(t, 1); } - Py_INCREF(t); - Py_DECREF(*obj); - *obj = t; + Py_SETREF(*obj, Py_NewRef(t)); return 1; } @@ -8308,7 +8471,7 @@ static int optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache); static int -trim_unused_consts(basicblock *entryblock, PyObject *consts); +remove_unused_consts(basicblock *entryblock, PyObject *consts); /* Duplicates exit BBs, so that line numbers can be propagated to them */ static int @@ -8347,18 +8510,6 @@ build_cellfixedoffsets(struct compiler *c) return fixed; } -static inline int -insert_instruction(basicblock *block, int pos, struct instr *instr) { - if (basicblock_next_instr(block) < 0) { - return -1; - } - for (int i = block->b_iused - 1; i > pos; i--) { - block->b_instr[i] = block->b_instr[i-1]; - } - block->b_instr[pos] = *instr; - return 0; -} - static int insert_prefix_instructions(struct compiler *c, basicblock *entryblock, int *fixed, int nfreevars, int code_flags) @@ -8492,7 +8643,6 @@ fix_cell_offsets(struct compiler *c, basicblock *entryblock, int *fixedmap) struct instr *inst = &b->b_instr[i]; // This is called before extended args are generated. 
assert(inst->i_opcode != EXTENDED_ARG); - assert(inst->i_opcode != EXTENDED_ARG_QUICK); int oldoffset = inst->i_oparg; switch(inst->i_opcode) { case MAKE_CELL: @@ -8516,6 +8666,17 @@ static void propagate_line_numbers(basicblock *entryblock); #ifndef NDEBUG + +static bool +no_redundant_nops(cfg_builder *g) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + if (remove_redundant_nops(b) != 0) { + return false; + } + } + return true; +} + static bool no_redundant_jumps(cfg_builder *g) { for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { @@ -8584,10 +8745,10 @@ assemble(struct compiler *c, int addNone) /* Make sure every block that falls off the end returns None. */ if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) { - UNSET_LOC(c); - if (addNone) - ADDOP_LOAD_CONST(c, Py_None); - ADDOP(c, RETURN_VALUE); + if (addNone) { + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); + } + ADDOP(c, NO_LOCATION, RETURN_VALUE); } assert(PyDict_GET_SIZE(c->u->u_varnames) < INT_MAX); @@ -8662,6 +8823,9 @@ assemble(struct compiler *c, int addNone) if (add_checks_for_loads_of_uninitialized_variables(g->g_entryblock, c) < 0) { goto error; } + if (remove_unused_consts(g->g_entryblock, consts)) { + goto error; + } /** line numbers (TODO: move this before optimization stage) */ if (duplicate_exits_without_lineno(g) < 0) { @@ -8693,10 +8857,6 @@ assemble(struct compiler *c, int addNone) /* Can't modify the bytecode after computing jump offsets. */ assemble_jump_offsets(g->g_entryblock); - if (trim_unused_consts(g->g_entryblock, consts)) { - goto error; - } - /* Create assembler */ if (!assemble_init(&a, c->u->u_firstlineno)) goto error; @@ -8764,8 +8924,7 @@ get_const_value(int opcode, int oparg, PyObject *co_consts) "Internal error: failed to get value of a constant"); return NULL; } - Py_INCREF(constant); - return constant; + return Py_NewRef(constant); } /* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n @@ -9287,7 +9446,7 @@ inline_small_exit_blocks(basicblock *bb) { return 0; } -static void +static int remove_redundant_nops(basicblock *bb) { /* Remove NOPs when legal to do so. */ int dest = 0; @@ -9335,7 +9494,9 @@ remove_redundant_nops(basicblock *bb) { prev_lineno = lineno; } assert(dest <= bb->b_iused); + int num_removed = bb->b_iused - dest; bb->b_iused = dest; + return num_removed; } static int @@ -9437,7 +9598,7 @@ propagate_line_numbers(basicblock *entryblock) { continue; } - struct location prev_location = NO_LOCATION; + location prev_location = NO_LOCATION; for (int i = 0; i < b->b_iused; i++) { if (b->b_instr[i].i_loc.lineno < 0) { b->b_instr[i].i_loc = prev_location; @@ -9546,42 +9707,117 @@ optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache) b->b_iused = 0; } } - eliminate_empty_basic_blocks(g); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { remove_redundant_nops(b); } + eliminate_empty_basic_blocks(g); + assert(no_redundant_nops(g)); if (remove_redundant_jumps(g) < 0) { return -1; } return 0; } -// Remove trailing unused constants. 
+ static int -trim_unused_consts(basicblock *entryblock, PyObject *consts) +remove_unused_consts(basicblock *entryblock, PyObject *consts) { assert(PyList_CheckExact(consts)); + Py_ssize_t nconsts = PyList_GET_SIZE(consts); + if (nconsts == 0) { + return 0; /* nothing to do */ + } + + Py_ssize_t *index_map = NULL; + Py_ssize_t *reverse_index_map = NULL; + int err = 1; + index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 1; i < nconsts; i++) { + index_map[i] = -1; + } // The first constant may be docstring; keep it always. - int max_const_index = 0; + index_map[0] = 0; + + /* mark used consts */ for (basicblock *b = entryblock; b != NULL; b = b->b_next) { for (int i = 0; i < b->b_iused; i++) { - if ((b->b_instr[i].i_opcode == LOAD_CONST || - b->b_instr[i].i_opcode == KW_NAMES) && - b->b_instr[i].i_oparg > max_const_index) { - max_const_index = b->b_instr[i].i_oparg; + if (b->b_instr[i].i_opcode == LOAD_CONST || + b->b_instr[i].i_opcode == KW_NAMES) { + + int index = b->b_instr[i].i_oparg; + index_map[index] = index; } } } - if (max_const_index+1 < PyList_GET_SIZE(consts)) { - //fprintf(stderr, "removing trailing consts: max=%d, size=%d\n", - // max_const_index, (int)PyList_GET_SIZE(consts)); - if (PyList_SetSlice(consts, max_const_index+1, - PyList_GET_SIZE(consts), NULL) < 0) { - return 1; + /* now index_map[i] == i if consts[i] is used, -1 otherwise */ + + /* condense consts */ + Py_ssize_t n_used_consts = 0; + for (int i = 0; i < nconsts; i++) { + if (index_map[i] != -1) { + assert(index_map[i] == i); + index_map[n_used_consts++] = index_map[i]; } } - return 0; + if (n_used_consts == nconsts) { + /* nothing to do */ + err = 0; + goto end; + } + + /* move all used consts to the beginning of the consts list */ + assert(n_used_consts < nconsts); + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + Py_ssize_t old_index = index_map[i]; + assert(i <= old_index && old_index < nconsts); + if (i != old_index) { + PyObject *value = PyList_GET_ITEM(consts, index_map[i]); + assert(value != NULL); + PyList_SetItem(consts, i, Py_NewRef(value)); + } + } + + /* truncate the consts list at its new size */ + if (PyList_SetSlice(consts, n_used_consts, nconsts, NULL) < 0) { + goto end; + } + + /* adjust const indices in the bytecode */ + reverse_index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (reverse_index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 0; i < nconsts; i++) { + reverse_index_map[i] = -1; + } + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + assert(index_map[i] != -1); + assert(reverse_index_map[index_map[i]] == -1); + reverse_index_map[index_map[i]] = i; + } + + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + if (b->b_instr[i].i_opcode == LOAD_CONST || + b->b_instr[i].i_opcode == KW_NAMES) { + + int index = b->b_instr[i].i_oparg; + assert(reverse_index_map[index] >= 0); + assert(reverse_index_map[index] < n_used_consts); + b->b_instr[i].i_oparg = (int)reverse_index_map[index]; + } + } + } + + err = 0; +end: + PyMem_Free(index_map); + PyMem_Free(reverse_index_map); + return err; } static inline int @@ -9649,6 +9885,9 @@ duplicate_exits_without_lineno(cfg_builder *g) /* Access to compiler optimizations for unit tests. + * + * _PyCompile_CodeGen takes and AST, applies code-gen and + * returns the unoptimized CFG as an instruction list. 
* * _PyCompile_OptimizeCfg takes an instruction list, constructs * a CFG, optimizes it and converts back to an instruction list. @@ -9695,7 +9934,7 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) if (PyErr_Occurred()) { return -1; } - struct location loc; + location loc; loc.lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 2)); if (PyErr_Occurred()) { return -1; @@ -9743,8 +9982,10 @@ cfg_to_instructions(cfg_builder *g) Py_DECREF(lbl); for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; - struct location loc = instr->i_loc; - int arg = HAS_TARGET(instr->i_opcode) ? instr->i_target->b_label : instr->i_oparg; + location loc = instr->i_loc; + int arg = HAS_TARGET(instr->i_opcode) ? + instr->i_target->b_label : instr->i_oparg; + PyObject *inst_tuple = Py_BuildValue( "(iiiiii)", instr->i_opcode, arg, loc.lineno, loc.end_lineno, @@ -9767,6 +10008,52 @@ cfg_to_instructions(cfg_builder *g) return NULL; } +PyObject * +_PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, + int optimize) +{ + PyObject *res = NULL; + + if (!PyAST_Check(ast)) { + PyErr_SetString(PyExc_TypeError, "expected an AST"); + return NULL; + } + + PyArena *arena = _PyArena_New(); + if (arena == NULL) { + return NULL; + } + + mod_ty mod = PyAST_obj2mod(ast, arena, 0 /* exec */); + if (mod == NULL || !_PyAST_Validate(mod)) { + _PyArena_Free(arena); + return NULL; + } + + struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena); + if (c == NULL) { + _PyArena_Free(arena); + return NULL; + } + + if (!compiler_codegen(c, mod)) { + goto finally; + } + + cfg_builder *g = CFG_BUILDER(c); + + if (translate_jump_labels_to_targets(g->g_entryblock) < 0) { + goto finally; + } + + res = cfg_to_instructions(g); + +finally: + compiler_exit_scope(c); + compiler_free(c); + _PyArena_Free(arena); + return res; +} PyObject * _PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts) @@ -9806,6 +10093,5 @@ PyObject * PyCode_Optimize(PyObject *code, PyObject* Py_UNUSED(consts), PyObject *Py_UNUSED(names), PyObject *Py_UNUSED(lnotab_obj)) { - Py_INCREF(code); - return code; + return Py_NewRef(code); } diff --git a/Python/context.c b/Python/context.c index ef9db6a9cd063b..5d385508405ede 100644 --- a/Python/context.c +++ b/Python/context.c @@ -124,8 +124,7 @@ _PyContext_Enter(PyThreadState *ts, PyObject *octx) ctx->ctx_prev = (PyContext *)ts->context; /* borrow */ ctx->ctx_entered = 1; - Py_INCREF(ctx); - ts->context = (PyObject *)ctx; + ts->context = Py_NewRef(ctx); ts->context_ver++; return 0; @@ -400,8 +399,7 @@ context_new_from_vars(PyHamtObject *vars) return NULL; } - Py_INCREF(vars); - ctx->ctx_vars = vars; + ctx->ctx_vars = (PyHamtObject*)Py_NewRef(vars); _PyObject_GC_TRACK(ctx); return ctx; @@ -546,8 +544,7 @@ context_tp_subscript(PyContext *self, PyObject *key) PyErr_SetObject(PyExc_KeyError, key); return NULL; } - Py_INCREF(val); - return val; + return Py_NewRef(val); } static int @@ -588,11 +585,9 @@ _contextvars_Context_get_impl(PyContext *self, PyObject *key, return NULL; } if (found == 0) { - Py_INCREF(default_value); - return default_value; + return Py_NewRef(default_value); } - Py_INCREF(val); - return val; + return Py_NewRef(val); } @@ -831,11 +826,9 @@ contextvar_new(PyObject *name, PyObject *def) return NULL; } - Py_INCREF(name); - var->var_name = name; + var->var_name = Py_NewRef(name); - Py_XINCREF(def); - var->var_default = def; + var->var_default = Py_XNewRef(def); var->var_cached = NULL; var->var_cached_tsid = 0; @@ -1176,8 +1169,7 @@ 
token_tp_repr(PyContextToken *self) static PyObject * token_get_var(PyContextToken *self, void *Py_UNUSED(ignored)) { - Py_INCREF(self->tok_var); - return (PyObject *)self->tok_var; + return Py_NewRef(self->tok_var);; } static PyObject * @@ -1187,8 +1179,7 @@ token_get_old_value(PyContextToken *self, void *Py_UNUSED(ignored)) return get_token_missing(); } - Py_INCREF(self->tok_oldval); - return self->tok_oldval; + return Py_NewRef(self->tok_oldval); } static PyGetSetDef PyContextTokenType_getsetlist[] = { @@ -1228,14 +1219,11 @@ token_new(PyContext *ctx, PyContextVar *var, PyObject *val) return NULL; } - Py_INCREF(ctx); - tok->tok_ctx = ctx; + tok->tok_ctx = (PyContext*)Py_NewRef(ctx); - Py_INCREF(var); - tok->tok_var = var; + tok->tok_var = (PyContextVar*)Py_NewRef(var); - Py_XINCREF(val); - tok->tok_oldval = val; + tok->tok_oldval = Py_XNewRef(val); tok->tok_used = 0; @@ -1247,25 +1235,29 @@ token_new(PyContext *ctx, PyContextVar *var, PyObject *val) /////////////////////////// Token.MISSING -static PyObject *_token_missing; - - -typedef struct { - PyObject_HEAD -} PyContextTokenMissing; - - static PyObject * context_token_missing_tp_repr(PyObject *self) { return PyUnicode_FromString(""); } +static void +context_token_missing_tp_dealloc(_PyContextTokenMissing *Py_UNUSED(self)) +{ +#ifdef Py_DEBUG + /* The singleton is statically allocated. */ + _Py_FatalRefcountError("deallocating the token missing singleton"); +#else + return; +#endif +} + PyTypeObject _PyContextTokenMissing_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "Token.MISSING", - sizeof(PyContextTokenMissing), + sizeof(_PyContextTokenMissing), + .tp_dealloc = (destructor)context_token_missing_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT, .tp_repr = context_token_missing_tp_repr, @@ -1275,19 +1267,7 @@ PyTypeObject _PyContextTokenMissing_Type = { static PyObject * get_token_missing(void) { - if (_token_missing != NULL) { - Py_INCREF(_token_missing); - return _token_missing; - } - - _token_missing = (PyObject *)PyObject_New( - PyContextTokenMissing, &_PyContextTokenMissing_Type); - if (_token_missing == NULL) { - return NULL; - } - - Py_INCREF(_token_missing); - return _token_missing; + return Py_NewRef(&_Py_SINGLETON(context_token_missing)); } @@ -1312,15 +1292,11 @@ _PyContext_ClearFreeList(PyInterpreterState *interp) void _PyContext_Fini(PyInterpreterState *interp) { - if (_Py_IsMainInterpreter(interp)) { - Py_CLEAR(_token_missing); - } _PyContext_ClearFreeList(interp); #if defined(Py_DEBUG) && PyContext_MAXFREELIST > 0 struct _Py_context_state *state = &interp->context; state->numfree = -1; #endif - _PyHamt_Fini(interp); } diff --git a/Python/deepfreeze/README.txt b/Python/deepfreeze/README.txt index da55d4e7c7469e..276ab51143ab33 100644 --- a/Python/deepfreeze/README.txt +++ b/Python/deepfreeze/README.txt @@ -3,4 +3,4 @@ modules. Python/frozen.c depends on these files. None of these files are committed into the repo. -See Tools/scripts/freeze_modules.py for more info. +See Tools/build/freeze_modules.py for more info. diff --git a/Python/dtoa.c b/Python/dtoa.c index 733e70bc791698..1b47d83bf77a24 100644 --- a/Python/dtoa.c +++ b/Python/dtoa.c @@ -119,6 +119,7 @@ #include "Python.h" #include "pycore_dtoa.h" // _PY_SHORT_FLOAT_REPR +#include "pycore_runtime.h" // _PyRuntime #include // exit() /* if _PY_SHORT_FLOAT_REPR == 0, then don't even try to compile @@ -156,7 +157,7 @@ #endif -typedef uint32_t ULong; +// ULong is defined in pycore_dtoa.h. 
typedef int32_t Long; typedef uint64_t ULLong; @@ -171,12 +172,6 @@ typedef uint64_t ULLong; #define Bug(x) {fprintf(stderr, "%s\n", x); exit(1);} #endif -#ifndef PRIVATE_MEM -#define PRIVATE_MEM 2304 -#endif -#define PRIVATE_mem ((PRIVATE_MEM+sizeof(double)-1)/sizeof(double)) -static double private_mem[PRIVATE_mem], *pmem_next = private_mem; - #ifdef __cplusplus extern "C" { #endif @@ -298,8 +293,6 @@ BCinfo { #define FFFFFFFF 0xffffffffUL -#define Kmax 7 - /* struct Bigint is used to represent arbitrary-precision integers. These integers are stored in sign-magnitude format, with the magnitude stored as an array of base 2**32 digits. Bigints are always normalized: if x is a @@ -322,13 +315,7 @@ BCinfo { significant (x[0]) to most significant (x[wds-1]). */ -struct -Bigint { - struct Bigint *next; - int k, maxwds, sign, wds; - ULong x[1]; -}; - +// struct Bigint is defined in pycore_dtoa.h. typedef struct Bigint Bigint; #ifndef Py_USING_MEMORY_DEBUGGER @@ -352,7 +339,9 @@ typedef struct Bigint Bigint; Bfree to PyMem_Free. Investigate whether this has any significant performance on impact. */ -static Bigint *freelist[Kmax+1]; +#define freelist _PyRuntime.dtoa.freelist +#define private_mem _PyRuntime.dtoa.preallocated +#define pmem_next _PyRuntime.dtoa.preallocated_next /* Allocate space for a Bigint with up to 1<next; else { x = 1 << k; len = (sizeof(Bigint) + (x-1)*sizeof(ULong) + sizeof(double) - 1) /sizeof(double); - if (k <= Kmax && pmem_next - private_mem + len <= (Py_ssize_t)PRIVATE_mem) { + if (k <= Bigint_Kmax && + pmem_next - private_mem + len <= (Py_ssize_t)Bigint_PREALLOC_SIZE + ) { rv = (Bigint*)pmem_next; pmem_next += len; } @@ -391,7 +382,7 @@ static void Bfree(Bigint *v) { if (v) { - if (v->k > Kmax) + if (v->k > Bigint_Kmax) FREE((void*)v); else { v->next = freelist[v->k]; @@ -400,6 +391,10 @@ Bfree(Bigint *v) } } +#undef pmem_next +#undef private_mem +#undef freelist + #else /* Alternative versions of Balloc and Bfree that use PyMem_Malloc and diff --git a/Python/errors.c b/Python/errors.c index 2aa748c60c3704..05ef62246ec0a4 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -37,8 +37,7 @@ _PyErr_Restore(PyThreadState *tstate, PyObject *type, PyObject *value, if (traceback != NULL && !PyTraceBack_Check(traceback)) { /* XXX Should never happen -- fatal error instead? */ /* Well, it could be None. */ - Py_DECREF(traceback); - traceback = NULL; + Py_SETREF(traceback, NULL); } /* Save these in locals to safeguard against recursive @@ -178,8 +177,7 @@ _PyErr_SetObject(PyThreadState *tstate, PyObject *exception, PyObject *value) } if (value != NULL && PyExceptionInstance_Check(value)) tb = PyException_GetTraceback(value); - Py_XINCREF(exception); - _PyErr_Restore(tstate, exception, value, tb); + _PyErr_Restore(tstate, Py_XNewRef(exception), value, tb); } void @@ -326,8 +324,7 @@ _PyErr_NormalizeException(PyThreadState *tstate, PyObject **exc, set to NULL. */ if (!value) { - value = Py_None; - Py_INCREF(value); + value = Py_NewRef(Py_None); } /* Normalize the exception so that if the type is a class, the @@ -355,16 +352,13 @@ _PyErr_NormalizeException(PyThreadState *tstate, PyObject **exc, if (fixed_value == NULL) { goto error; } - Py_DECREF(value); - value = fixed_value; + Py_SETREF(value, fixed_value); } /* If the class of the instance doesn't exactly match the class of the type, believe the instance. 
*/ else if (inclass != type) { - Py_INCREF(inclass); - Py_DECREF(type); - type = inclass; + Py_SETREF(type, Py_NewRef(inclass)); } } *exc = type; @@ -490,13 +484,9 @@ _PyErr_GetExcInfo(PyThreadState *tstate, { _PyErr_StackItem *exc_info = _PyErr_GetTopmostException(tstate); - *p_type = get_exc_type(exc_info->exc_value); - *p_value = exc_info->exc_value; - *p_traceback = get_exc_traceback(exc_info->exc_value); - - Py_XINCREF(*p_type); - Py_XINCREF(*p_value); - Py_XINCREF(*p_traceback); + *p_type = Py_XNewRef(get_exc_type(exc_info->exc_value)); + *p_value = Py_XNewRef(exc_info->exc_value); + *p_traceback = Py_XNewRef(get_exc_traceback(exc_info->exc_value)); } PyObject* @@ -675,9 +665,9 @@ _PyErr_FormatVFromCause(PyThreadState *tstate, PyObject *exception, _PyErr_Fetch(tstate, &exc, &val2, &tb); _PyErr_NormalizeException(tstate, &exc, &val2, &tb); - Py_INCREF(val); - PyException_SetCause(val2, val); - PyException_SetContext(val2, val); + PyException_SetCause(val2, Py_NewRef(val)); + PyException_SetContext(val2, Py_NewRef(val)); + Py_DECREF(val); _PyErr_Restore(tstate, exc, val2, tb); return NULL; @@ -975,9 +965,10 @@ PyObject *PyErr_SetFromWindowsErrWithFilename( #endif /* MS_WINDOWS */ -PyObject * -PyErr_SetImportErrorSubclass(PyObject *exception, PyObject *msg, - PyObject *name, PyObject *path) +static PyObject * +_PyErr_SetImportErrorSubclassWithNameFrom( + PyObject *exception, PyObject *msg, + PyObject *name, PyObject *path, PyObject* from_name) { PyThreadState *tstate = _PyThreadState_GET(); int issubclass; @@ -1005,6 +996,10 @@ PyErr_SetImportErrorSubclass(PyObject *exception, PyObject *msg, if (path == NULL) { path = Py_None; } + if (from_name == NULL) { + from_name = Py_None; + } + kwargs = PyDict_New(); if (kwargs == NULL) { @@ -1016,6 +1011,9 @@ PyErr_SetImportErrorSubclass(PyObject *exception, PyObject *msg, if (PyDict_SetItemString(kwargs, "path", path) < 0) { goto done; } + if (PyDict_SetItemString(kwargs, "name_from", from_name) < 0) { + goto done; + } error = PyObject_VectorcallDict(exception, &msg, 1, kwargs); if (error != NULL) { @@ -1028,6 +1026,20 @@ PyErr_SetImportErrorSubclass(PyObject *exception, PyObject *msg, return NULL; } + +PyObject * +PyErr_SetImportErrorSubclass(PyObject *exception, PyObject *msg, + PyObject *name, PyObject *path) +{ + return _PyErr_SetImportErrorSubclassWithNameFrom(exception, msg, name, path, NULL); +} + +PyObject * +_PyErr_SetImportErrorWithNameFrom(PyObject *msg, PyObject *name, PyObject *path, PyObject* from_name) +{ + return _PyErr_SetImportErrorSubclassWithNameFrom(PyExc_ImportError, msg, name, path, from_name); +} + PyObject * PyErr_SetImportError(PyObject *msg, PyObject *name, PyObject *path) { @@ -1144,9 +1156,7 @@ PyErr_NewException(const char *name, PyObject *base, PyObject *dict) goto failure; } if (PyTuple_Check(base)) { - bases = base; - /* INCREF as we create a new ref in the else branch */ - Py_INCREF(bases); + bases = Py_NewRef(base); } else { bases = PyTuple_Pack(1, base); if (bases == NULL) @@ -1265,8 +1275,7 @@ make_unraisable_hook_args(PyThreadState *tstate, PyObject *exc_type, if (exc_type == NULL) { \ exc_type = Py_None; \ } \ - Py_INCREF(exc_type); \ - PyStructSequence_SET_ITEM(args, pos++, exc_type); \ + PyStructSequence_SET_ITEM(args, pos++, Py_NewRef(exc_type)); \ } while (0) diff --git a/Python/fileutils.c b/Python/fileutils.c index fb1e5ef9a03026..244bd899b3bd24 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -191,7 +191,7 @@ extern int _Py_normalize_encoding(const char *, char *, size_t); 
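
The dtoa.c hunk above (freelist, private_mem, pmem_next) and the fileutils.c hunk that follows (force_ascii) apply the same transformation: a file-scope static variable moves into the shared _PyRuntime state, and a #define keeps the old spelling usable inside the file. The snippet below is a self-contained illustration of that pattern; the struct and variable names are invented for the example and are not CPython's actual pycore_runtime.h layout.

    #include <stdio.h>

    /* Stand-in for the consolidated runtime state (illustrative only). */
    struct demo_runtime_state {
        int force_ascii;                 /* was: static int force_ascii = -1; */
    };

    static struct demo_runtime_state demo_runtime = { .force_ascii = -1 };

    /* The old identifier becomes an alias for the shared state. */
    #define force_ascii (demo_runtime.force_ascii)

    int
    main(void)
    {
        printf("force_ascii = %d\n", force_ascii);  /* expands to demo_runtime.force_ascii */
        return 0;
    }
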
Py_DecodeLocale() uses mbstowcs() -1: unknown, need to call check_force_ascii() to get the value */ -static int force_ascii = -1; +#define force_ascii (_PyRuntime.fileutils.force_ascii) static int check_force_ascii(void) diff --git a/Python/frame.c b/Python/frame.c index 96566de63a78fd..52f6ef428291c5 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -37,14 +37,31 @@ _PyFrame_MakeAndSetFrameObject(_PyInterpreterFrame *frame) Py_XDECREF(error_type); Py_XDECREF(error_value); Py_XDECREF(error_traceback); + return NULL; } - else { - assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT); - assert(frame->owner != FRAME_CLEARED); - f->f_frame = frame; - frame->frame_obj = f; - PyErr_Restore(error_type, error_value, error_traceback); + PyErr_Restore(error_type, error_value, error_traceback); + if (frame->frame_obj) { + // GH-97002: How did we get into this horrible situation? Most likely, + // allocating f triggered a GC collection, which ran some code that + // *also* created the same frame... while we were in the middle of + // creating it! See test_sneaky_frame_object in test_frame.py for a + // concrete example. + // + // Regardless, just throw f away and use that frame instead, since it's + // already been exposed to user code. It's actually a bit tricky to do + // this, since we aren't backed by a real _PyInterpreterFrame anymore. + // Just pretend that we have an owned, cleared frame so frame_dealloc + // doesn't make the situation worse: + f->f_frame = (_PyInterpreterFrame *)f->_f_frame_data; + f->f_frame->owner = FRAME_CLEARED; + f->f_frame->frame_obj = f; + Py_DECREF(f); + return frame->frame_obj; } + assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT); + assert(frame->owner != FRAME_CLEARED); + f->f_frame = frame; + frame->frame_obj = f; return f; } @@ -63,6 +80,7 @@ _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame *dest) static void take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) { + assert(frame->owner != FRAME_OWNED_BY_CSTACK); assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT); assert(frame->owner != FRAME_CLEARED); Py_ssize_t size = ((char*)&frame->localsplus[frame->stacktop]) - (char *)frame; @@ -82,7 +100,9 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) while (prev && _PyFrame_IsIncomplete(prev)) { prev = prev->previous; } + frame->previous = NULL; if (prev) { + assert(prev->owner != FRAME_OWNED_BY_CSTACK); /* Link PyFrameObjects.f_back and remove link through _PyInterpreterFrame.previous */ PyFrameObject *back = _PyFrame_GetFrameObject(prev); if (back == NULL) { @@ -94,7 +114,6 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) else { f->f_back = (PyFrameObject *)Py_NewRef(back); } - frame->previous = NULL; } if (!_PyObject_GC_IS_TRACKED((PyObject *)f)) { _PyObject_GC_TRACK((PyObject *)f); diff --git a/Python/frozen.c b/Python/frozen.c index 8a2a7243537cc5..48b429519b6606 100644 --- a/Python/frozen.c +++ b/Python/frozen.c @@ -8,7 +8,7 @@ * These files must be regenerated any time the corresponding .pyc * file would change (including with changes to the compiler, bytecode * format, marshal format). This can be done with "make regen-frozen". - * That make target just runs Tools/scripts/freeze_modules.py. + * That make target just runs Tools/build/freeze_modules.py. * * The freeze_modules.py script also determines which modules get * frozen. 
Update the list at the top of the script to add, remove, diff --git a/Python/frozen_modules/README.txt b/Python/frozen_modules/README.txt index 444167cc496af3..795bb0efad34df 100644 --- a/Python/frozen_modules/README.txt +++ b/Python/frozen_modules/README.txt @@ -4,4 +4,4 @@ modules. Python/frozen.c depends on these files. Note that, other than the required frozen modules, none of these files are committed into the repo. -See Tools/scripts/freeze_modules.py for more info. +See Tools/build/freeze_modules.py for more info. diff --git a/Python/future.c b/Python/future.c index d465608ca45494..d56f7330964684 100644 --- a/Python/future.c +++ b/Python/future.c @@ -2,8 +2,6 @@ #include "pycore_ast.h" // _PyAST_GetDocString() #define UNDEFINED_FUTURE_FEATURE "future feature %.100s is not defined" -#define ERR_LATE_FUTURE \ -"from __future__ imports must occur at the beginning of the file" static int future_check_features(PyFutureFeatures *ff, stmt_ty s, PyObject *filename) @@ -56,81 +54,56 @@ future_check_features(PyFutureFeatures *ff, stmt_ty s, PyObject *filename) static int future_parse(PyFutureFeatures *ff, mod_ty mod, PyObject *filename) { - int i, done = 0, prev_line = 0; - - if (!(mod->kind == Module_kind || mod->kind == Interactive_kind)) + if (!(mod->kind == Module_kind || mod->kind == Interactive_kind)) { return 1; + } - if (asdl_seq_LEN(mod->v.Module.body) == 0) + Py_ssize_t n = asdl_seq_LEN(mod->v.Module.body); + if (n == 0) { return 1; + } - /* A subsequent pass will detect future imports that don't - appear at the beginning of the file. There's one case, - however, that is easier to handle here: A series of imports - joined by semi-colons, where the first import is a future - statement but some subsequent import has the future form - but is preceded by a regular import. - */ - - i = 0; - if (_PyAST_GetDocString(mod->v.Module.body) != NULL) + Py_ssize_t i = 0; + if (_PyAST_GetDocString(mod->v.Module.body) != NULL) { i++; + } - for (; i < asdl_seq_LEN(mod->v.Module.body); i++) { + for (; i < n; i++) { stmt_ty s = (stmt_ty)asdl_seq_GET(mod->v.Module.body, i); - if (done && s->lineno > prev_line) - return 1; - prev_line = s->lineno; - - /* The tests below will return from this function unless it is - still possible to find a future statement. The only things - that can precede a future statement are another future - statement and a doc string. - */ + /* The only things that can precede a future statement + * are another future statement and a doc string. 
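
The future.c rewrite in this patch drops the ERR_LATE_FUTURE special case (future_parse() now simply returns at the first statement that cannot be a __future__ import) and switches _PyFuture_FromAST() to filling in a caller-provided PyFutureFeatures, returning a 0/1 status instead of a heap allocation. The sketch below shows the implied calling convention; the wrapper function is hypothetical and assumes CPython's internal compiler headers (which declare mod_ty and PyFutureFeatures) are available.

    /* Hypothetical caller; the real one lives in the compiler. */
    static int
    collect_future_flags(mod_ty mod, PyObject *filename, PyFutureFeatures *ff)
    {
        if (!_PyFuture_FromAST(mod, filename, ff)) {
            return 0;            /* an exception is already set */
        }
        /* ff->ff_features and ff->ff_location are now populated. */
        return 1;
    }
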
+ */ if (s->kind == ImportFrom_kind) { identifier modname = s->v.ImportFrom.module; if (modname && _PyUnicode_EqualToASCIIString(modname, "__future__")) { - if (done) { - PyErr_SetString(PyExc_SyntaxError, - ERR_LATE_FUTURE); - PyErr_SyntaxLocationObject(filename, s->lineno, s->col_offset); + if (!future_check_features(ff, s, filename)) { return 0; } - if (!future_check_features(ff, s, filename)) - return 0; - ff->ff_lineno = s->lineno; + ff->ff_location = SRC_LOCATION_FROM_AST(s); } else { - done = 1; + return 1; } } else { - done = 1; + return 1; } } return 1; } -PyFutureFeatures * -_PyFuture_FromAST(mod_ty mod, PyObject *filename) +int +_PyFuture_FromAST(mod_ty mod, PyObject *filename, PyFutureFeatures *ff) { - PyFutureFeatures *ff; - - ff = (PyFutureFeatures *)PyObject_Malloc(sizeof(PyFutureFeatures)); - if (ff == NULL) { - PyErr_NoMemory(); - return NULL; - } ff->ff_features = 0; - ff->ff_lineno = -1; + ff->ff_location = (_PyCompilerSrcLocation){-1, -1, -1, -1}; if (!future_parse(ff, mod, filename)) { - PyObject_Free(ff); - return NULL; + return 0; } - return ff; + return 1; } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h new file mode 100644 index 00000000000000..ae8fdd5e99c3dc --- /dev/null +++ b/Python/generated_cases.c.h @@ -0,0 +1,3827 @@ +// This file is generated by Tools/cases_generator/generate_cases.py +// from Python/bytecodes.c +// Do not edit! + + TARGET(NOP) { + DISPATCH(); + } + + TARGET(RESUME) { + assert(tstate->cframe == &cframe); + assert(frame == cframe.current_frame); + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + DISPATCH(); + } + + TARGET(LOAD_CLOSURE) { + PyObject *value; + /* We keep LOAD_CLOSURE so that the bytecode stays more readable. */ + value = GETLOCAL(oparg); + if (value == NULL) goto unbound_local_error; + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_FAST_CHECK) { + PyObject *value; + value = GETLOCAL(oparg); + if (value == NULL) goto unbound_local_error; + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_FAST) { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(LOAD_CONST) { + PREDICTED(LOAD_CONST); + PyObject *value; + value = GETITEM(consts, oparg); + Py_INCREF(value); + STACK_GROW(1); + POKE(1, value); + DISPATCH(); + } + + TARGET(STORE_FAST) { + PyObject *value = PEEK(1); + SETLOCAL(oparg, value); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(POP_TOP) { + PyObject *value = PEEK(1); + Py_DECREF(value); + STACK_SHRINK(1); + DISPATCH(); + } + + TARGET(PUSH_NULL) { + PyObject *res; + res = NULL; + STACK_GROW(1); + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_POSITIVE) { + PyObject *value = PEEK(1); + PyObject *res; + res = PyNumber_Positive(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_NEGATIVE) { + PyObject *value = PEEK(1); + PyObject *res; + res = PyNumber_Negative(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_NOT) { + PyObject *value = PEEK(1); + PyObject *res; + int err = PyObject_IsTrue(value); + Py_DECREF(value); + if (err < 0) goto pop_1_error; + if (err == 0) { + res = Py_True; + } + else { + res = Py_False; + } + Py_INCREF(res); + POKE(1, res); + DISPATCH(); + } + + TARGET(UNARY_INVERT) { + PyObject *value = PEEK(1); + PyObject *res; + res = 
PyNumber_Invert(value); + Py_DECREF(value); + if (res == NULL) goto pop_1_error; + POKE(1, res); + DISPATCH(); + } + + TARGET(BINARY_OP_MULTIPLY_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *prod; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + prod = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (prod == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, prod); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_MULTIPLY_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *prod; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dprod = ((PyFloatObject *)left)->ob_fval * + ((PyFloatObject *)right)->ob_fval; + prod = PyFloat_FromDouble(dprod); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (prod == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, prod); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_SUBTRACT_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sub; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sub = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (sub == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sub); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_SUBTRACT_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sub; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; + sub = PyFloat_FromDouble(dsub); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (sub == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sub); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_UNICODE) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + res = PyUnicode_Concat(left, right); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; + assert(_Py_OPCODE(true_next) == STORE_FAST || + _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST); + PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next)); + DEOPT_IF(*target_local != 
left, BINARY_OP); + STAT_INC(BINARY_OP, hit); + /* Handle `left = left + right` or `left += right` for str. + * + * When possible, extend `left` in place rather than + * allocating a new PyUnicodeObject. This attempts to avoid + * quadratic behavior when one neglects to use str.join(). + * + * If `left` has only two references remaining (one from + * the stack, one in the locals), DECREFing `left` leaves + * only the locals reference, so PyUnicode_Append knows + * that the string is safe to mutate. + */ + assert(Py_REFCNT(left) >= 2); + _Py_DECREF_NO_DEALLOC(left); + PyUnicode_Append(target_local, right); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + if (*target_local == NULL) goto pop_2_error; + // The STORE_FAST is already done. + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_FLOAT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sum; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + double dsum = ((PyFloatObject *)left)->ob_fval + + ((PyFloatObject *)right)->ob_fval; + sum = PyFloat_FromDouble(dsum); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + if (sum == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sum); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_OP_ADD_INT) { + PyObject *right = PEEK(1); + PyObject *left = PEEK(2); + PyObject *sum; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); + DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); + STAT_INC(BINARY_OP, hit); + sum = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + if (sum == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, sum); + next_instr += 1; + DISPATCH(); + } + + TARGET(BINARY_SUBSCR) { + PREDICTED(BINARY_SUBSCR); + static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 4, "incorrect cache size"); + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + PyObject *res; + _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_BinarySubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + res = PyObject_GetItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + next_instr += 4; + DISPATCH(); + } + + TARGET(BINARY_SLICE) { + PyObject *stop = PEEK(1); + PyObject *start = PEEK(2); + PyObject *container = PEEK(3); + PyObject *res; + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + // Can't use ERROR_IF() here, because we haven't + // DECREF'ed container yet, and we still own slice. 
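
BINARY_OP_INPLACE_ADD_UNICODE above relies on PyUnicode_Append(), a public C-API call that takes the left operand by address so the string can be grown in place when nothing else references it, which is what avoids the quadratic behaviour described by the comment in the hunk. The helper below is a hypothetical, minimal use of that API and is not taken from the diff.

    #include <Python.h>

    static int
    append_in_place(PyObject **p_left, PyObject *right)
    {
        /* Replaces *p_left with the concatenation (possibly the same object,
         * extended in place); the old reference in *p_left is released.  On
         * failure *p_left is set to NULL with an exception set.  `right` is
         * not consumed. */
        PyUnicode_Append(p_left, right);
        return (*p_left == NULL) ? -1 : 0;
    }
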
+ if (slice == NULL) { + res = NULL; + } + else { + res = PyObject_GetItem(container, slice); + Py_DECREF(slice); + } + Py_DECREF(container); + if (res == NULL) goto pop_3_error; + STACK_SHRINK(2); + POKE(1, res); + DISPATCH(); + } + + TARGET(STORE_SLICE) { + PyObject *stop = PEEK(1); + PyObject *start = PEEK(2); + PyObject *container = PEEK(3); + PyObject *v = PEEK(4); + PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); + int err; + if (slice == NULL) { + err = 1; + } + else { + err = PyObject_SetItem(container, slice, v); + Py_DECREF(slice); + } + Py_DECREF(v); + Py_DECREF(container); + if (err) goto pop_4_error; + STACK_SHRINK(4); + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_LIST_INT) { + PyObject *sub = PEEK(1); + PyObject *list = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyList_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyList_GET_ITEM(list, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + STACK_SHRINK(1); + POKE(1, res); + next_instr += 4; + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_TUPLE_INT) { + PyObject *sub = PEEK(1); + PyObject *tuple = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); + + // Deopt unless 0 <= sub < PyTuple_Size(list) + Py_ssize_t signed_magnitude = Py_SIZE(sub); + DEOPT_IF(((size_t)signed_magnitude) > 1, BINARY_SUBSCR); + assert(((PyLongObject *)_PyLong_GetZero())->ob_digit[0] == 0); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyTuple_GET_ITEM(tuple, index); + assert(res != NULL); + Py_INCREF(res); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(tuple); + STACK_SHRINK(1); + POKE(1, res); + next_instr += 4; + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_DICT) { + PyObject *sub = PEEK(1); + PyObject *dict = PEEK(2); + PyObject *res; + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = PyDict_GetItemWithError(dict, sub); + if (res == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetKeyError(sub); + } + Py_DECREF(dict); + Py_DECREF(sub); + if (1) goto pop_2_error; + } + Py_INCREF(res); // Do this before DECREF'ing dict, sub + Py_DECREF(dict); + Py_DECREF(sub); + STACK_SHRINK(1); + POKE(1, res); + next_instr += 4; + DISPATCH(); + } + + TARGET(BINARY_SUBSCR_GETITEM) { + uint32_t type_version = read_u32(next_instr + 1); + uint16_t func_version = read_u16(next_instr + 3); + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + PyTypeObject *tp = Py_TYPE(container); + DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); + assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); + PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + assert(PyFunction_Check(cached)); + PyFunctionObject *getitem = (PyFunctionObject *)cached; + DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + PyCodeObject *code = 
(PyCodeObject *)getitem->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem); + STACK_SHRINK(2); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + DISPATCH_INLINED(new_frame); + } + + TARGET(LIST_APPEND) { + PyObject *v = POP(); + PyObject *list = PEEK(oparg); + if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) + goto error; + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(SET_ADD) { + PyObject *v = POP(); + PyObject *set = PEEK(oparg); + int err; + err = PySet_Add(set, v); + Py_DECREF(v); + if (err != 0) + goto error; + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(STORE_SUBSCR) { + PREDICTED(STORE_SUBSCR); + PyObject *sub = PEEK(1); + PyObject *container = PEEK(2); + PyObject *v = PEEK(3); + _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_StoreSubscr(container, sub, next_instr); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_SUBSCR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* container[sub] = v */ + int err = PyObject_SetItem(container, sub, v); + Py_DECREF(v); + Py_DECREF(container); + Py_DECREF(sub); + if (err != 0) goto pop_3_error; + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + STACK_SHRINK(3); + DISPATCH(); + } + + TARGET(STORE_SUBSCR_LIST_INT) { + assert(cframe.use_tracing == 0); + PyObject *sub = TOP(); + PyObject *list = SECOND(); + PyObject *value = THIRD(); + DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); + DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); + + // Ensure nonnegative, zero-or-one-digit ints. 
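
The "(size_t)Py_SIZE(sub) > 1" deopt guards above work because, for a PyLongObject in this codebase, Py_SIZE() is the signed digit count: negative for negative values, 0 for zero, and otherwise the number of (typically 30-bit) digits in the magnitude. Casting that count to size_t turns a negative value into a huge unsigned number, so a single comparison rejects both negative indices and indices too large to fit in one digit. The helper below is illustrative only.

    #include <assert.h>
    #include <Python.h>

    static int
    is_single_digit_nonnegative(PyObject *sub)
    {
        assert(PyLong_CheckExact(sub));
        /* True only for 0 or a nonnegative int that fits in one digit,
         * i.e. exactly the values the specialized handlers accept. */
        return (size_t)Py_SIZE(sub) <= 1;
    }
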
+ DEOPT_IF(((size_t)Py_SIZE(sub)) > 1, STORE_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->ob_digit[0]; + // Ensure index < len(list) + DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); + STAT_INC(STORE_SUBSCR, hit); + + PyObject *old_value = PyList_GET_ITEM(list, index); + PyList_SET_ITEM(list, index, value); + STACK_SHRINK(3); + assert(old_value != NULL); + Py_DECREF(old_value); + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(list); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + DISPATCH(); + } + + TARGET(STORE_SUBSCR_DICT) { + assert(cframe.use_tracing == 0); + PyObject *sub = TOP(); + PyObject *dict = SECOND(); + PyObject *value = THIRD(); + DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); + STACK_SHRINK(3); + STAT_INC(STORE_SUBSCR, hit); + int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); + Py_DECREF(dict); + if (err != 0) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR); + DISPATCH(); + } + + TARGET(DELETE_SUBSCR) { + PyObject *sub = TOP(); + PyObject *container = SECOND(); + int err; + STACK_SHRINK(2); + /* del container[sub] */ + err = PyObject_DelItem(container, sub); + Py_DECREF(container); + Py_DECREF(sub); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(PRINT_EXPR) { + PyObject *value = POP(); + PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook)); + PyObject *res; + if (hook == NULL) { + _PyErr_SetString(tstate, PyExc_RuntimeError, + "lost sys.displayhook"); + Py_DECREF(value); + goto error; + } + res = PyObject_CallOneArg(hook, value); + Py_DECREF(value); + if (res == NULL) + goto error; + Py_DECREF(res); + DISPATCH(); + } + + TARGET(RAISE_VARARGS) { + PyObject *cause = NULL, *exc = NULL; + switch (oparg) { + case 2: + cause = POP(); /* cause */ + /* fall through */ + case 1: + exc = POP(); /* exc */ + /* fall through */ + case 0: + if (do_raise(tstate, exc, cause)) { + goto exception_unwind; + } + break; + default: + _PyErr_SetString(tstate, PyExc_SystemError, + "bad RAISE_VARARGS oparg"); + break; + } + goto error; + } + + TARGET(INTERPRETER_EXIT) { + assert(frame == &entry_frame); + assert(_PyFrame_IsIncomplete(frame)); + PyObject *retval = POP(); + assert(EMPTY()); + /* Restore previous cframe and return. 
*/ + tstate->cframe = cframe.previous; + tstate->cframe->use_tracing = cframe.use_tracing; + assert(tstate->cframe->current_frame == frame->previous); + assert(!_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallTstate(tstate); + return retval; + } + + TARGET(RETURN_VALUE) { + PyObject *retval = POP(); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + frame = cframe.current_frame = pop_frame(tstate, frame); + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + TARGET(GET_AITER) { + unaryfunc getter = NULL; + PyObject *iter = NULL; + PyObject *obj = TOP(); + PyTypeObject *type = Py_TYPE(obj); + + if (type->tp_as_async != NULL) { + getter = type->tp_as_async->am_aiter; + } + + if (getter != NULL) { + iter = (*getter)(obj); + Py_DECREF(obj); + if (iter == NULL) { + SET_TOP(NULL); + goto error; + } + } + else { + SET_TOP(NULL); + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an object with " + "__aiter__ method, got %.100s", + type->tp_name); + Py_DECREF(obj); + goto error; + } + + if (Py_TYPE(iter)->tp_as_async == NULL || + Py_TYPE(iter)->tp_as_async->am_anext == NULL) { + + SET_TOP(NULL); + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' received an object from __aiter__ " + "that does not implement __anext__: %.100s", + Py_TYPE(iter)->tp_name); + Py_DECREF(iter); + goto error; + } + + SET_TOP(iter); + DISPATCH(); + } + + TARGET(GET_ANEXT) { + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyObject *awaitable = NULL; + PyObject *aiter = TOP(); + PyTypeObject *type = Py_TYPE(aiter); + + if (PyAsyncGen_CheckExact(aiter)) { + awaitable = type->tp_as_async->am_anext(aiter); + if (awaitable == NULL) { + goto error; + } + } else { + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + goto error; + } + } + else { + _PyErr_Format(tstate, PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + goto error; + } + + awaitable = _PyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + + Py_DECREF(next_iter); + goto error; + } else { + Py_DECREF(next_iter); + } + } + + PUSH(awaitable); + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(GET_AWAITABLE) { + PREDICTED(GET_AWAITABLE); + PyObject *iterable = TOP(); + PyObject *iter = _PyCoro_GetAwaitableIter(iterable); + + if (iter == NULL) { + format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + } + + Py_DECREF(iterable); + + if (iter != NULL && PyCoro_CheckExact(iter)) { + PyObject *yf = _PyGen_yf((PyGenObject*)iter); + if (yf != NULL) { + /* `iter` is a coroutine object that is being + awaited, `yf` is a pointer to the current awaitable + being awaited on. */ + Py_DECREF(yf); + Py_CLEAR(iter); + _PyErr_SetString(tstate, PyExc_RuntimeError, + "coroutine is being awaited already"); + /* The code below jumps to `error` if `iter` is NULL. 
*/ + } + } + + SET_TOP(iter); /* Even if it's NULL */ + + if (iter == NULL) { + goto error; + } + + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(SEND) { + assert(frame != &entry_frame); + assert(STACK_LEVEL() >= 2); + PyObject *v = POP(); + PyObject *receiver = TOP(); + PySendResult gen_status; + PyObject *retval; + if (tstate->c_tracefunc == NULL) { + gen_status = PyIter_Send(receiver, v, &retval); + } else { + if (Py_IsNone(v) && PyIter_Check(receiver)) { + retval = Py_TYPE(receiver)->tp_iternext(receiver); + } + else { + retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); + } + if (retval == NULL) { + if (tstate->c_tracefunc != NULL + && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + if (_PyGen_FetchStopIterationValue(&retval) == 0) { + gen_status = PYGEN_RETURN; + } + else { + gen_status = PYGEN_ERROR; + } + } + else { + gen_status = PYGEN_NEXT; + } + } + Py_DECREF(v); + if (gen_status == PYGEN_ERROR) { + assert(retval == NULL); + goto error; + } + if (gen_status == PYGEN_RETURN) { + assert(retval != NULL); + Py_DECREF(receiver); + SET_TOP(retval); + JUMPBY(oparg); + } + else { + assert(gen_status == PYGEN_NEXT); + assert(retval != NULL); + PUSH(retval); + } + DISPATCH(); + } + + TARGET(ASYNC_GEN_WRAP) { + PyObject *v = TOP(); + assert(frame->f_code->co_flags & CO_ASYNC_GENERATOR); + PyObject *w = _PyAsyncGenValueWrapperNew(v); + if (w == NULL) { + goto error; + } + SET_TOP(w); + Py_DECREF(v); + DISPATCH(); + } + + TARGET(YIELD_VALUE) { + // NOTE: It's important that YIELD_VALUE never raises an exception! + // The compiler treats any exception raised here as a failed close() + // or throw() call. + assert(oparg == STACK_LEVEL()); + assert(frame != &entry_frame); + PyObject *retval = POP(); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer); + TRACE_FUNCTION_EXIT(); + DTRACE_FUNCTION_EXIT(); + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + frame->prev_instr -= frame->yield_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + TARGET(POP_EXCEPT) { + _PyErr_StackItem *exc_info = tstate->exc_info; + PyObject *value = exc_info->exc_value; + exc_info->exc_value = POP(); + Py_XDECREF(value); + DISPATCH(); + } + + TARGET(RERAISE) { + if (oparg) { + PyObject *lasti = PEEK(oparg + 1); + if (PyLong_Check(lasti)) { + frame->prev_instr = _PyCode_CODE(frame->f_code) + PyLong_AsLong(lasti); + assert(!_PyErr_Occurred(tstate)); + } + else { + assert(PyLong_Check(lasti)); + _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); + goto error; + } + } + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + + TARGET(PREP_RERAISE_STAR) { + PyObject *excs = POP(); + assert(PyList_Check(excs)); + PyObject *orig = POP(); + + PyObject *val = _PyExc_PrepReraiseStar(orig, excs); + Py_DECREF(excs); + Py_DECREF(orig); + + if (val == NULL) { + goto error; + } + + PUSH(val); + DISPATCH(); + } + + TARGET(END_ASYNC_FOR) { + PyObject *val = POP(); + assert(val && PyExceptionInstance_Check(val)); + if 
(PyErr_GivenExceptionMatches(val, PyExc_StopAsyncIteration)) { + Py_DECREF(val); + Py_DECREF(POP()); + } + else { + PyObject *exc = Py_NewRef(PyExceptionInstance_Class(val)); + PyObject *tb = PyException_GetTraceback(val); + _PyErr_Restore(tstate, exc, val, tb); + goto exception_unwind; + } + DISPATCH(); + } + + TARGET(CLEANUP_THROW) { + assert(throwflag); + PyObject *exc_value = TOP(); + assert(exc_value && PyExceptionInstance_Check(exc_value)); + if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { + PyObject *value = ((PyStopIterationObject *)exc_value)->value; + Py_INCREF(value); + Py_DECREF(POP()); // The StopIteration. + Py_DECREF(POP()); // The last sent value. + Py_DECREF(POP()); // The delegated sub-iterator. + PUSH(value); + } + else { + PyObject *exc_type = Py_NewRef(Py_TYPE(exc_value)); + PyObject *exc_traceback = PyException_GetTraceback(exc_value); + _PyErr_Restore(tstate, exc_type, Py_NewRef(exc_value), exc_traceback); + goto exception_unwind; + } + DISPATCH(); + } + + TARGET(STOPITERATION_ERROR) { + assert(frame->owner == FRAME_OWNED_BY_GENERATOR); + PyObject *exc = TOP(); + assert(PyExceptionInstance_Check(exc)); + const char *msg = NULL; + if (PyErr_GivenExceptionMatches(exc, PyExc_StopIteration)) { + msg = "generator raised StopIteration"; + if (frame->f_code->co_flags & CO_ASYNC_GENERATOR) { + msg = "async generator raised StopIteration"; + } + else if (frame->f_code->co_flags & CO_COROUTINE) { + msg = "coroutine raised StopIteration"; + } + } + else if ((frame->f_code->co_flags & CO_ASYNC_GENERATOR) && + PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) + { + /* code in `gen` raised a StopAsyncIteration error: + raise a RuntimeError. + */ + msg = "async generator raised StopAsyncIteration"; + } + if (msg != NULL) { + PyObject *message = _PyUnicode_FromASCII(msg, strlen(msg)); + if (message == NULL) { + goto error; + } + PyObject *error = PyObject_CallOneArg(PyExc_RuntimeError, message); + if (error == NULL) { + Py_DECREF(message); + goto error; + } + assert(PyExceptionInstance_Check(error)); + SET_TOP(error); + PyException_SetCause(error, Py_NewRef(exc)); + // Steal exc reference, rather than Py_NewRef+Py_DECREF + PyException_SetContext(error, exc); + Py_DECREF(message); + } + DISPATCH(); + } + + TARGET(LOAD_ASSERTION_ERROR) { + PyObject *value = PyExc_AssertionError; + PUSH(Py_NewRef(value)); + DISPATCH(); + } + + TARGET(LOAD_BUILD_CLASS) { + PyObject *bc; + if (PyDict_CheckExact(BUILTINS())) { + bc = _PyDict_GetItemWithError(BUILTINS(), + &_Py_ID(__build_class__)); + if (bc == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + } + goto error; + } + Py_INCREF(bc); + } + else { + bc = PyObject_GetItem(BUILTINS(), &_Py_ID(__build_class__)); + if (bc == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + _PyErr_SetString(tstate, PyExc_NameError, + "__build_class__ not found"); + goto error; + } + } + PUSH(bc); + DISPATCH(); + } + + TARGET(STORE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *v = POP(); + PyObject *ns = LOCALS(); + int err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when storing %R", name); + Py_DECREF(v); + goto error; + } + if (PyDict_CheckExact(ns)) + err = PyDict_SetItem(ns, name, v); + else + err = PyObject_SetItem(ns, name, v); + Py_DECREF(v); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(DELETE_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *ns = LOCALS(); + int 
err; + if (ns == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when deleting %R", name); + goto error; + } + err = PyObject_DelItem(ns, name); + // Can't use ERROR_IF here. + if (err != 0) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); + goto error; + } + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE) { + PREDICTED(UNPACK_SEQUENCE); + _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *seq = TOP(); + next_instr--; + _Py_Specialize_UnpackSequence(seq, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(UNPACK_SEQUENCE, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *seq = POP(); + PyObject **top = stack_pointer + oparg; + if (!unpack_iterable(tstate, seq, oparg, -1, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(oparg); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1))); + PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0))); + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_TUPLE) { + PyObject *seq = TOP(); + DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyTuple_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_SEQUENCE_LIST) { + PyObject *seq = TOP(); + DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); + DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); + STAT_INC(UNPACK_SEQUENCE, hit); + STACK_SHRINK(1); + PyObject **items = _PyList_ITEMS(seq); + while (oparg--) { + PUSH(Py_NewRef(items[oparg])); + } + Py_DECREF(seq); + JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); + DISPATCH(); + } + + TARGET(UNPACK_EX) { + int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); + PyObject *seq = POP(); + PyObject **top = stack_pointer + totalargs; + if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) { + Py_DECREF(seq); + goto error; + } + STACK_GROW(totalargs); + Py_DECREF(seq); + DISPATCH(); + } + + TARGET(STORE_ATTR) { + PREDICTED(STORE_ATTR); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg); + next_instr--; + _Py_Specialize_StoreAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(STORE_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg); + PyObject *owner = TOP(); + PyObject *v = SECOND(); + int err; + STACK_SHRINK(2); + err = PyObject_SetAttr(owner, name, v); + Py_DECREF(v); + Py_DECREF(owner); + if (err != 0) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + DISPATCH(); + } + + TARGET(DELETE_ATTR) { + PyObject *name = GETITEM(names, oparg); + PyObject *owner = POP(); + int err; + err = PyObject_SetAttr(owner, name, (PyObject *)NULL); + Py_DECREF(owner); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(STORE_GLOBAL) { + PyObject *name = 
GETITEM(names, oparg); + PyObject *v = POP(); + int err; + err = PyDict_SetItem(GLOBALS(), name, v); + Py_DECREF(v); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(DELETE_GLOBAL) { + PyObject *name = GETITEM(names, oparg); + int err; + err = PyDict_DelItem(GLOBALS(), name); + // Can't use ERROR_IF here. + if (err != 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + DISPATCH(); + } + + TARGET(LOAD_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *locals = LOCALS(); + PyObject *v; + if (locals == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals when loading %R", name); + goto error; + } + if (PyDict_CheckExact(locals)) { + v = PyDict_GetItemWithError(locals, name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + v = PyObject_GetItem(locals, name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) + goto error; + _PyErr_Clear(tstate); + } + } + if (v == NULL) { + v = PyDict_GetItemWithError(GLOBALS(), name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + else { + if (PyDict_CheckExact(BUILTINS())) { + v = PyDict_GetItemWithError(BUILTINS(), name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + } + PUSH(v); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL) { + PREDICTED(LOAD_GLOBAL); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *name = GETITEM(names, oparg>>1); + next_instr--; + _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_GLOBAL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int push_null = oparg & 1; + PEEK(0) = NULL; + PyObject *name = GETITEM(names, oparg>>1); + PyObject *v; + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + format_exc_check_arg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + Py_INCREF(v); + } + else { + /* Slow-path if globals or builtins is not a dict */ + + /* namespace 1: globals */ + v = PyObject_GetItem(GLOBALS(), name); + if (v == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + + /* namespace 2: builtins */ + v = PyObject_GetItem(BUILTINS(), name); + if (v == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + format_exc_check_arg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + goto error; + } + } + } + /* Skip over inline cache */ + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STACK_GROW(push_null); + PUSH(v); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL_MODULE) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + PyDictObject *dict = 
(PyDictObject *)GLOBALS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t version = read_u32(cache->module_keys_version); + DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + DISPATCH(); + } + + TARGET(LOAD_GLOBAL_BUILTIN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); + DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); + PyDictObject *mdict = (PyDictObject *)GLOBALS(); + PyDictObject *bdict = (PyDictObject *)BUILTINS(); + _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; + uint32_t mod_version = read_u32(cache->module_keys_version); + uint16_t bltn_version = cache->builtin_keys_version; + DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); + DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(bdict->ma_keys)); + PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); + PyObject *res = entries[cache->index].me_value; + DEOPT_IF(res == NULL, LOAD_GLOBAL); + int push_null = oparg & 1; + PEEK(0) = NULL; + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_GLOBAL); + STAT_INC(LOAD_GLOBAL, hit); + STACK_GROW(push_null+1); + SET_TOP(Py_NewRef(res)); + DISPATCH(); + } + + TARGET(DELETE_FAST) { + PyObject *v = GETLOCAL(oparg); + if (v == NULL) goto unbound_local_error; + SETLOCAL(oparg, NULL); + DISPATCH(); + } + + TARGET(MAKE_CELL) { + // "initial" is probably NULL but not if it's an arg (or set + // via PyFrame_LocalsToFast() before MAKE_CELL has run). + PyObject *initial = GETLOCAL(oparg); + PyObject *cell = PyCell_New(initial); + if (cell == NULL) { + goto resume_with_error; + } + SETLOCAL(oparg, cell); + DISPATCH(); + } + + TARGET(DELETE_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + // Can't use ERROR_IF here. + // Fortunately we don't need its superpower. 
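
The MAKE_CELL and DELETE_DEREF cases above (and the LOAD_DEREF / STORE_DEREF cases that follow) are thin wrappers around the cell object API: a cell is a one-slot box shared by closures, and its slot may legitimately be empty (NULL), which is what the unbound-variable error paths check for. The round-trip below is a hypothetical example using only the public PyCell_* functions.

    #include <Python.h>

    static PyObject *
    cell_roundtrip(PyObject *value)
    {
        PyObject *cell = PyCell_New(value);       /* the cell holds its own strong reference */
        if (cell == NULL) {
            return NULL;
        }
        PyObject *contents = PyCell_GET(cell);    /* borrowed; NULL if the cell is empty */
        PyObject *result = Py_XNewRef(contents);  /* take ownership before dropping the cell */
        Py_DECREF(cell);
        return result;
    }
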
+ if (oldobj == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PyCell_SET(cell, NULL); + Py_DECREF(oldobj); + DISPATCH(); + } + + TARGET(LOAD_CLASSDEREF) { + PyObject *name, *value, *locals = LOCALS(); + assert(locals); + assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); + name = PyTuple_GET_ITEM(frame->f_code->co_localsplusnames, oparg); + if (PyDict_CheckExact(locals)) { + value = PyDict_GetItemWithError(locals, name); + if (value != NULL) { + Py_INCREF(value); + } + else if (_PyErr_Occurred(tstate)) { + goto error; + } + } + else { + value = PyObject_GetItem(locals, name); + if (value == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + } + } + if (!value) { + PyObject *cell = GETLOCAL(oparg); + value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + Py_INCREF(value); + } + PUSH(value); + DISPATCH(); + } + + TARGET(LOAD_DEREF) { + PyObject *cell = GETLOCAL(oparg); + PyObject *value = PyCell_GET(cell); + if (value == NULL) { + format_exc_unbound(tstate, frame->f_code, oparg); + goto error; + } + PUSH(Py_NewRef(value)); + DISPATCH(); + } + + TARGET(STORE_DEREF) { + PyObject *v = POP(); + PyObject *cell = GETLOCAL(oparg); + PyObject *oldobj = PyCell_GET(cell); + PyCell_SET(cell, v); + Py_XDECREF(oldobj); + DISPATCH(); + } + + TARGET(COPY_FREE_VARS) { + /* Copy closure variables to free variables */ + PyCodeObject *co = frame->f_code; + assert(PyFunction_Check(frame->f_funcobj)); + PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + int offset = co->co_nlocals + co->co_nplaincellvars; + assert(oparg == co->co_nfreevars); + for (int i = 0; i < oparg; ++i) { + PyObject *o = PyTuple_GET_ITEM(closure, i); + frame->localsplus[offset + i] = Py_NewRef(o); + } + DISPATCH(); + } + + TARGET(BUILD_STRING) { + PyObject *str; + str = _PyUnicode_JoinArray(&_Py_STR(empty), + stack_pointer - oparg, oparg); + if (str == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + Py_DECREF(item); + } + PUSH(str); + DISPATCH(); + } + + TARGET(BUILD_TUPLE) { + STACK_SHRINK(oparg); + PyObject *tup = _PyTuple_FromArraySteal(stack_pointer, oparg); + if (tup == NULL) + goto error; + PUSH(tup); + DISPATCH(); + } + + TARGET(BUILD_LIST) { + PyObject *list = PyList_New(oparg); + if (list == NULL) + goto error; + while (--oparg >= 0) { + PyObject *item = POP(); + PyList_SET_ITEM(list, oparg, item); + } + PUSH(list); + DISPATCH(); + } + + TARGET(LIST_TO_TUPLE) { + PyObject *list = POP(); + PyObject *tuple = PyList_AsTuple(list); + Py_DECREF(list); + if (tuple == NULL) { + goto error; + } + PUSH(tuple); + DISPATCH(); + } + + TARGET(LIST_EXTEND) { + PyObject *iterable = POP(); + PyObject *list = PEEK(oparg); + PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); + if (none_val == NULL) { + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && + (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable))) + { + _PyErr_Clear(tstate); + _PyErr_Format(tstate, PyExc_TypeError, + "Value after * must be an iterable, not %.200s", + Py_TYPE(iterable)->tp_name); + } + Py_DECREF(iterable); + goto error; + } + Py_DECREF(none_val); + Py_DECREF(iterable); + DISPATCH(); + } + + TARGET(SET_UPDATE) { + PyObject *iterable = POP(); + PyObject *set = PEEK(oparg); + int err = _PySet_Update(set, iterable); + Py_DECREF(iterable); + if (err < 0) { + goto error; + } + DISPATCH(); + } + + TARGET(BUILD_SET) { + PyObject 
*set = PySet_New(NULL); + int err = 0; + int i; + if (set == NULL) + goto error; + for (i = oparg; i > 0; i--) { + PyObject *item = PEEK(i); + if (err == 0) + err = PySet_Add(set, item); + Py_DECREF(item); + } + STACK_SHRINK(oparg); + if (err != 0) { + Py_DECREF(set); + goto error; + } + PUSH(set); + DISPATCH(); + } + + TARGET(BUILD_MAP) { + PyObject *map = _PyDict_FromItems( + &PEEK(2*oparg), 2, + &PEEK(2*oparg - 1), 2, + oparg); + if (map == NULL) + goto error; + + while (oparg--) { + Py_DECREF(POP()); + Py_DECREF(POP()); + } + PUSH(map); + DISPATCH(); + } + + TARGET(SETUP_ANNOTATIONS) { + int err; + PyObject *ann_dict; + if (LOCALS() == NULL) { + _PyErr_Format(tstate, PyExc_SystemError, + "no locals found when setting up annotations"); + goto error; + } + /* check if __annotations__ in locals()... */ + if (PyDict_CheckExact(LOCALS())) { + ann_dict = _PyDict_GetItemWithError(LOCALS(), + &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (_PyErr_Occurred(tstate)) { + goto error; + } + /* ...if not, create a new one */ + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyDict_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + } + else { + /* do the same if locals() is not a dict */ + ann_dict = PyObject_GetItem(LOCALS(), &_Py_ID(__annotations__)); + if (ann_dict == NULL) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { + goto error; + } + _PyErr_Clear(tstate); + ann_dict = PyDict_New(); + if (ann_dict == NULL) { + goto error; + } + err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), + ann_dict); + Py_DECREF(ann_dict); + if (err != 0) { + goto error; + } + } + else { + Py_DECREF(ann_dict); + } + } + DISPATCH(); + } + + TARGET(BUILD_CONST_KEY_MAP) { + PyObject *map; + PyObject *keys = TOP(); + if (!PyTuple_CheckExact(keys) || + PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { + _PyErr_SetString(tstate, PyExc_SystemError, + "bad BUILD_CONST_KEY_MAP keys argument"); + goto error; + } + map = _PyDict_FromItems( + &PyTuple_GET_ITEM(keys, 0), 1, + &PEEK(oparg + 1), 1, oparg); + if (map == NULL) { + goto error; + } + + Py_DECREF(POP()); + while (oparg--) { + Py_DECREF(POP()); + } + PUSH(map); + DISPATCH(); + } + + TARGET(DICT_UPDATE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + if (PyDict_Update(dict, update) < 0) { + if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object is not a mapping", + Py_TYPE(update)->tp_name); + } + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + DISPATCH(); + } + + TARGET(DICT_MERGE) { + PyObject *update = POP(); + PyObject *dict = PEEK(oparg); + + if (_PyDict_MergeEx(dict, update, 2) < 0) { + format_kwargs_error(tstate, PEEK(2 + oparg), update); + Py_DECREF(update); + goto error; + } + Py_DECREF(update); + PREDICT(CALL_FUNCTION_EX); + DISPATCH(); + } + + TARGET(MAP_ADD) { + PyObject *value = TOP(); + PyObject *key = SECOND(); + PyObject *map; + STACK_SHRINK(2); + map = PEEK(oparg); /* dict */ + assert(PyDict_CheckExact(map)); + /* map[key] = value */ + if (_PyDict_SetItem_Take2((PyDictObject *)map, key, value) != 0) { + goto error; + } + PREDICT(JUMP_BACKWARD); + DISPATCH(); + } + + TARGET(LOAD_ATTR) { + PREDICTED(LOAD_ATTR); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *name = GETITEM(names, oparg>>1); + next_instr--; + 
_Py_Specialize_LoadAttr(owner, next_instr, name); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + PyObject *name = GETITEM(names, oparg >> 1); + PyObject *owner = TOP(); + if (oparg & 1) { + /* Designed to work in tandem with CALL. */ + PyObject* meth = NULL; + + int meth_found = _PyObject_GetMethod(owner, name, &meth); + + if (meth == NULL) { + /* Most likely attribute wasn't found. */ + goto error; + } + + if (meth_found) { + /* We can bypass temporary bound method object. + meth is unbound method and obj is self. + + meth | self | arg1 | ... | argN + */ + SET_TOP(meth); + PUSH(owner); // self + } + else { + /* meth is not an unbound method (but a regular attr, or + something was returned by a descriptor protocol). Set + the second element of the stack to NULL, to signal + CALL that it's not a method call. + + NULL | meth | arg1 | ... | argN + */ + SET_TOP(NULL); + Py_DECREF(owner); + PUSH(meth); + } + } + else { + PyObject *res = PyObject_GetAttr(owner, name); + if (res == NULL) { + goto error; + } + Py_DECREF(owner); + SET_TOP(res); + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_dictoffset < 0); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + res = _PyDictOrValues_GetValues(dorv)->values[cache->index]; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_MODULE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; + assert(dict != NULL); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->version), + LOAD_ATTR); + assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); + assert(cache->index < dict->ma_keys->dk_nentries); + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + cache->index; + res = ep->me_value; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, LOAD_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg>>1); + uint16_t 
hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, LOAD_ATTR); + res = ep->me_value; + } + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyObject *res; + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + char *addr = (char *)owner + cache->index; + res = *(PyObject **)addr; + DEOPT_IF(res == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_CLASS) { + assert(cframe.use_tracing == 0); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *cls = TOP(); + DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + LOAD_ATTR); + assert(type_version != 0); + + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + Py_INCREF(res); + SET_TOP(NULL); + STACK_GROW((oparg & 1)); + SET_TOP(res); + Py_DECREF(cls); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_PROPERTY) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *fget = read_obj(cache->descr); + assert(Py_IS_TYPE(fget, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)fget; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 1); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(fget); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + for (int i = 1; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { + assert(cframe.use_tracing == 0); + DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + PyObject *owner = TOP(); + PyTypeObject *cls = Py_TYPE(owner); + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + assert(type_version != 0); + PyObject *getattribute = 
read_obj(cache->descr); + assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); + PyFunctionObject *f = (PyFunctionObject *)getattribute; + uint32_t func_version = read_u32(cache->keys_version); + assert(func_version != 0); + DEOPT_IF(f->func_version != func_version, LOAD_ATTR); + PyCodeObject *code = (PyCodeObject *)f->func_code; + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + + PyObject *name = GETITEM(names, oparg >> 1); + Py_INCREF(f); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f); + SET_TOP(NULL); + int shrink_stack = !(oparg & 1); + STACK_SHRINK(shrink_stack); + new_frame->localsplus[0] = owner; + new_frame->localsplus[1] = Py_NewRef(name); + for (int i = 2; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH_INLINED(new_frame); + } + + TARGET(STORE_ATTR_INSTANCE_VALUE) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + STAT_INC(STORE_ATTR, hit); + Py_ssize_t index = cache->index; + STACK_SHRINK(1); + PyObject *value = POP(); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + DISPATCH(); + } + + TARGET(STORE_ATTR_WITH_HINT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(_PyDictOrValues_IsValues(dorv), STORE_ATTR); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + DEOPT_IF(dict == NULL, STORE_ATTR); + assert(PyDict_CheckExact((PyObject *)dict)); + PyObject *name = GETITEM(names, oparg); + uint16_t hint = cache->index; + DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR); + PyObject *value, *old_value; + uint64_t new_version; + if (DK_IS_UNICODE(dict->ma_keys)) { + PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + STACK_SHRINK(1); + value = POP(); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + else { + PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; + DEOPT_IF(ep->me_key != name, STORE_ATTR); + old_value = ep->me_value; + DEOPT_IF(old_value == NULL, STORE_ATTR); + STACK_SHRINK(1); + value = POP(); + new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value); + ep->me_value = value; + } + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); + /* Ensure dict is GC tracked if it needs to be */ + if 
(!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { + _PyObject_GC_TRACK(dict); + } + /* PEP 509 */ + dict->ma_version_tag = new_version; + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + DISPATCH(); + } + + TARGET(STORE_ATTR_SLOT) { + assert(cframe.use_tracing == 0); + PyObject *owner = TOP(); + PyTypeObject *tp = Py_TYPE(owner); + _PyAttrCache *cache = (_PyAttrCache *)next_instr; + uint32_t type_version = read_u32(cache->version); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + char *addr = (char *)owner + cache->index; + STAT_INC(STORE_ATTR, hit); + STACK_SHRINK(1); + PyObject *value = POP(); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR); + DISPATCH(); + } + + TARGET(COMPARE_OP) { + PREDICTED(COMPARE_OP); + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + PyObject *right = TOP(); + PyObject *left = SECOND(); + next_instr--; + _Py_Specialize_CompareOp(left, right, next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(COMPARE_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(oparg <= Py_GE); + PyObject *right = POP(); + PyObject *left = TOP(); + PyObject *res = PyObject_RichCompare(left, right, oparg); + SET_TOP(res); + Py_DECREF(left); + Py_DECREF(right); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + DISPATCH(); + } + + TARGET(COMPARE_OP_FLOAT_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int when_to_jump_mask = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + double dleft = PyFloat_AS_DOUBLE(left); + double dright = PyFloat_AS_DOUBLE(right); + int sign = (dleft > dright) - (dleft < dright); + DEOPT_IF(isnan(dleft), COMPARE_OP); + DEOPT_IF(isnan(dright), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + int jump = (1 << (sign + 1)) & when_to_jump_mask; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + DISPATCH(); + } + + TARGET(COMPARE_OP_INT_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (int ? 
int) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int when_to_jump_mask = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP); + DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); + Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0]; + Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0]; + int sign = (ileft > iright) - (ileft < iright); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); + _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + int jump = (1 << (sign + 1)) & when_to_jump_mask; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + DISPATCH(); + } + + TARGET(COMPARE_OP_STR_JUMP) { + assert(cframe.use_tracing == 0); + // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false) + _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; + int invert = cache->mask; + PyObject *right = TOP(); + PyObject *left = SECOND(); + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int res = _PyUnicode_Equal(left, right); + assert(oparg == Py_EQ || oparg == Py_NE); + JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP); + NEXTOPARG(); + assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE); + STACK_SHRINK(2); + _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); + _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); + assert(res == 0 || res == 1); + assert(invert == 0 || invert == 1); + int jump = res ^ invert; + if (!jump) { + next_instr++; + } + else { + JUMPBY(1 + oparg); + } + DISPATCH(); + } + + TARGET(IS_OP) { + PyObject *right = POP(); + PyObject *left = TOP(); + int res = Py_Is(left, right) ^ oparg; + PyObject *b = res ? Py_True : Py_False; + SET_TOP(Py_NewRef(b)); + Py_DECREF(left); + Py_DECREF(right); + DISPATCH(); + } + + TARGET(CONTAINS_OP) { + PyObject *right = POP(); + PyObject *left = POP(); + int res = PySequence_Contains(right, left); + Py_DECREF(left); + Py_DECREF(right); + if (res < 0) { + goto error; + } + PyObject *b = (res^oparg) ? 
Py_True : Py_False; + PUSH(Py_NewRef(b)); + DISPATCH(); + } + + TARGET(CHECK_EG_MATCH) { + PyObject *match_type = POP(); + if (check_except_star_type_valid(tstate, match_type) < 0) { + Py_DECREF(match_type); + goto error; + } + + PyObject *exc_value = TOP(); + PyObject *match = NULL, *rest = NULL; + int res = exception_group_match(exc_value, match_type, + &match, &rest); + Py_DECREF(match_type); + if (res < 0) { + goto error; + } + + if (match == NULL || rest == NULL) { + assert(match == NULL); + assert(rest == NULL); + goto error; + } + if (Py_IsNone(match)) { + PUSH(match); + Py_XDECREF(rest); + } + else { + /* Total or partial match - update the stack from + * [val] + * to + * [rest, match] + * (rest can be Py_None) + */ + + SET_TOP(rest); + PUSH(match); + PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + Py_DECREF(exc_value); + } + DISPATCH(); + } + + TARGET(CHECK_EXC_MATCH) { + PyObject *right = POP(); + PyObject *left = TOP(); + assert(PyExceptionInstance_Check(left)); + if (check_except_type_valid(tstate, right) < 0) { + Py_DECREF(right); + goto error; + } + + int res = PyErr_GivenExceptionMatches(left, right); + Py_DECREF(right); + PUSH(Py_NewRef(res ? Py_True : Py_False)); + DISPATCH(); + } + + TARGET(IMPORT_NAME) { + PyObject *name = GETITEM(names, oparg); + PyObject *fromlist = POP(); + PyObject *level = TOP(); + PyObject *res; + res = import_name(tstate, frame, name, fromlist, level); + Py_DECREF(level); + Py_DECREF(fromlist); + SET_TOP(res); + if (res == NULL) + goto error; + DISPATCH(); + } + + TARGET(IMPORT_STAR) { + PyObject *from = POP(), *locals; + int err; + if (_PyFrame_FastToLocalsWithError(frame) < 0) { + Py_DECREF(from); + goto error; + } + + locals = LOCALS(); + if (locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found during 'import *'"); + Py_DECREF(from); + goto error; + } + err = import_all_from(tstate, locals, from); + _PyFrame_LocalsToFast(frame, 0); + Py_DECREF(from); + if (err != 0) + goto error; + DISPATCH(); + } + + TARGET(IMPORT_FROM) { + PyObject *name = GETITEM(names, oparg); + PyObject *from = TOP(); + PyObject *res; + res = import_from(tstate, from, name); + PUSH(res); + if (res == NULL) + goto error; + DISPATCH(); + } + + TARGET(JUMP_FORWARD) { + JUMPBY(oparg); + DISPATCH(); + } + + TARGET(JUMP_BACKWARD) { + PREDICTED(JUMP_BACKWARD); + assert(oparg < INSTR_OFFSET()); + JUMPBY(-oparg); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(POP_JUMP_IF_FALSE) { + PREDICTED(POP_JUMP_IF_FALSE); + PyObject *cond = POP(); + if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) + ; + else if (err == 0) { + JUMPBY(oparg); + } + else + goto error; + } + DISPATCH(); + } + + TARGET(POP_JUMP_IF_TRUE) { + PyObject *cond = POP(); + if (Py_IsFalse(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + _Py_DECREF_NO_DEALLOC(cond); + JUMPBY(oparg); + } + else { + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) + ; + else + goto error; + } + DISPATCH(); + } + + TARGET(POP_JUMP_IF_NOT_NONE) { + PyObject *value = POP(); + if (!Py_IsNone(value)) { + JUMPBY(oparg); + } + Py_DECREF(value); + DISPATCH(); + } + + TARGET(POP_JUMP_IF_NONE) { + PyObject *value = POP(); + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + JUMPBY(oparg); + } + else { + Py_DECREF(value); + } + DISPATCH(); + } + + 
TARGET(JUMP_IF_FALSE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsTrue(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsFalse(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else if (err == 0) { + JUMPBY(oparg); + } + else { + goto error; + } + } + DISPATCH(); + } + + TARGET(JUMP_IF_TRUE_OR_POP) { + PyObject *cond = TOP(); + int err; + if (Py_IsFalse(cond)) { + STACK_SHRINK(1); + _Py_DECREF_NO_DEALLOC(cond); + } + else if (Py_IsTrue(cond)) { + JUMPBY(oparg); + } + else { + err = PyObject_IsTrue(cond); + if (err > 0) { + JUMPBY(oparg); + } + else if (err == 0) { + STACK_SHRINK(1); + Py_DECREF(cond); + } + else { + goto error; + } + } + DISPATCH(); + } + + TARGET(JUMP_BACKWARD_NO_INTERRUPT) { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ + JUMPBY(-oparg); + DISPATCH(); + } + + TARGET(GET_LEN) { + // PUSH(len(TOS)) + Py_ssize_t len_i = PyObject_Length(TOP()); + if (len_i < 0) { + goto error; + } + PyObject *len_o = PyLong_FromSsize_t(len_i); + if (len_o == NULL) { + goto error; + } + PUSH(len_o); + DISPATCH(); + } + + TARGET(MATCH_CLASS) { + // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or + // None on failure. + PyObject *names = POP(); + PyObject *type = POP(); + PyObject *subject = TOP(); + assert(PyTuple_CheckExact(names)); + PyObject *attrs = match_class(tstate, subject, type, oparg, names); + Py_DECREF(names); + Py_DECREF(type); + if (attrs) { + // Success! + assert(PyTuple_CheckExact(attrs)); + SET_TOP(attrs); + } + else if (_PyErr_Occurred(tstate)) { + // Error! + goto error; + } + else { + // Failure! + SET_TOP(Py_NewRef(Py_None)); + } + Py_DECREF(subject); + DISPATCH(); + } + + TARGET(MATCH_MAPPING) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + DISPATCH(); + } + + TARGET(MATCH_SEQUENCE) { + PyObject *subject = TOP(); + int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; + PyObject *res = match ? Py_True : Py_False; + PUSH(Py_NewRef(res)); + PREDICT(POP_JUMP_IF_FALSE); + DISPATCH(); + } + + TARGET(MATCH_KEYS) { + // On successful match, PUSH(values). Otherwise, PUSH(None). + PyObject *keys = TOP(); + PyObject *subject = SECOND(); + PyObject *values_or_none = match_keys(tstate, subject, keys); + if (values_or_none == NULL) { + goto error; + } + PUSH(values_or_none); + DISPATCH(); + } + + TARGET(GET_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + DISPATCH(); + } + + TARGET(GET_YIELD_FROM_ITER) { + /* before: [obj]; after [getiter(obj)] */ + PyObject *iterable = TOP(); + PyObject *iter; + if (PyCoro_CheckExact(iterable)) { + /* `iterable` is a coroutine */ + if (!(frame->f_code->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) { + /* and it is used in a 'yield from' expression of a + regular generator. 
*/ + Py_DECREF(iterable); + SET_TOP(NULL); + _PyErr_SetString(tstate, PyExc_TypeError, + "cannot 'yield from' a coroutine object " + "in a non-coroutine generator"); + goto error; + } + } + else if (!PyGen_CheckExact(iterable)) { + /* `iterable` is not a generator. */ + iter = PyObject_GetIter(iterable); + Py_DECREF(iterable); + SET_TOP(iter); + if (iter == NULL) + goto error; + } + PREDICT(LOAD_CONST); + DISPATCH(); + } + + TARGET(FOR_ITER) { + PREDICTED(FOR_ITER); + _PyForIterCache *cache = (_PyForIterCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_ForIter(TOP(), next_instr, oparg); + DISPATCH_SAME_OPARG(); + } + STAT_INC(FOR_ITER, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + /* before: [iter]; after: [iter, iter()] *or* [] */ + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + else if (tstate->c_tracefunc != NULL) { + call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + } + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + } + DISPATCH(); + } + + TARGET(FOR_ITER_LIST) { + assert(cframe.use_tracing == 0); + _PyListIterObject *it = (_PyListIterObject *)TOP(); + DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + PyListObject *seq = it->it_seq; + if (seq) { + if (it->it_index < PyList_GET_SIZE(seq)) { + PyObject *next = PyList_GET_ITEM(seq, it->it_index++); + PUSH(Py_NewRef(next)); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + goto end_for_iter_list; // End of this instruction + } + it->it_seq = NULL; + Py_DECREF(seq); + } + STACK_SHRINK(1); + Py_DECREF(it); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + end_for_iter_list: + DISPATCH(); + } + + TARGET(FOR_ITER_RANGE) { + assert(cframe.use_tracing == 0); + _PyRangeIterObject *r = (_PyRangeIterObject *)TOP(); + DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER]; + assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST); + if (r->index >= r->len) { + STACK_SHRINK(1); + Py_DECREF(r); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1); + } + else { + long value = (long)(r->start + + (unsigned long)(r->index++) * r->step); + if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) { + goto error; + } + // The STORE_FAST is already done. 
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1); + } + DISPATCH(); + } + + TARGET(FOR_ITER_GEN) { + assert(cframe.use_tracing == 0); + PyGenObject *gen = (PyGenObject *)TOP(); + DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); + DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); + STAT_INC(FOR_ITER, hit); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->yield_offset = oparg; + _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); + assert(_Py_OPCODE(*next_instr) == END_FOR); + DISPATCH_INLINED(gen_frame); + } + + TARGET(BEFORE_ASYNC_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "asynchronous context manager protocol " + "(missed __aexit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) + goto error; + PUSH(res); + PREDICT(GET_AWAITABLE); + DISPATCH(); + } + + TARGET(BEFORE_WITH) { + PyObject *mgr = TOP(); + PyObject *res; + PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); + if (enter == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol", + Py_TYPE(mgr)->tp_name); + } + goto error; + } + PyObject *exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); + if (exit == NULL) { + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object does not support the " + "context manager protocol " + "(missed __exit__ method)", + Py_TYPE(mgr)->tp_name); + } + Py_DECREF(enter); + goto error; + } + SET_TOP(exit); + Py_DECREF(mgr); + res = _PyObject_CallNoArgs(enter); + Py_DECREF(enter); + if (res == NULL) { + goto error; + } + PUSH(res); + DISPATCH(); + } + + TARGET(WITH_EXCEPT_START) { + PyObject *val = PEEK(1); + PyObject *lasti = PEEK(3); + PyObject *exit_func = PEEK(4); + PyObject *res; + /* At the top of the stack are 4 values: + - val: TOP = exc_info() + - unused: SECOND = previous exception + - lasti: THIRD = lasti of exception in exc_info() + - exit_func: FOURTH = the context.__exit__ bound method + We call FOURTH(type(TOP), TOP, GetTraceback(TOP)). + Then we push the __exit__ return value. 
+ */ + PyObject *exc, *tb; + + assert(val && PyExceptionInstance_Check(val)); + exc = PyExceptionInstance_Class(val); + tb = PyException_GetTraceback(val); + Py_XDECREF(tb); + assert(PyLong_Check(lasti)); + (void)lasti; // Shut up compiler warning if asserts are off + PyObject *stack[4] = {NULL, exc, val, tb}; + res = PyObject_Vectorcall(exit_func, stack + 1, + 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); + if (res == NULL) goto error; + STACK_GROW(1); + POKE(1, res); + DISPATCH(); + } + + TARGET(PUSH_EXC_INFO) { + PyObject *value = TOP(); + + _PyErr_StackItem *exc_info = tstate->exc_info; + if (exc_info->exc_value != NULL) { + SET_TOP(exc_info->exc_value); + } + else { + SET_TOP(Py_NewRef(Py_None)); + } + + PUSH(Py_NewRef(value)); + assert(PyExceptionInstance_Check(value)); + exc_info->exc_value = value; + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { + /* Cached method object */ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + assert(type_version != 0); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; + DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + read_u32(cache->keys_version), LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_WITH_DICT) { + /* Can be either a managed dict, or a tp_dictoffset offset.*/ + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + + DEOPT_IF(self_cls->tp_version_tag != read_u32(cache->type_version), + LOAD_ATTR); + /* Treat index as a signed 16 bit value */ + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyDictObject **dictptr = (PyDictObject**)(((char *)self)+dictoffset); + PyDictObject *dict = *dictptr; + DEOPT_IF(dict == NULL, LOAD_ATTR); + DEOPT_IF(dict->ma_keys->dk_version != read_u32(cache->keys_version), + LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_NO_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(self_cls->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { + assert(cframe.use_tracing == 0); + PyObject *self = TOP(); + 
PyTypeObject *self_cls = Py_TYPE(self); + _PyLoadMethodCache *cache = (_PyLoadMethodCache *)next_instr; + uint32_t type_version = read_u32(cache->type_version); + DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = self_cls->tp_dictoffset; + assert(dictoffset > 0); + PyObject *dict = *(PyObject **)((char *)self + dictoffset); + /* This object has a __dict__, just not yet created */ + DEOPT_IF(dict != NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + PyObject *res = read_obj(cache->descr); + assert(res != NULL); + assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)); + SET_TOP(Py_NewRef(res)); + PUSH(self); + JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + DISPATCH(); + } + + TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { + DEOPT_IF(is_method(stack_pointer, oparg), CALL); + PyObject *function = PEEK(oparg + 1); + DEOPT_IF(Py_TYPE(function) != &PyMethod_Type, CALL); + STAT_INC(CALL, hit); + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg + 1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg + 2) = Py_NewRef(meth); + Py_DECREF(function); + GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); + } + + TARGET(KW_NAMES) { + assert(kwnames == NULL); + assert(oparg < PyTuple_GET_SIZE(consts)); + kwnames = GETITEM(consts, oparg); + DISPATCH(); + } + + TARGET(CALL) { + PREDICTED(CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + int is_meth = is_method(stack_pointer, oparg); + int nargs = oparg + is_meth; + PyObject *callable = PEEK(nargs + 1); + next_instr--; + _Py_Specialize_Call(callable, next_instr, nargs, kwnames); + DISPATCH_SAME_OPARG(); + } + STAT_INC(CALL, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + int total_args, is_meth; + is_meth = is_method(stack_pointer, oparg); + PyObject *function = PEEK(oparg + 1); + if (!is_meth && Py_TYPE(function) == &PyMethod_Type) { + PyObject *self = ((PyMethodObject *)function)->im_self; + PEEK(oparg+1) = Py_NewRef(self); + PyObject *meth = ((PyMethodObject *)function)->im_func; + PEEK(oparg+2) = Py_NewRef(meth); + Py_DECREF(function); + is_meth = 1; + } + total_args = oparg + is_meth; + function = PEEK(total_args + 1); + int positional_args = total_args - KWNAMES_LEN(); + // Check if the call can be inlined or not + if (Py_TYPE(function) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)function)->vectorcall == _PyFunction_Vectorcall) + { + int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(function)); + STACK_SHRINK(total_args); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)function, locals, + stack_pointer, positional_args, kwnames + ); + kwnames = NULL; + STACK_SHRINK(2-is_meth); + // The frame has stolen all the arguments from the stack, + // so there is no need to clean them up. 
+ if (new_frame == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + /* Callable is not a normal Python function */ + PyObject *res; + if (cframe.use_tracing) { + res = trace_call_function( + tstate, function, stack_pointer-total_args, + positional_args, kwnames); + } + else { + res = PyObject_Vectorcall( + function, stack_pointer-total_args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + } + kwnames = NULL; + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(function); + /* Clear the stack */ + STACK_SHRINK(total_args); + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_PY_EXACT_ARGS) { + PREDICTED(CALL_PY_EXACT_ARGS); + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(code->co_argcount != argcount, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + TARGET(CALL_PY_WITH_DEFAULTS) { + assert(kwnames == NULL); + DEOPT_IF(tstate->interp->eval_frame, CALL); + _PyCallCache *cache = (_PyCallCache *)next_instr; + int is_meth = is_method(stack_pointer, oparg); + int argcount = oparg + is_meth; + PyObject *callable = PEEK(argcount + 1); + DEOPT_IF(!PyFunction_Check(callable), CALL); + PyFunctionObject *func = (PyFunctionObject *)callable; + DEOPT_IF(func->func_version != read_u32(cache->func_version), CALL); + PyCodeObject *code = (PyCodeObject *)func->func_code; + DEOPT_IF(argcount > code->co_argcount, CALL); + int minargs = cache->min_args; + DEOPT_IF(argcount < minargs, CALL); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); + STAT_INC(CALL, hit); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, func); + STACK_SHRINK(argcount); + for (int i = 0; i < argcount; i++) { + new_frame->localsplus[i] = stack_pointer[i]; + } + for (int i = argcount; i < code->co_argcount; i++) { + PyObject *def = PyTuple_GET_ITEM(func->func_defaults, + i - minargs); + new_frame->localsplus[i] = Py_NewRef(def); + } + for (int i = code->co_argcount; i < code->co_nlocalsplus; i++) { + new_frame->localsplus[i] = NULL; + } + STACK_SHRINK(2-is_meth); + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH_INLINED(new_frame); + } + + TARGET(CALL_NO_KW_TYPE_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *obj = TOP(); + PyObject *callable = SECOND(); + DEOPT_IF(callable != (PyObject *)&PyType_Type, CALL); + STAT_INC(CALL, hit); + 
JUMPBY(INLINE_CACHE_ENTRIES_CALL); + PyObject *res = Py_NewRef(Py_TYPE(obj)); + Py_DECREF(callable); + Py_DECREF(obj); + STACK_SHRINK(2); + SET_TOP(res); + DISPATCH(); + } + + TARGET(CALL_NO_KW_STR_1) { + assert(kwnames == NULL); + assert(cframe.use_tracing == 0); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PyObject_Str(arg); + Py_DECREF(arg); + Py_DECREF(&PyUnicode_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_TUPLE_1) { + assert(kwnames == NULL); + assert(oparg == 1); + DEOPT_IF(is_method(stack_pointer, 1), CALL); + PyObject *callable = PEEK(2); + DEOPT_IF(callable != (PyObject *)&PyTuple_Type, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + PyObject *res = PySequence_Tuple(arg); + Py_DECREF(arg); + Py_DECREF(&PyTuple_Type); + STACK_SHRINK(2); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_BUILTIN_CLASS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + int kwnames_len = KWNAMES_LEN(); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyType_Check(callable), CALL); + PyTypeObject *tp = (PyTypeObject *)callable; + DEOPT_IF(tp->tp_vectorcall == NULL, CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + PyObject *res = tp->tp_vectorcall((PyObject *)tp, stack_pointer, + total_args-kwnames_len, kwnames); + kwnames = NULL; + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(tp); + STACK_SHRINK(1-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_BUILTIN_O) { + assert(cframe.use_tracing == 0); + /* Builtin METH_O functions */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *arg = TOP(); + PyObject *res = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable), arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + Py_DECREF(arg); + Py_DECREF(callable); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_BUILTIN_FAST) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL functions, without keywords */ + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL, + CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs) */ + PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + /* Free the arguments. */ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + /* Not deopting because this doesn't mean our optimization was + wrong. `res` can be NULL for valid reasons. Eg. getattr(x, + 'invalid'). In those cases an exception is set, so we must + handle it. + */ + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { + assert(cframe.use_tracing == 0); + /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(!PyCFunction_CheckExact(callable), CALL); + DEOPT_IF(PyCFunction_GET_FLAGS(callable) != + (METH_FASTCALL | METH_KEYWORDS), CALL); + STAT_INC(CALL, hit); + STACK_SHRINK(total_args); + /* res = func(self, args, nargs, kwnames) */ + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void)) + PyCFunction_GET_FUNCTION(callable); + PyObject *res = cfunc( + PyCFunction_GET_SELF(callable), + stack_pointer, + total_args - KWNAMES_LEN(), + kwnames + ); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. 
*/ + for (int i = 0; i < total_args; i++) { + Py_DECREF(stack_pointer[i]); + } + STACK_SHRINK(2-is_meth); + PUSH(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_LEN) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* len(o) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyObject *callable = PEEK(total_args + 1); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.len, CALL); + STAT_INC(CALL, hit); + PyObject *arg = TOP(); + Py_ssize_t len_i = PyObject_Length(arg); + if (len_i < 0) { + goto error; + } + PyObject *res = PyLong_FromSsize_t(len_i); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + Py_DECREF(arg); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH(); + } + + TARGET(CALL_NO_KW_ISINSTANCE) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + /* isinstance(o, o2) */ + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyObject *callable = PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); + STAT_INC(CALL, hit); + PyObject *cls = POP(); + PyObject *inst = TOP(); + int retval = PyObject_IsInstance(inst, cls); + if (retval < 0) { + Py_DECREF(cls); + goto error; + } + PyObject *res = PyBool_FromLong(retval); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(inst); + Py_DECREF(cls); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + DISPATCH(); + } + + TARGET(CALL_NO_KW_LIST_APPEND) { + assert(cframe.use_tracing == 0); + assert(kwnames == NULL); + assert(oparg == 1); + PyObject *callable = PEEK(3); + PyInterpreterState *interp = _PyInterpreterState_GET(); + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); + PyObject *list = SECOND(); + DEOPT_IF(!PyList_Check(list), CALL); + STAT_INC(CALL, hit); + PyObject *arg = POP(); + if (_PyList_AppendTakeRef((PyListObject *)list, arg) < 0) { + goto error; + } + STACK_SHRINK(2); + Py_DECREF(list); + Py_DECREF(callable); + // CALL + POP_TOP + JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); + assert(_Py_OPCODE(next_instr[-1]) == POP_TOP); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(total_args != 2, CALL); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_O, CALL); + PyObject *arg = TOP(); + PyObject *self = SECOND(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, arg); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + Py_DECREF(arg); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); + PyTypeObject *d_type = callable->d_common.d_type; + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); + STAT_INC(CALL, hit); + int nargs = total_args-1; + STACK_SHRINK(nargs); + _PyCFunctionFastWithKeywords cfunc = + (_PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; + PyObject *res = cfunc(self, stack_pointer, nargs - KWNAMES_LEN(), + kwnames); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + kwnames = NULL; + + /* Free the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { + assert(kwnames == NULL); + assert(oparg == 0 || oparg == 1); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + DEOPT_IF(total_args != 1, CALL); + PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + PyObject *self = TOP(); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); + STAT_INC(CALL, hit); + PyCFunction cfunc = meth->ml_meth; + // This is slower but CPython promises to check all non-vectorcall + // function calls. 
+ if (_Py_EnterRecursiveCallTstate(tstate, " while calling a Python object")) { + goto error; + } + PyObject *res = _PyCFunction_TrampolineCall(cfunc, self, NULL); + _Py_LeaveRecursiveCallTstate(tstate); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + Py_DECREF(self); + STACK_SHRINK(oparg + 1); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { + assert(kwnames == NULL); + int is_meth = is_method(stack_pointer, oparg); + int total_args = oparg + is_meth; + PyMethodDescrObject *callable = + (PyMethodDescrObject *)PEEK(total_args + 1); + /* Builtin METH_FASTCALL methods, without keywords */ + DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = callable->d_method; + DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); + PyObject *self = PEEK(total_args); + DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + STAT_INC(CALL, hit); + _PyCFunctionFast cfunc = + (_PyCFunctionFast)(void(*)(void))meth->ml_meth; + int nargs = total_args-1; + STACK_SHRINK(nargs); + PyObject *res = cfunc(self, stack_pointer, nargs); + assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + /* Clear the stack of the arguments. */ + for (int i = 0; i < nargs; i++) { + Py_DECREF(stack_pointer[i]); + } + Py_DECREF(self); + STACK_SHRINK(2-is_meth); + SET_TOP(res); + Py_DECREF(callable); + if (res == NULL) { + goto error; + } + JUMPBY(INLINE_CACHE_ENTRIES_CALL); + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(CALL_FUNCTION_EX) { + PREDICTED(CALL_FUNCTION_EX); + PyObject *func, *callargs, *kwargs = NULL, *result; + if (oparg & 0x01) { + kwargs = POP(); + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. 
+ assert(PyDict_CheckExact(kwargs)); + } + callargs = POP(); + func = TOP(); + if (!PyTuple_CheckExact(callargs)) { + if (check_args_iterable(tstate, func, callargs) < 0) { + Py_DECREF(callargs); + goto error; + } + Py_SETREF(callargs, PySequence_Tuple(callargs)); + if (callargs == NULL) { + goto error; + } + } + assert(PyTuple_CheckExact(callargs)); + + result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); + Py_DECREF(func); + Py_DECREF(callargs); + Py_XDECREF(kwargs); + + STACK_SHRINK(1); + assert(TOP() == NULL); + SET_TOP(result); + if (result == NULL) { + goto error; + } + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(MAKE_FUNCTION) { + PyObject *codeobj = POP(); + PyFunctionObject *func = (PyFunctionObject *) + PyFunction_New(codeobj, GLOBALS()); + + Py_DECREF(codeobj); + if (func == NULL) { + goto error; + } + + if (oparg & 0x08) { + assert(PyTuple_CheckExact(TOP())); + func->func_closure = POP(); + } + if (oparg & 0x04) { + assert(PyTuple_CheckExact(TOP())); + func->func_annotations = POP(); + } + if (oparg & 0x02) { + assert(PyDict_CheckExact(TOP())); + func->func_kwdefaults = POP(); + } + if (oparg & 0x01) { + assert(PyTuple_CheckExact(TOP())); + func->func_defaults = POP(); + } + + PUSH((PyObject *)func); + DISPATCH(); + } + + TARGET(RETURN_GENERATOR) { + assert(PyFunction_Check(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); + if (gen == NULL) { + goto error; + } + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + _PyFrame_Copy(frame, gen_frame); + assert(frame->frame_obj == NULL); + gen->gi_frame_state = FRAME_CREATED; + gen_frame->owner = FRAME_OWNED_BY_GENERATOR; + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + _PyInterpreterFrame *prev = frame->previous; + _PyThreadState_PopFrame(tstate, frame); + frame = cframe.current_frame = prev; + _PyFrame_StackPush(frame, (PyObject *)gen); + goto resume_frame; + } + + TARGET(BUILD_SLICE) { + PyObject *start, *stop, *step, *slice; + if (oparg == 3) + step = POP(); + else + step = NULL; + stop = POP(); + start = TOP(); + slice = PySlice_New(start, stop, step); + Py_DECREF(start); + Py_DECREF(stop); + Py_XDECREF(step); + SET_TOP(slice); + if (slice == NULL) + goto error; + DISPATCH(); + } + + TARGET(FORMAT_VALUE) { + /* Handles f-string value formatting. */ + PyObject *result; + PyObject *fmt_spec; + PyObject *value; + PyObject *(*conv_fn)(PyObject *); + int which_conversion = oparg & FVC_MASK; + int have_fmt_spec = (oparg & FVS_MASK) == FVS_HAVE_SPEC; + + fmt_spec = have_fmt_spec ? POP() : NULL; + value = POP(); + + /* See if any conversion is specified. */ + switch (which_conversion) { + case FVC_NONE: conv_fn = NULL; break; + case FVC_STR: conv_fn = PyObject_Str; break; + case FVC_REPR: conv_fn = PyObject_Repr; break; + case FVC_ASCII: conv_fn = PyObject_ASCII; break; + default: + _PyErr_Format(tstate, PyExc_SystemError, + "unexpected conversion flag %d", + which_conversion); + goto error; + } + + /* If there's a conversion function, call it and replace + value with that result. Otherwise, just use value, + without conversion. 
*/ + if (conv_fn != NULL) { + result = conv_fn(value); + Py_DECREF(value); + if (result == NULL) { + Py_XDECREF(fmt_spec); + goto error; + } + value = result; + } + + /* If value is a unicode object, and there's no fmt_spec, + then we know the result of format(value) is value + itself. In that case, skip calling format(). I plan to + move this optimization in to PyObject_Format() + itself. */ + if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { + /* Do nothing, just transfer ownership to result. */ + result = value; + } else { + /* Actually call format(). */ + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + if (result == NULL) { + goto error; + } + } + + PUSH(result); + DISPATCH(); + } + + TARGET(COPY) { + assert(oparg != 0); + PyObject *peek = PEEK(oparg); + PUSH(Py_NewRef(peek)); + DISPATCH(); + } + + TARGET(BINARY_OP) { + PREDICTED(BINARY_OP); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + PyObject *rhs = PEEK(1); + PyObject *lhs = PEEK(2); + PyObject *res; + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + assert(cframe.use_tracing == 0); + next_instr--; + _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); + DISPATCH_SAME_OPARG(); + } + STAT_INC(BINARY_OP, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + assert(0 <= oparg); + assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); + assert(binary_ops[oparg]); + res = binary_ops[oparg](lhs, rhs); + Py_DECREF(lhs); + Py_DECREF(rhs); + if (res == NULL) goto pop_2_error; + STACK_SHRINK(1); + POKE(1, res); + next_instr += 1; + DISPATCH(); + } + + TARGET(SWAP) { + assert(oparg != 0); + PyObject *top = TOP(); + SET_TOP(PEEK(oparg)); + PEEK(oparg) = top; + DISPATCH(); + } + + TARGET(EXTENDED_ARG) { + assert(oparg); + assert(cframe.use_tracing == 0); + opcode = _Py_OPCODE(*next_instr); + oparg = oparg << 8 | _Py_OPARG(*next_instr); + PRE_DISPATCH_GOTO(); + DISPATCH_GOTO(); + } + + TARGET(CACHE) { + Py_UNREACHABLE(); + } + + TARGET(LOAD_FAST__LOAD_FAST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + NEXTOPARG(); + next_instr++; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_2 = value; + } + STACK_GROW(2); + POKE(1, _tmp_2); + POKE(2, _tmp_1); + DISPATCH(); + } + + TARGET(LOAD_FAST__LOAD_CONST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + NEXTOPARG(); + next_instr++; + { + PyObject *value; + value = GETITEM(consts, oparg); + Py_INCREF(value); + _tmp_2 = value; + } + STACK_GROW(2); + POKE(1, _tmp_2); + POKE(2, _tmp_1); + DISPATCH(); + } + + TARGET(STORE_FAST__LOAD_FAST) { + PyObject *_tmp_1 = PEEK(1); + { + PyObject *value = _tmp_1; + SETLOCAL(oparg, value); + } + NEXTOPARG(); + next_instr++; + { + PyObject *value; + value = GETLOCAL(oparg); + assert(value != NULL); + Py_INCREF(value); + _tmp_1 = value; + } + POKE(1, _tmp_1); + DISPATCH(); + } + + TARGET(STORE_FAST__STORE_FAST) { + PyObject *_tmp_1 = PEEK(2); + PyObject *_tmp_2 = PEEK(1); + { + PyObject *value = _tmp_2; + SETLOCAL(oparg, value); + } + NEXTOPARG(); + next_instr++; + { + PyObject *value = _tmp_1; + SETLOCAL(oparg, value); + } + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(LOAD_CONST__LOAD_FAST) { + PyObject *_tmp_1; + PyObject *_tmp_2; + { 
+                PyObject *value;
+                value = GETITEM(consts, oparg);
+                Py_INCREF(value);
+                _tmp_1 = value;
+            }
+            NEXTOPARG();
+            next_instr++;
+            {
+                PyObject *value;
+                value = GETLOCAL(oparg);
+                assert(value != NULL);
+                Py_INCREF(value);
+                _tmp_2 = value;
+            }
+            STACK_GROW(2);
+            POKE(1, _tmp_2);
+            POKE(2, _tmp_1);
+            DISPATCH();
+        }
+
+        TARGET(END_FOR) {
+            PyObject *_tmp_1 = PEEK(2);
+            PyObject *_tmp_2 = PEEK(1);
+            {
+                PyObject *value = _tmp_2;
+                Py_DECREF(value);
+            }
+            {
+                PyObject *value = _tmp_1;
+                Py_DECREF(value);
+            }
+            STACK_SHRINK(2);
+            DISPATCH();
+        }
diff --git a/Python/getargs.c b/Python/getargs.c
index f0b84b8338ddc3..0167dd753d88f7 100644
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -1046,8 +1046,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
         /* Encode object */
         if (!recode_strings &&
             (PyBytes_Check(arg) || PyByteArray_Check(arg))) {
-            s = arg;
-            Py_INCREF(s);
+            s = Py_NewRef(arg);
             if (PyBytes_Check(arg)) {
                 size = PyBytes_GET_SIZE(s);
                 ptr = PyBytes_AS_STRING(s);
@@ -1847,9 +1846,6 @@ vgetargskeywords(PyObject *args, PyObject *kwargs, const char *format,
 }
 
 
-/* List of static parsers. */
-static struct _PyArg_Parser *static_arg_parsers = NULL;
-
 static int
 scan_keywords(const char * const *keywords, int *ptotal, int *pposonly)
 {
@@ -2025,8 +2021,8 @@ _parser_init(struct _PyArg_Parser *parser)
     parser->initialized = owned ? 1 : -1;
 
     assert(parser->next == NULL);
-    parser->next = static_arg_parsers;
-    static_arg_parsers = parser;
+    parser->next = _PyRuntime.getargs.static_parsers;
+    _PyRuntime.getargs.static_parsers = parser;
     return 1;
 }
 
@@ -2575,8 +2571,7 @@ _PyArg_UnpackKeywordsWithVararg(PyObject *const *args, Py_ssize_t nargs,
             /* copy tuple args */
             for (i = 0; i < nargs; i++) {
                 if (i >= vararg) {
-                    Py_INCREF(args[i]);
-                    PyTuple_SET_ITEM(buf[vararg], i - vararg, args[i]);
+                    PyTuple_SET_ITEM(buf[vararg], i - vararg, Py_NewRef(args[i]));
                     continue;
                 }
                 else {
@@ -2603,7 +2598,25 @@ _PyArg_UnpackKeywordsWithVararg(PyObject *const *args, Py_ssize_t nargs,
             current_arg = NULL;
         }
 
-        buf[i + vararg + 1] = current_arg;
+        /* If an argument is passed in as a keyword argument,
+         * it should be placed before `buf[vararg]`.
+         *
+         * For example:
+         * def f(a, /, b, *args):
+         *     pass
+         * f(1, b=2)
+         *
+         * This `buf` array should be: [1, 2, NULL].
+         * In this case, nargs < vararg.
+         *
+         * Otherwise, we leave a place at `buf[vararg]` for vararg tuple
+         * so the index is `i + 1`.
*/ + if (nargs < vararg) { + buf[i] = current_arg; + } + else { + buf[i + 1] = current_arg; + } if (current_arg) { --nkwargs; @@ -2932,14 +2945,14 @@ _PyArg_NoKwnames(const char *funcname, PyObject *kwnames) void _PyArg_Fini(void) { - struct _PyArg_Parser *tmp, *s = static_arg_parsers; + struct _PyArg_Parser *tmp, *s = _PyRuntime.getargs.static_parsers; while (s) { tmp = s->next; s->next = NULL; parser_clear(s); s = tmp; } - static_arg_parsers = NULL; + _PyRuntime.getargs.static_parsers = NULL; } #ifdef __cplusplus diff --git a/Python/getversion.c b/Python/getversion.c index 46910451fdf859..5db836ab4bfd6d 100644 --- a/Python/getversion.c +++ b/Python/getversion.c @@ -5,12 +5,23 @@ #include "patchlevel.h" -const char * -Py_GetVersion(void) +static int initialized = 0; +static char version[250]; + +void _Py_InitVersion(void) { - static char version[250]; + if (initialized) { + return; + } + initialized = 1; PyOS_snprintf(version, sizeof(version), "%.80s (%.80s) %.80s", PY_VERSION, Py_GetBuildInfo(), Py_GetCompiler()); +} + +const char * +Py_GetVersion(void) +{ + _Py_InitVersion(); return version; } diff --git a/Python/hamt.c b/Python/hamt.c index 3aa31c625824cb..8cb94641bef251 100644 --- a/Python/hamt.c +++ b/Python/hamt.c @@ -319,13 +319,6 @@ typedef struct { } PyHamtNode_Array; -typedef struct { - PyObject_VAR_HEAD - uint32_t b_bitmap; - PyObject *b_array[1]; -} PyHamtNode_Bitmap; - - typedef struct { PyObject_VAR_HEAD int32_t c_hash; @@ -333,10 +326,6 @@ typedef struct { } PyHamtNode_Collision; -static PyHamtNode_Bitmap *_empty_bitmap_node; -static PyHamtObject *_empty_hamt; - - static PyHamtObject * hamt_alloc(void); @@ -521,14 +510,16 @@ hamt_node_bitmap_new(Py_ssize_t size) PyHamtNode_Bitmap *node; Py_ssize_t i; + if (size == 0) { + /* Since bitmap nodes are immutable, we can cache the instance + for size=0 and reuse it whenever we need an empty bitmap node. + */ + return (PyHamtNode *)Py_NewRef(&_Py_SINGLETON(hamt_bitmap_node_empty)); + } + assert(size >= 0); assert(size % 2 == 0); - if (size == 0 && _empty_bitmap_node != NULL) { - Py_INCREF(_empty_bitmap_node); - return (PyHamtNode *)_empty_bitmap_node; - } - /* No freelist; allocate a new bitmap node */ node = PyObject_GC_NewVar( PyHamtNode_Bitmap, &_PyHamt_BitmapNode_Type, size); @@ -546,14 +537,6 @@ hamt_node_bitmap_new(Py_ssize_t size) _PyObject_GC_TRACK(node); - if (size == 0 && _empty_bitmap_node == NULL) { - /* Since bitmap nodes are immutable, we can cache the instance - for size=0 and reuse it whenever we need an empty bitmap node. 
- */ - _empty_bitmap_node = node; - Py_INCREF(_empty_bitmap_node); - } - return (PyHamtNode *)node; } @@ -577,8 +560,7 @@ hamt_node_bitmap_clone(PyHamtNode_Bitmap *node) } for (i = 0; i < Py_SIZE(node); i++) { - Py_XINCREF(node->b_array[i]); - clone->b_array[i] = node->b_array[i]; + clone->b_array[i] = Py_XNewRef(node->b_array[i]); } clone->b_bitmap = node->b_bitmap; @@ -603,14 +585,12 @@ hamt_node_bitmap_clone_without(PyHamtNode_Bitmap *o, uint32_t bit) uint32_t i; for (i = 0; i < key_idx; i++) { - Py_XINCREF(o->b_array[i]); - new->b_array[i] = o->b_array[i]; + new->b_array[i] = Py_XNewRef(o->b_array[i]); } assert(Py_SIZE(o) >= 0 && Py_SIZE(o) <= 32); for (i = val_idx + 1; i < (uint32_t)Py_SIZE(o); i++) { - Py_XINCREF(o->b_array[i]); - new->b_array[i - 2] = o->b_array[i]; + new->b_array[i - 2] = Py_XNewRef(o->b_array[i]); } new->b_bitmap = o->b_bitmap & ~bit; @@ -643,15 +623,11 @@ hamt_node_new_bitmap_or_collision(uint32_t shift, return NULL; } - Py_INCREF(key1); - n->c_array[0] = key1; - Py_INCREF(val1); - n->c_array[1] = val1; + n->c_array[0] = Py_NewRef(key1); + n->c_array[1] = Py_NewRef(val1); - Py_INCREF(key2); - n->c_array[2] = key2; - Py_INCREF(val2); - n->c_array[3] = val2; + n->c_array[2] = Py_NewRef(key2); + n->c_array[3] = Py_NewRef(val2); return (PyHamtNode *)n; } @@ -736,8 +712,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (val_or_node == (PyObject *)sub_node) { Py_DECREF(sub_node); - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } PyHamtNode_Bitmap *ret = hamt_node_bitmap_clone(self); @@ -759,8 +734,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (comp_err == 1) { /* key == key_or_null */ if (val == val_or_node) { /* we already have the same key/val pair; return self. */ - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } /* We're setting a new value for the key we had before. @@ -769,8 +743,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (ret == NULL) { return NULL; } - Py_INCREF(val); - Py_SETREF(ret->b_array[val_idx], val); + Py_SETREF(ret->b_array[val_idx], Py_NewRef(val)); return (PyHamtNode *)ret; } @@ -865,8 +838,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, if (self->b_array[j] == NULL) { new_node->a_array[i] = - (PyHamtNode *)self->b_array[j + 1]; - Py_INCREF(new_node->a_array[i]); + (PyHamtNode *)Py_NewRef(self->b_array[j + 1]); } else { int32_t rehash = hamt_hash(self->b_array[j]); @@ -923,22 +895,18 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, /* Copy all keys/values that will be before the new key/value we are adding. */ for (i = 0; i < key_idx; i++) { - Py_XINCREF(self->b_array[i]); - new_node->b_array[i] = self->b_array[i]; + new_node->b_array[i] = Py_XNewRef(self->b_array[i]); } /* Set the new key/value to the new Bitmap node. */ - Py_INCREF(key); - new_node->b_array[key_idx] = key; - Py_INCREF(val); - new_node->b_array[val_idx] = val; + new_node->b_array[key_idx] = Py_NewRef(key); + new_node->b_array[val_idx] = Py_NewRef(val); /* Copy all keys/values that will be after the new key/value we are adding. 
*/ assert(Py_SIZE(self) >= 0 && Py_SIZE(self) <= 32); for (i = key_idx; i < (uint32_t)Py_SIZE(self); i++) { - Py_XINCREF(self->b_array[i]); - new_node->b_array[i + 2] = self->b_array[i]; + new_node->b_array[i + 2] = Py_XNewRef(self->b_array[i]); } new_node->b_bitmap = self->b_bitmap | bit; @@ -1019,10 +987,8 @@ hamt_node_bitmap_without(PyHamtNode_Bitmap *self, PyObject *key = sub_tree->b_array[0]; PyObject *val = sub_tree->b_array[1]; - Py_INCREF(key); - Py_XSETREF(clone->b_array[key_idx], key); - Py_INCREF(val); - Py_SETREF(clone->b_array[val_idx], val); + Py_XSETREF(clone->b_array[key_idx], Py_NewRef(key)); + Py_SETREF(clone->b_array[val_idx], Py_NewRef(val)); Py_DECREF(sub_tree); @@ -1160,6 +1126,16 @@ hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self) Py_ssize_t len = Py_SIZE(self); Py_ssize_t i; + if (Py_SIZE(self) == 0) { + /* The empty node is statically allocated. */ + assert(self == &_Py_SINGLETON(hamt_bitmap_node_empty)); +#ifdef Py_DEBUG + _Py_FatalRefcountError("deallocating the empty hamt node bitmap singleton"); +#else + return; +#endif + } + PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, hamt_node_bitmap_dealloc) @@ -1343,14 +1319,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, } for (i = 0; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new_node->c_array[i] = self->c_array[i]; + new_node->c_array[i] = Py_NewRef(self->c_array[i]); } - Py_INCREF(key); - new_node->c_array[i] = key; - Py_INCREF(val); - new_node->c_array[i + 1] = val; + new_node->c_array[i] = Py_NewRef(key); + new_node->c_array[i + 1] = Py_NewRef(val); *added_leaf = 1; return (PyHamtNode *)new_node; @@ -1364,8 +1337,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, if (self->c_array[val_idx] == val) { /* We're setting a key/value pair that's already set. */ - Py_INCREF(self); - return (PyHamtNode *)self; + return (PyHamtNode *)Py_NewRef(self); } /* We need to replace old value for the key @@ -1378,14 +1350,11 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, /* Copy all elements of the old node to the new one. */ for (i = 0; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new_node->c_array[i] = self->c_array[i]; + new_node->c_array[i] = Py_NewRef(self->c_array[i]); } /* Replace the old value with the new value for the our key. 
*/ - Py_DECREF(new_node->c_array[val_idx]); - Py_INCREF(val); - new_node->c_array[val_idx] = val; + Py_SETREF(new_node->c_array[val_idx], Py_NewRef(val)); return (PyHamtNode *)new_node; @@ -1410,8 +1379,7 @@ hamt_node_collision_assoc(PyHamtNode_Collision *self, return NULL; } new_node->b_bitmap = hamt_bitpos(self->c_hash, shift); - Py_INCREF(self); - new_node->b_array[1] = (PyObject*) self; + new_node->b_array[1] = Py_NewRef(self); assoc_res = hamt_node_bitmap_assoc( new_node, shift, hash, key, val, added_leaf); @@ -1473,17 +1441,13 @@ hamt_node_collision_without(PyHamtNode_Collision *self, } if (key_idx == 0) { - Py_INCREF(self->c_array[2]); - node->b_array[0] = self->c_array[2]; - Py_INCREF(self->c_array[3]); - node->b_array[1] = self->c_array[3]; + node->b_array[0] = Py_NewRef(self->c_array[2]); + node->b_array[1] = Py_NewRef(self->c_array[3]); } else { assert(key_idx == 2); - Py_INCREF(self->c_array[0]); - node->b_array[0] = self->c_array[0]; - Py_INCREF(self->c_array[1]); - node->b_array[1] = self->c_array[1]; + node->b_array[0] = Py_NewRef(self->c_array[0]); + node->b_array[1] = Py_NewRef(self->c_array[1]); } node->b_bitmap = hamt_bitpos(hash, shift); @@ -1504,12 +1468,10 @@ hamt_node_collision_without(PyHamtNode_Collision *self, /* Copy all other keys from `self` to `new` */ Py_ssize_t i; for (i = 0; i < key_idx; i++) { - Py_INCREF(self->c_array[i]); - new->c_array[i] = self->c_array[i]; + new->c_array[i] = Py_NewRef(self->c_array[i]); } for (i = key_idx + 2; i < Py_SIZE(self); i++) { - Py_INCREF(self->c_array[i]); - new->c_array[i - 2] = self->c_array[i]; + new->c_array[i - 2] = Py_NewRef(self->c_array[i]); } *new_node = (PyHamtNode*)new; @@ -1661,8 +1623,7 @@ hamt_node_array_clone(PyHamtNode_Array *node) /* Copy all elements from the current Array node to the new one. */ for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { - Py_XINCREF(node->a_array[i]); - clone->a_array[i] = node->a_array[i]; + clone->a_array[i] = (PyHamtNode*)Py_XNewRef(node->a_array[i]); } VALIDATE_ARRAY_NODE(clone) @@ -1719,8 +1680,7 @@ hamt_node_array_assoc(PyHamtNode_Array *self, /* Copy all elements from the current Array node to the new one. 
*/ for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { - Py_XINCREF(self->a_array[i]); - new_node->a_array[i] = self->a_array[i]; + new_node->a_array[i] = (PyHamtNode*)Py_XNewRef(self->a_array[i]); } assert(new_node->a_array[idx] == NULL); @@ -1868,15 +1828,12 @@ hamt_node_array_without(PyHamtNode_Array *self, PyObject *key = child->b_array[0]; PyObject *val = child->b_array[1]; - Py_INCREF(key); - new->b_array[new_i] = key; - Py_INCREF(val); - new->b_array[new_i + 1] = val; + new->b_array[new_i] = Py_NewRef(key); + new->b_array[new_i + 1] = Py_NewRef(val); } else { new->b_array[new_i] = NULL; - Py_INCREF(node); - new->b_array[new_i + 1] = (PyObject*)node; + new->b_array[new_i + 1] = Py_NewRef(node); } } else { @@ -1894,8 +1851,7 @@ hamt_node_array_without(PyHamtNode_Array *self, /* Just copy the node into our new Bitmap */ new->b_array[new_i] = NULL; - Py_INCREF(node); - new->b_array[new_i + 1] = (PyObject*)node; + new->b_array[new_i + 1] = Py_NewRef(node); } new_i += 2; @@ -2311,8 +2267,7 @@ _PyHamt_Assoc(PyHamtObject *o, PyObject *key, PyObject *val) if (new_root == o->h_root) { Py_DECREF(new_root); - Py_INCREF(o); - return o; + return (PyHamtObject*)Py_NewRef(o); } new_o = hamt_alloc(); @@ -2348,8 +2303,7 @@ _PyHamt_Without(PyHamtObject *o, PyObject *key) case W_EMPTY: return _PyHamt_New(); case W_NOT_FOUND: - Py_INCREF(o); - return o; + return (PyHamtObject*)Py_NewRef(o); case W_NEWNODE: { assert(new_root != NULL); @@ -2470,35 +2424,15 @@ hamt_alloc(void) return o; } +#define _empty_hamt \ + (&_Py_INTERP_SINGLETON(_PyInterpreterState_Get(), hamt_empty)) + PyHamtObject * _PyHamt_New(void) { - if (_empty_hamt != NULL) { - /* HAMT is an immutable object so we can easily cache an - empty instance. */ - Py_INCREF(_empty_hamt); - return _empty_hamt; - } - - PyHamtObject *o = hamt_alloc(); - if (o == NULL) { - return NULL; - } - - o->h_root = hamt_node_bitmap_new(0); - if (o->h_root == NULL) { - Py_DECREF(o); - return NULL; - } - - o->h_count = 0; - - if (_empty_hamt == NULL) { - Py_INCREF(o); - _empty_hamt = o; - } - - return o; + /* HAMT is an immutable object so we can easily cache an + empty instance. */ + return (PyHamtObject*)Py_NewRef(_empty_hamt); } #ifdef Py_DEBUG @@ -2591,8 +2525,7 @@ hamt_baseiter_new(PyTypeObject *type, binaryfunc yield, PyHamtObject *o) return NULL; } - Py_INCREF(o); - it->hi_obj = o; + it->hi_obj = (PyHamtObject*)Py_NewRef(o); it->hi_yield = yield; hamt_iterator_init(&it->hi_iter, o->h_root); @@ -2648,8 +2581,7 @@ PyTypeObject _PyHamtKeys_Type = { static PyObject * hamt_iter_yield_keys(PyObject *key, PyObject *val) { - Py_INCREF(key); - return key; + return Py_NewRef(key); } PyObject * @@ -2672,8 +2604,7 @@ PyTypeObject _PyHamtValues_Type = { static PyObject * hamt_iter_yield_values(PyObject *key, PyObject *val) { - Py_INCREF(val); - return val; + return Py_NewRef(val); } PyObject * @@ -2717,6 +2648,15 @@ hamt_tp_traverse(PyHamtObject *self, visitproc visit, void *arg) static void hamt_tp_dealloc(PyHamtObject *self) { + if (self == _empty_hamt) { + /* The empty one is statically allocated. 
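           (Illustrative note, not part of the original patch: the empty hamt
           object now lives in the statically allocated per-interpreter
           singletons, so its reference count is never expected to reach zero.
           Getting into this deallocator therefore indicates a reference-counting
           bug; debug builds abort via _Py_FatalRefcountError() below, while
           release builds simply return without freeing anything.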
*/ +#ifdef Py_DEBUG + _Py_FatalRefcountError("deallocating the empty hamt singleton"); +#else + return; +#endif + } + PyObject_GC_UnTrack(self); if (self->h_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject*)self); @@ -2766,8 +2706,7 @@ hamt_tp_subscript(PyHamtObject *self, PyObject *key) case F_ERROR: return NULL; case F_FOUND: - Py_INCREF(val); - return val; + return Py_NewRef(val); case F_NOT_FOUND: PyErr_SetObject(PyExc_KeyError, key); return NULL; @@ -2817,14 +2756,12 @@ hamt_py_get(PyHamtObject *self, PyObject *args) case F_ERROR: return NULL; case F_FOUND: - Py_INCREF(val); - return val; + return Py_NewRef(val); case F_NOT_FOUND: if (def == NULL) { Py_RETURN_NONE; } - Py_INCREF(def); - return def; + return Py_NewRef(def); default: Py_UNREACHABLE(); } @@ -2955,11 +2892,3 @@ PyTypeObject _PyHamt_CollisionNode_Type = { .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, }; - - -void -_PyHamt_Fini(PyInterpreterState *interp) -{ - Py_CLEAR(_empty_hamt); - Py_CLEAR(_empty_bitmap_node); -} diff --git a/Python/import.c b/Python/import.c index 698ef37ce0a131..da6c15c5fd4144 100644 --- a/Python/import.c +++ b/Python/import.c @@ -27,13 +27,14 @@ extern "C" { /* Forward references */ static PyObject *import_add_module(PyThreadState *tstate, PyObject *name); -/* See _PyImport_FixupExtensionObject() below */ -static PyObject *extensions = NULL; - /* This table is defined in config.c: */ extern struct _inittab _PyImport_Inittab[]; +// This is not used after Py_Initialize() is called. +// (See _PyRuntimeState.imports.inittab.) struct _inittab *PyImport_Inittab = _PyImport_Inittab; +// When we dynamically allocate a larger table for PyImport_ExtendInittab(), +// we track the pointer here so we can deallocate it during finalization. static struct _inittab *inittab_copy = NULL; /*[clinic input] @@ -93,9 +94,9 @@ _PyImportZip_Init(PyThreadState *tstate) in different threads to return with a partially loaded module. These calls are serialized by the global interpreter lock. */ -static PyThread_type_lock import_lock = NULL; -static unsigned long import_lock_thread = PYTHREAD_INVALID_THREAD_ID; -static int import_lock_level = 0; +#define import_lock _PyRuntime.imports.lock.mutex +#define import_lock_thread _PyRuntime.imports.lock.thread +#define import_lock_level _PyRuntime.imports.lock.level void _PyImport_AcquireLock(void) @@ -221,14 +222,59 @@ _imp_release_lock_impl(PyObject *module) Py_RETURN_NONE; } +PyStatus +_PyImport_Init(void) +{ + if (_PyRuntime.imports.inittab != NULL) { + return _PyStatus_ERR("global import state already initialized"); + } + PyStatus status = _PyStatus_OK(); + + size_t size; + for (size = 0; PyImport_Inittab[size].name != NULL; size++) + ; + size++; + + /* Force default raw memory allocator to get a known allocator to be able + to release the memory in _PyImport_Fini() */ + PyMemAllocatorEx old_alloc; + _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + + /* Make the copy. 
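       (Illustrative note, not part of the original patch: the copy is a
       snapshot of PyImport_Inittab taken during interpreter startup, sized
       above to include the terminating NULL sentinel and allocated with the
       default raw allocator so that _PyImport_Fini() can always free it.  An
       embedder is expected to register any extra built-in modules before this
       snapshot exists; with a made-up "spam" module and its PyInit_spam
       function, the usual sequence is:

           PyImport_AppendInittab("spam", &PyInit_spam);
           Py_Initialize();

       After initialization the runtime consults _PyRuntime.imports.inittab,
       and the guards added further down make PyImport_AppendInittab() and
       PyImport_ExtendInittab() fail with a fatal error.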
*/ + struct _inittab *copied = PyMem_RawMalloc(size * sizeof(struct _inittab)); + if (copied == NULL) { + status = PyStatus_NoMemory(); + goto done; + } + memcpy(copied, PyImport_Inittab, size * sizeof(struct _inittab)); + _PyRuntime.imports.inittab = copied; + +done: + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + return status; +} + +static inline void _extensions_cache_clear(void); + void _PyImport_Fini(void) { - Py_CLEAR(extensions); + _extensions_cache_clear(); if (import_lock != NULL) { PyThread_free_lock(import_lock); import_lock = NULL; } + + /* Use the same memory allocator as _PyImport_Init(). */ + PyMemAllocatorEx old_alloc; + _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + + /* Free memory allocated by _PyImport_Init() */ + struct _inittab *inittab = _PyRuntime.imports.inittab; + _PyRuntime.imports.inittab = NULL; + PyMem_RawFree(inittab); + + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); } void @@ -398,6 +444,51 @@ PyImport_GetMagicTag(void) dictionary, to avoid loading shared libraries twice. */ +static PyModuleDef * +_extensions_cache_get(PyObject *filename, PyObject *name) +{ + PyObject *extensions = _PyRuntime.imports.extensions; + if (extensions == NULL) { + return NULL; + } + PyObject *key = PyTuple_Pack(2, filename, name); + if (key == NULL) { + return NULL; + } + PyModuleDef *def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + Py_DECREF(key); + return def; +} + +static int +_extensions_cache_set(PyObject *filename, PyObject *name, PyModuleDef *def) +{ + PyObject *extensions = _PyRuntime.imports.extensions; + if (extensions == NULL) { + extensions = PyDict_New(); + if (extensions == NULL) { + return -1; + } + _PyRuntime.imports.extensions = extensions; + } + PyObject *key = PyTuple_Pack(2, filename, name); + if (key == NULL) { + return -1; + } + int res = PyDict_SetItem(extensions, key, (PyObject *)def); + Py_DECREF(key); + if (res < 0) { + return -1; + } + return 0; +} + +static void +_extensions_cache_clear(void) +{ + Py_CLEAR(_PyRuntime.imports.extensions); +} + int _PyImport_FixupExtensionObject(PyObject *mod, PyObject *name, PyObject *filename, PyObject *modules) @@ -442,20 +533,7 @@ _PyImport_FixupExtensionObject(PyObject *mod, PyObject *name, } } - if (extensions == NULL) { - extensions = PyDict_New(); - if (extensions == NULL) { - return -1; - } - } - - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { - return -1; - } - int res = PyDict_SetItem(extensions, key, (PyObject *)def); - Py_DECREF(key); - if (res < 0) { + if (_extensions_cache_set(filename, name, def) < 0) { return -1; } } @@ -480,16 +558,7 @@ static PyObject * import_find_extension(PyThreadState *tstate, PyObject *name, PyObject *filename) { - if (extensions == NULL) { - return NULL; - } - - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { - return NULL; - } - PyModuleDef* def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); - Py_DECREF(key); + PyModuleDef *def = _extensions_cache_get(filename, name); if (def == NULL) { return NULL; } @@ -556,8 +625,7 @@ import_add_module(PyThreadState *tstate, PyObject *name) PyObject *m; if (PyDict_CheckExact(modules)) { - m = PyDict_GetItemWithError(modules, name); - Py_XINCREF(m); + m = Py_XNewRef(PyDict_GetItemWithError(modules, name)); } else { m = PyObject_GetItem(modules, name); @@ -810,8 +878,7 @@ update_code_filenames(PyCodeObject *co, PyObject *oldname, PyObject *newname) if (PyUnicode_Compare(co->co_filename, oldname)) return; - Py_INCREF(newname); - 
Py_XSETREF(co->co_filename, newname); + Py_XSETREF(co->co_filename, Py_NewRef(newname)); constants = co->co_consts; n = PyTuple_GET_SIZE(constants); @@ -868,9 +935,10 @@ static int is_builtin(PyObject *name) { int i; - for (i = 0; PyImport_Inittab[i].name != NULL; i++) { - if (_PyUnicode_EqualToASCIIString(name, PyImport_Inittab[i].name)) { - if (PyImport_Inittab[i].initfunc == NULL) + struct _inittab *inittab = _PyRuntime.imports.inittab; + for (i = 0; inittab[i].name != NULL; i++) { + if (_PyUnicode_EqualToASCIIString(name, inittab[i].name)) { + if (inittab[i].initfunc == NULL) return -1; else return 1; @@ -905,8 +973,7 @@ get_path_importer(PyThreadState *tstate, PyObject *path_importer_cache, importer = PyDict_GetItemWithError(path_importer_cache, p); if (importer != NULL || _PyErr_Occurred(tstate)) { - Py_XINCREF(importer); - return importer; + return Py_XNewRef(importer); } /* set path_importer_cache[p] to None to avoid recursion */ @@ -964,11 +1031,12 @@ create_builtin(PyThreadState *tstate, PyObject *name, PyObject *spec) } PyObject *modules = tstate->interp->modules; - for (struct _inittab *p = PyImport_Inittab; p->name != NULL; p++) { + for (struct _inittab *p = _PyRuntime.imports.inittab; p->name != NULL; p++) { if (_PyUnicode_EqualToASCIIString(name, p->name)) { if (p->initfunc == NULL) { /* Cannot re-init internal module ("sys" or "builtins") */ - return PyImport_AddModuleObject(name); + mod = PyImport_AddModuleObject(name); + return Py_XNewRef(mod); } mod = _PyImport_InitFunc_TrampolineCall(*p->initfunc); if (mod == NULL) { @@ -1021,6 +1089,14 @@ _imp_create_builtin(PyObject *module, PyObject *spec) return NULL; } + if (!PyUnicode_Check(name)) { + PyErr_Format(PyExc_TypeError, + "name must be string, not %.200s", + Py_TYPE(name)->tp_name); + Py_DECREF(name); + return NULL; + } + PyObject *mod = create_builtin(tstate, name, spec); Py_DECREF(name); return mod; @@ -1395,8 +1471,7 @@ PyImport_ImportFrozenModuleObject(PyObject *name) } } else { - Py_INCREF(Py_None); - origname = Py_None; + origname = Py_NewRef(Py_None); } err = PyDict_SetItemString(d, "__origname__", origname); Py_DECREF(origname); @@ -1508,8 +1583,7 @@ remove_importlib_frames(PyThreadState *tstate) if (in_importlib && (always_trim || _PyUnicode_EqualToASCIIString(code->co_name, remove_frames))) { - Py_XINCREF(next); - Py_XSETREF(*outer_link, next); + Py_XSETREF(*outer_link, Py_XNewRef(next)); prev_link = outer_link; } else { @@ -1685,8 +1759,8 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) PyObject *mod = NULL; PyInterpreterState *interp = tstate->interp; int import_time = _PyInterpreterState_GetConfig(interp)->import_time; - static int import_level; - static _PyTime_t accumulated; +#define import_level _PyRuntime.imports.find_and_load.import_level +#define accumulated _PyRuntime.imports.find_and_load.accumulated _PyTime_t t1 = 0, accumulated_copy = accumulated; @@ -1707,12 +1781,13 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) * _PyDict_GetItemIdWithError(). 
*/ if (import_time) { - static int header = 1; +#define header _PyRuntime.imports.find_and_load.header if (header) { fputs("import time: self [us] | cumulative | imported package\n", stderr); header = 0; } +#undef header import_level++; t1 = _PyTime_GetPerfCounter(); @@ -1742,6 +1817,8 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name) } return mod; +#undef import_level +#undef accumulated } PyObject * @@ -1805,8 +1882,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, _PyErr_SetString(tstate, PyExc_ValueError, "Empty module name"); goto error; } - abs_name = name; - Py_INCREF(abs_name); + abs_name = Py_NewRef(name); } mod = import_get_module(tstate, abs_name); @@ -1845,8 +1921,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, if (dot == -1) { /* No dot in module name, simple exit */ - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); goto error; } @@ -1881,8 +1956,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, } } else { - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); } } else { @@ -1897,8 +1971,7 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, mod, fromlist, interp->import_func, NULL); } else { - final_mod = mod; - Py_INCREF(mod); + final_mod = Py_NewRef(mod); } } @@ -2570,6 +2643,10 @@ PyImport_ExtendInittab(struct _inittab *newtab) size_t i, n; int res = 0; + if (_PyRuntime.imports.inittab != NULL) { + Py_FatalError("PyImport_ExtendInittab() may be be called after Py_Initialize()"); + } + /* Count the number of entries in both tables */ for (n = 0; newtab[n].name != NULL; n++) ; @@ -2614,6 +2691,10 @@ PyImport_AppendInittab(const char *name, PyObject* (*initfunc)(void)) { struct _inittab newtab[2]; + if (_PyRuntime.imports.inittab != NULL) { + Py_FatalError("PyImport_AppendInittab() may be be called after Py_Initialize()"); + } + memset(newtab, '\0', sizeof newtab); newtab[0].name = name; diff --git a/Python/importdl.c b/Python/importdl.c index 870ae2730071bb..40227674ca47ee 100644 --- a/Python/importdl.c +++ b/Python/importdl.c @@ -160,6 +160,7 @@ _PyImport_LoadDynamicModuleWithSpec(PyObject *spec, FILE *fp) p0 = (PyModInitFunction)exportfunc; /* Package context is needed for single-phase init */ +#define _Py_PackageContext (_PyRuntime.imports.pkgcontext) oldcontext = _Py_PackageContext; _Py_PackageContext = PyUnicode_AsUTF8(name_unicode); if (_Py_PackageContext == NULL) { @@ -168,6 +169,7 @@ _PyImport_LoadDynamicModuleWithSpec(PyObject *spec, FILE *fp) } m = _PyImport_InitFunc_TrampolineCall(p0); _Py_PackageContext = oldcontext; +#undef _Py_PackageContext if (m == NULL) { if (!PyErr_Occurred()) { diff --git a/Python/importlib.h b/Python/importlib.h new file mode 100644 index 00000000000000..586f3b21f46246 --- /dev/null +++ b/Python/importlib.h @@ -0,0 +1,1783 @@ +/* Auto-generated by Programs/_freeze_importlib.c */ +const unsigned char _Py_M__importlib_bootstrap[] = { + 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,4,0,0,0,64,0,0,0,115,194,1,0,0,100,0, + 90,0,100,1,97,1,100,2,100,3,132,0,90,2,100,4, + 100,5,132,0,90,3,105,0,90,4,105,0,90,5,71,0, + 100,6,100,7,132,0,100,7,101,6,131,3,90,7,71,0, + 100,8,100,9,132,0,100,9,131,2,90,8,71,0,100,10, + 100,11,132,0,100,11,131,2,90,9,71,0,100,12,100,13, + 132,0,100,13,131,2,90,10,100,14,100,15,132,0,90,11, + 100,16,100,17,132,0,90,12,100,18,100,19,132,0,90,13, + 100,20,100,21,156,1,100,22,100,23,132,2,90,14,100,24, + 100,25,132,0,90,15,100,26,100,27,132,0,90,16,100,28, + 
100,29,132,0,90,17,100,30,100,31,132,0,90,18,71,0, + 100,32,100,33,132,0,100,33,131,2,90,19,100,1,100,1, + 100,34,156,2,100,35,100,36,132,2,90,20,100,94,100,37, + 100,38,132,1,90,21,100,39,100,40,156,1,100,41,100,42, + 132,2,90,22,100,43,100,44,132,0,90,23,100,45,100,46, + 132,0,90,24,100,47,100,48,132,0,90,25,100,49,100,50, + 132,0,90,26,100,51,100,52,132,0,90,27,100,53,100,54, + 132,0,90,28,71,0,100,55,100,56,132,0,100,56,131,2, + 90,29,71,0,100,57,100,58,132,0,100,58,131,2,90,30, + 71,0,100,59,100,60,132,0,100,60,131,2,90,31,100,61, + 100,62,132,0,90,32,100,63,100,64,132,0,90,33,100,95, + 100,65,100,66,132,1,90,34,100,67,100,68,132,0,90,35, + 100,69,90,36,101,36,100,70,23,0,90,37,100,71,100,72, + 132,0,90,38,101,39,131,0,90,40,100,73,100,74,132,0, + 90,41,100,96,100,76,100,77,132,1,90,42,100,39,100,78, + 156,1,100,79,100,80,132,2,90,43,100,81,100,82,132,0, + 90,44,100,97,100,84,100,85,132,1,90,45,100,86,100,87, + 132,0,90,46,100,88,100,89,132,0,90,47,100,90,100,91, + 132,0,90,48,100,92,100,93,132,0,90,49,100,1,83,0, + 41,98,97,83,1,0,0,67,111,114,101,32,105,109,112,108, + 101,109,101,110,116,97,116,105,111,110,32,111,102,32,105,109, + 112,111,114,116,46,10,10,84,104,105,115,32,109,111,100,117, + 108,101,32,105,115,32,78,79,84,32,109,101,97,110,116,32, + 116,111,32,98,101,32,100,105,114,101,99,116,108,121,32,105, + 109,112,111,114,116,101,100,33,32,73,116,32,104,97,115,32, + 98,101,101,110,32,100,101,115,105,103,110,101,100,32,115,117, + 99,104,10,116,104,97,116,32,105,116,32,99,97,110,32,98, + 101,32,98,111,111,116,115,116,114,97,112,112,101,100,32,105, + 110,116,111,32,80,121,116,104,111,110,32,97,115,32,116,104, + 101,32,105,109,112,108,101,109,101,110,116,97,116,105,111,110, + 32,111,102,32,105,109,112,111,114,116,46,32,65,115,10,115, + 117,99,104,32,105,116,32,114,101,113,117,105,114,101,115,32, + 116,104,101,32,105,110,106,101,99,116,105,111,110,32,111,102, + 32,115,112,101,99,105,102,105,99,32,109,111,100,117,108,101, + 115,32,97,110,100,32,97,116,116,114,105,98,117,116,101,115, + 32,105,110,32,111,114,100,101,114,32,116,111,10,119,111,114, + 107,46,32,79,110,101,32,115,104,111,117,108,100,32,117,115, + 101,32,105,109,112,111,114,116,108,105,98,32,97,115,32,116, + 104,101,32,112,117,98,108,105,99,45,102,97,99,105,110,103, + 32,118,101,114,115,105,111,110,32,111,102,32,116,104,105,115, + 32,109,111,100,117,108,101,46,10,10,78,99,2,0,0,0, + 0,0,0,0,0,0,0,0,3,0,0,0,7,0,0,0, + 67,0,0,0,115,56,0,0,0,100,1,68,0,93,32,125, + 2,116,0,124,1,124,2,131,2,114,4,116,1,124,0,124, + 2,116,2,124,1,124,2,131,2,131,3,1,0,113,4,124, + 0,106,3,160,4,124,1,106,3,161,1,1,0,100,2,83, + 0,41,3,122,47,83,105,109,112,108,101,32,115,117,98,115, + 116,105,116,117,116,101,32,102,111,114,32,102,117,110,99,116, + 111,111,108,115,46,117,112,100,97,116,101,95,119,114,97,112, + 112,101,114,46,41,4,218,10,95,95,109,111,100,117,108,101, + 95,95,218,8,95,95,110,97,109,101,95,95,218,12,95,95, + 113,117,97,108,110,97,109,101,95,95,218,7,95,95,100,111, + 99,95,95,78,41,5,218,7,104,97,115,97,116,116,114,218, + 7,115,101,116,97,116,116,114,218,7,103,101,116,97,116,116, + 114,218,8,95,95,100,105,99,116,95,95,218,6,117,112,100, + 97,116,101,41,3,90,3,110,101,119,90,3,111,108,100,218, + 7,114,101,112,108,97,99,101,169,0,114,10,0,0,0,250, + 29,60,102,114,111,122,101,110,32,105,109,112,111,114,116,108, + 105,98,46,95,98,111,111,116,115,116,114,97,112,62,218,5, + 95,119,114,97,112,27,0,0,0,115,8,0,0,0,0,2, + 8,1,10,1,20,1,114,12,0,0,0,99,1,0,0,0, + 0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0, + 67,0,0,0,115,12,0,0,0,116,0,116,1,131,1,124, 
+ 0,131,1,83,0,169,1,78,41,2,218,4,116,121,112,101, + 218,3,115,121,115,169,1,218,4,110,97,109,101,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,218,11,95,110, + 101,119,95,109,111,100,117,108,101,35,0,0,0,115,2,0, + 0,0,0,1,114,18,0,0,0,99,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,1,0,0,0,64,0, + 0,0,115,12,0,0,0,101,0,90,1,100,0,90,2,100, + 1,83,0,41,2,218,14,95,68,101,97,100,108,111,99,107, + 69,114,114,111,114,78,41,3,114,1,0,0,0,114,0,0, + 0,0,114,2,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,10,0,0,0,114,11,0,0,0,114,19,0,0,0, + 48,0,0,0,115,2,0,0,0,8,1,114,19,0,0,0, + 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,2,0,0,0,64,0,0,0,115,56,0,0,0,101,0, + 90,1,100,0,90,2,100,1,90,3,100,2,100,3,132,0, + 90,4,100,4,100,5,132,0,90,5,100,6,100,7,132,0, + 90,6,100,8,100,9,132,0,90,7,100,10,100,11,132,0, + 90,8,100,12,83,0,41,13,218,11,95,77,111,100,117,108, + 101,76,111,99,107,122,169,65,32,114,101,99,117,114,115,105, + 118,101,32,108,111,99,107,32,105,109,112,108,101,109,101,110, + 116,97,116,105,111,110,32,119,104,105,99,104,32,105,115,32, + 97,98,108,101,32,116,111,32,100,101,116,101,99,116,32,100, + 101,97,100,108,111,99,107,115,10,32,32,32,32,40,101,46, + 103,46,32,116,104,114,101,97,100,32,49,32,116,114,121,105, + 110,103,32,116,111,32,116,97,107,101,32,108,111,99,107,115, + 32,65,32,116,104,101,110,32,66,44,32,97,110,100,32,116, + 104,114,101,97,100,32,50,32,116,114,121,105,110,103,32,116, + 111,10,32,32,32,32,116,97,107,101,32,108,111,99,107,115, + 32,66,32,116,104,101,110,32,65,41,46,10,32,32,32,32, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,2,0,0,0,67,0,0,0,115,48,0,0,0,116,0, + 160,1,161,0,124,0,95,2,116,0,160,1,161,0,124,0, + 95,3,124,1,124,0,95,4,100,0,124,0,95,5,100,1, + 124,0,95,6,100,1,124,0,95,7,100,0,83,0,169,2, + 78,233,0,0,0,0,41,8,218,7,95,116,104,114,101,97, + 100,90,13,97,108,108,111,99,97,116,101,95,108,111,99,107, + 218,4,108,111,99,107,218,6,119,97,107,101,117,112,114,17, + 0,0,0,218,5,111,119,110,101,114,218,5,99,111,117,110, + 116,218,7,119,97,105,116,101,114,115,169,2,218,4,115,101, + 108,102,114,17,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,11,0,0,0,218,8,95,95,105,110,105,116,95,95, + 58,0,0,0,115,12,0,0,0,0,1,10,1,10,1,6, + 1,6,1,6,1,122,20,95,77,111,100,117,108,101,76,111, + 99,107,46,95,95,105,110,105,116,95,95,99,1,0,0,0, + 0,0,0,0,0,0,0,0,4,0,0,0,3,0,0,0, + 67,0,0,0,115,60,0,0,0,116,0,160,1,161,0,125, + 1,124,0,106,2,125,2,116,3,160,4,124,2,161,1,125, + 3,124,3,100,0,107,8,114,36,100,1,83,0,124,3,106, + 2,125,2,124,2,124,1,107,2,114,14,100,2,83,0,113, + 14,100,0,83,0,41,3,78,70,84,41,5,114,23,0,0, + 0,218,9,103,101,116,95,105,100,101,110,116,114,26,0,0, + 0,218,12,95,98,108,111,99,107,105,110,103,95,111,110,218, + 3,103,101,116,41,4,114,30,0,0,0,90,2,109,101,218, + 3,116,105,100,114,24,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,218,12,104,97,115,95,100,101, + 97,100,108,111,99,107,66,0,0,0,115,16,0,0,0,0, + 2,8,1,6,2,10,1,8,1,4,1,6,1,8,1,122, + 24,95,77,111,100,117,108,101,76,111,99,107,46,104,97,115, + 95,100,101,97,100,108,111,99,107,99,1,0,0,0,0,0, + 0,0,0,0,0,0,2,0,0,0,9,0,0,0,67,0, + 0,0,115,178,0,0,0,116,0,160,1,161,0,125,1,124, + 0,116,2,124,1,60,0,122,148,124,0,106,3,143,110,1, + 0,124,0,106,4,100,1,107,2,115,46,124,0,106,5,124, + 1,107,2,114,84,124,1,124,0,95,5,124,0,4,0,106, + 4,100,2,55,0,2,0,95,4,87,0,53,0,81,0,82, + 0,163,0,87,0,162,86,100,3,83,0,124,0,160,6,161, + 0,114,104,116,7,100,4,124,0,22,0,131,1,130,1,124, + 0,106,8,160,9,100,5,161,1,114,130,124,0,4,0,106, + 10,100,2,55,0,2,0,95,10,87,0,53,0,81,0,82, + 0,88,0,124,0,106,8,160,9,161,0,1,0,124,0,106, + 
8,160,11,161,0,1,0,113,18,87,0,53,0,116,2,124, + 1,61,0,88,0,100,6,83,0,41,7,122,185,10,32,32, + 32,32,32,32,32,32,65,99,113,117,105,114,101,32,116,104, + 101,32,109,111,100,117,108,101,32,108,111,99,107,46,32,32, + 73,102,32,97,32,112,111,116,101,110,116,105,97,108,32,100, + 101,97,100,108,111,99,107,32,105,115,32,100,101,116,101,99, + 116,101,100,44,10,32,32,32,32,32,32,32,32,97,32,95, + 68,101,97,100,108,111,99,107,69,114,114,111,114,32,105,115, + 32,114,97,105,115,101,100,46,10,32,32,32,32,32,32,32, + 32,79,116,104,101,114,119,105,115,101,44,32,116,104,101,32, + 108,111,99,107,32,105,115,32,97,108,119,97,121,115,32,97, + 99,113,117,105,114,101,100,32,97,110,100,32,84,114,117,101, + 32,105,115,32,114,101,116,117,114,110,101,100,46,10,32,32, + 32,32,32,32,32,32,114,22,0,0,0,233,1,0,0,0, + 84,122,23,100,101,97,100,108,111,99,107,32,100,101,116,101, + 99,116,101,100,32,98,121,32,37,114,70,78,41,12,114,23, + 0,0,0,114,32,0,0,0,114,33,0,0,0,114,24,0, + 0,0,114,27,0,0,0,114,26,0,0,0,114,36,0,0, + 0,114,19,0,0,0,114,25,0,0,0,218,7,97,99,113, + 117,105,114,101,114,28,0,0,0,218,7,114,101,108,101,97, + 115,101,169,2,114,30,0,0,0,114,35,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,11,0,0,0,114,38,0, + 0,0,78,0,0,0,115,30,0,0,0,0,6,8,1,8, + 1,2,2,8,1,20,1,6,1,14,1,18,1,8,1,12, + 1,12,1,24,2,10,1,16,2,122,19,95,77,111,100,117, + 108,101,76,111,99,107,46,97,99,113,117,105,114,101,99,1, + 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,9, + 0,0,0,67,0,0,0,115,122,0,0,0,116,0,160,1, + 161,0,125,1,124,0,106,2,143,98,1,0,124,0,106,3, + 124,1,107,3,114,34,116,4,100,1,131,1,130,1,124,0, + 106,5,100,2,107,4,115,48,74,0,130,1,124,0,4,0, + 106,5,100,3,56,0,2,0,95,5,124,0,106,5,100,2, + 107,2,114,108,100,0,124,0,95,3,124,0,106,6,114,108, + 124,0,4,0,106,6,100,3,56,0,2,0,95,6,124,0, + 106,7,160,8,161,0,1,0,87,0,53,0,81,0,82,0, + 88,0,100,0,83,0,41,4,78,250,31,99,97,110,110,111, + 116,32,114,101,108,101,97,115,101,32,117,110,45,97,99,113, + 117,105,114,101,100,32,108,111,99,107,114,22,0,0,0,114, + 37,0,0,0,41,9,114,23,0,0,0,114,32,0,0,0, + 114,24,0,0,0,114,26,0,0,0,218,12,82,117,110,116, + 105,109,101,69,114,114,111,114,114,27,0,0,0,114,28,0, + 0,0,114,25,0,0,0,114,39,0,0,0,114,40,0,0, + 0,114,10,0,0,0,114,10,0,0,0,114,11,0,0,0, + 114,39,0,0,0,103,0,0,0,115,22,0,0,0,0,1, + 8,1,8,1,10,1,8,1,14,1,14,1,10,1,6,1, + 6,1,14,1,122,19,95,77,111,100,117,108,101,76,111,99, + 107,46,114,101,108,101,97,115,101,99,1,0,0,0,0,0, + 0,0,0,0,0,0,1,0,0,0,5,0,0,0,67,0, + 0,0,115,22,0,0,0,100,1,124,0,106,0,155,2,100, + 2,116,1,124,0,131,1,155,0,157,4,83,0,41,3,78, + 122,12,95,77,111,100,117,108,101,76,111,99,107,40,250,5, + 41,32,97,116,32,169,2,114,17,0,0,0,218,2,105,100, + 169,1,114,30,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,11,0,0,0,218,8,95,95,114,101,112,114,95,95, + 116,0,0,0,115,2,0,0,0,0,1,122,20,95,77,111, + 100,117,108,101,76,111,99,107,46,95,95,114,101,112,114,95, + 95,78,41,9,114,1,0,0,0,114,0,0,0,0,114,2, + 0,0,0,114,3,0,0,0,114,31,0,0,0,114,36,0, + 0,0,114,38,0,0,0,114,39,0,0,0,114,47,0,0, + 0,114,10,0,0,0,114,10,0,0,0,114,10,0,0,0, + 114,11,0,0,0,114,20,0,0,0,52,0,0,0,115,12, + 0,0,0,8,1,4,5,8,8,8,12,8,25,8,13,114, + 20,0,0,0,99,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,2,0,0,0,64,0,0,0,115,48,0, + 0,0,101,0,90,1,100,0,90,2,100,1,90,3,100,2, + 100,3,132,0,90,4,100,4,100,5,132,0,90,5,100,6, + 100,7,132,0,90,6,100,8,100,9,132,0,90,7,100,10, + 83,0,41,11,218,16,95,68,117,109,109,121,77,111,100,117, + 108,101,76,111,99,107,122,86,65,32,115,105,109,112,108,101, + 32,95,77,111,100,117,108,101,76,111,99,107,32,101,113,117, + 
105,118,97,108,101,110,116,32,102,111,114,32,80,121,116,104, + 111,110,32,98,117,105,108,100,115,32,119,105,116,104,111,117, + 116,10,32,32,32,32,109,117,108,116,105,45,116,104,114,101, + 97,100,105,110,103,32,115,117,112,112,111,114,116,46,99,2, + 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,2, + 0,0,0,67,0,0,0,115,16,0,0,0,124,1,124,0, + 95,0,100,1,124,0,95,1,100,0,83,0,114,21,0,0, + 0,41,2,114,17,0,0,0,114,27,0,0,0,114,29,0, + 0,0,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,114,31,0,0,0,124,0,0,0,115,4,0,0,0,0, + 1,6,1,122,25,95,68,117,109,109,121,77,111,100,117,108, + 101,76,111,99,107,46,95,95,105,110,105,116,95,95,99,1, + 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,3, + 0,0,0,67,0,0,0,115,18,0,0,0,124,0,4,0, + 106,0,100,1,55,0,2,0,95,0,100,2,83,0,41,3, + 78,114,37,0,0,0,84,41,1,114,27,0,0,0,114,46, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,114,38,0,0,0,128,0,0,0,115,4,0,0,0, + 0,1,14,1,122,24,95,68,117,109,109,121,77,111,100,117, + 108,101,76,111,99,107,46,97,99,113,117,105,114,101,99,1, + 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,3, + 0,0,0,67,0,0,0,115,36,0,0,0,124,0,106,0, + 100,1,107,2,114,18,116,1,100,2,131,1,130,1,124,0, + 4,0,106,0,100,3,56,0,2,0,95,0,100,0,83,0, + 41,4,78,114,22,0,0,0,114,41,0,0,0,114,37,0, + 0,0,41,2,114,27,0,0,0,114,42,0,0,0,114,46, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,114,39,0,0,0,132,0,0,0,115,6,0,0,0, + 0,1,10,1,8,1,122,24,95,68,117,109,109,121,77,111, + 100,117,108,101,76,111,99,107,46,114,101,108,101,97,115,101, + 99,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0, + 0,5,0,0,0,67,0,0,0,115,22,0,0,0,100,1, + 124,0,106,0,155,2,100,2,116,1,124,0,131,1,155,0, + 157,4,83,0,41,3,78,122,17,95,68,117,109,109,121,77, + 111,100,117,108,101,76,111,99,107,40,114,43,0,0,0,114, + 44,0,0,0,114,46,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,114,47,0,0,0,137,0,0, + 0,115,2,0,0,0,0,1,122,25,95,68,117,109,109,121, + 77,111,100,117,108,101,76,111,99,107,46,95,95,114,101,112, + 114,95,95,78,41,8,114,1,0,0,0,114,0,0,0,0, + 114,2,0,0,0,114,3,0,0,0,114,31,0,0,0,114, + 38,0,0,0,114,39,0,0,0,114,47,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,114,48,0,0,0,120,0,0,0,115,10,0,0,0, + 8,1,4,3,8,4,8,4,8,5,114,48,0,0,0,99, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 2,0,0,0,64,0,0,0,115,36,0,0,0,101,0,90, + 1,100,0,90,2,100,1,100,2,132,0,90,3,100,3,100, + 4,132,0,90,4,100,5,100,6,132,0,90,5,100,7,83, + 0,41,8,218,18,95,77,111,100,117,108,101,76,111,99,107, + 77,97,110,97,103,101,114,99,2,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,2,0,0,0,67,0,0,0, + 115,16,0,0,0,124,1,124,0,95,0,100,0,124,0,95, + 1,100,0,83,0,114,13,0,0,0,41,2,218,5,95,110, + 97,109,101,218,5,95,108,111,99,107,114,29,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,114,31, + 0,0,0,143,0,0,0,115,4,0,0,0,0,1,6,1, + 122,27,95,77,111,100,117,108,101,76,111,99,107,77,97,110, + 97,103,101,114,46,95,95,105,110,105,116,95,95,99,1,0, + 0,0,0,0,0,0,0,0,0,0,1,0,0,0,2,0, + 0,0,67,0,0,0,115,26,0,0,0,116,0,124,0,106, + 1,131,1,124,0,95,2,124,0,106,2,160,3,161,0,1, + 0,100,0,83,0,114,13,0,0,0,41,4,218,16,95,103, + 101,116,95,109,111,100,117,108,101,95,108,111,99,107,114,50, + 0,0,0,114,51,0,0,0,114,38,0,0,0,114,46,0, + 0,0,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,218,9,95,95,101,110,116,101,114,95,95,147,0,0,0, + 115,4,0,0,0,0,1,12,1,122,28,95,77,111,100,117, + 108,101,76,111,99,107,77,97,110,97,103,101,114,46,95,95, + 101,110,116,101,114,95,95,99,1,0,0,0,0,0,0,0, + 0,0,0,0,3,0,0,0,2,0,0,0,79,0,0,0, + 115,14,0,0,0,124,0,106,0,160,1,161,0,1,0,100, + 0,83,0,114,13,0,0,0,41,2,114,51,0,0,0,114, + 39,0,0,0,41,3,114,30,0,0,0,218,4,97,114,103, + 
115,90,6,107,119,97,114,103,115,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,218,8,95,95,101,120,105,116, + 95,95,151,0,0,0,115,2,0,0,0,0,1,122,27,95, + 77,111,100,117,108,101,76,111,99,107,77,97,110,97,103,101, + 114,46,95,95,101,120,105,116,95,95,78,41,6,114,1,0, + 0,0,114,0,0,0,0,114,2,0,0,0,114,31,0,0, + 0,114,53,0,0,0,114,55,0,0,0,114,10,0,0,0, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,114, + 49,0,0,0,141,0,0,0,115,6,0,0,0,8,2,8, + 4,8,4,114,49,0,0,0,99,1,0,0,0,0,0,0, + 0,0,0,0,0,3,0,0,0,8,0,0,0,67,0,0, + 0,115,130,0,0,0,116,0,160,1,161,0,1,0,122,106, + 122,14,116,3,124,0,25,0,131,0,125,1,87,0,110,24, + 4,0,116,4,107,10,114,48,1,0,1,0,1,0,100,1, + 125,1,89,0,110,2,88,0,124,1,100,1,107,8,114,112, + 116,5,100,1,107,8,114,76,116,6,124,0,131,1,125,1, + 110,8,116,7,124,0,131,1,125,1,124,0,102,1,100,2, + 100,3,132,1,125,2,116,8,160,9,124,1,124,2,161,2, + 116,3,124,0,60,0,87,0,53,0,116,0,160,2,161,0, + 1,0,88,0,124,1,83,0,41,4,122,139,71,101,116,32, + 111,114,32,99,114,101,97,116,101,32,116,104,101,32,109,111, + 100,117,108,101,32,108,111,99,107,32,102,111,114,32,97,32, + 103,105,118,101,110,32,109,111,100,117,108,101,32,110,97,109, + 101,46,10,10,32,32,32,32,65,99,113,117,105,114,101,47, + 114,101,108,101,97,115,101,32,105,110,116,101,114,110,97,108, + 108,121,32,116,104,101,32,103,108,111,98,97,108,32,105,109, + 112,111,114,116,32,108,111,99,107,32,116,111,32,112,114,111, + 116,101,99,116,10,32,32,32,32,95,109,111,100,117,108,101, + 95,108,111,99,107,115,46,78,99,2,0,0,0,0,0,0, + 0,0,0,0,0,2,0,0,0,8,0,0,0,83,0,0, + 0,115,48,0,0,0,116,0,160,1,161,0,1,0,122,24, + 116,3,160,4,124,1,161,1,124,0,107,8,114,30,116,3, + 124,1,61,0,87,0,53,0,116,0,160,2,161,0,1,0, + 88,0,100,0,83,0,114,13,0,0,0,41,5,218,4,95, + 105,109,112,218,12,97,99,113,117,105,114,101,95,108,111,99, + 107,218,12,114,101,108,101,97,115,101,95,108,111,99,107,218, + 13,95,109,111,100,117,108,101,95,108,111,99,107,115,114,34, + 0,0,0,41,2,218,3,114,101,102,114,17,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,218,2, + 99,98,176,0,0,0,115,10,0,0,0,0,1,8,1,2, + 4,14,1,10,2,122,28,95,103,101,116,95,109,111,100,117, + 108,101,95,108,111,99,107,46,60,108,111,99,97,108,115,62, + 46,99,98,41,10,114,56,0,0,0,114,57,0,0,0,114, + 58,0,0,0,114,59,0,0,0,218,8,75,101,121,69,114, + 114,111,114,114,23,0,0,0,114,48,0,0,0,114,20,0, + 0,0,218,8,95,119,101,97,107,114,101,102,114,60,0,0, + 0,41,3,114,17,0,0,0,114,24,0,0,0,114,61,0, + 0,0,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,114,52,0,0,0,157,0,0,0,115,28,0,0,0,0, + 6,8,1,2,1,2,1,14,1,14,1,10,2,8,1,8, + 1,10,2,8,2,12,11,20,2,10,2,114,52,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,8,0,0,0,67,0,0,0,115,54,0,0,0,116,0, + 124,0,131,1,125,1,122,12,124,1,160,1,161,0,1,0, + 87,0,110,20,4,0,116,2,107,10,114,40,1,0,1,0, + 1,0,89,0,110,10,88,0,124,1,160,3,161,0,1,0, + 100,1,83,0,41,2,122,189,65,99,113,117,105,114,101,115, + 32,116,104,101,110,32,114,101,108,101,97,115,101,115,32,116, + 104,101,32,109,111,100,117,108,101,32,108,111,99,107,32,102, + 111,114,32,97,32,103,105,118,101,110,32,109,111,100,117,108, + 101,32,110,97,109,101,46,10,10,32,32,32,32,84,104,105, + 115,32,105,115,32,117,115,101,100,32,116,111,32,101,110,115, + 117,114,101,32,97,32,109,111,100,117,108,101,32,105,115,32, + 99,111,109,112,108,101,116,101,108,121,32,105,110,105,116,105, + 97,108,105,122,101,100,44,32,105,110,32,116,104,101,10,32, + 32,32,32,101,118,101,110,116,32,105,116,32,105,115,32,98, + 101,105,110,103,32,105,109,112,111,114,116,101,100,32,98,121, + 32,97,110,111,116,104,101,114,32,116,104,114,101,97,100,46, + 
10,32,32,32,32,78,41,4,114,52,0,0,0,114,38,0, + 0,0,114,19,0,0,0,114,39,0,0,0,41,2,114,17, + 0,0,0,114,24,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,19,95,108,111,99,107,95,117, + 110,108,111,99,107,95,109,111,100,117,108,101,194,0,0,0, + 115,12,0,0,0,0,6,8,1,2,1,12,1,14,3,6, + 2,114,64,0,0,0,99,1,0,0,0,0,0,0,0,0, + 0,0,0,3,0,0,0,3,0,0,0,79,0,0,0,115, + 10,0,0,0,124,0,124,1,124,2,142,1,83,0,41,1, + 97,46,1,0,0,114,101,109,111,118,101,95,105,109,112,111, + 114,116,108,105,98,95,102,114,97,109,101,115,32,105,110,32, + 105,109,112,111,114,116,46,99,32,119,105,108,108,32,97,108, + 119,97,121,115,32,114,101,109,111,118,101,32,115,101,113,117, + 101,110,99,101,115,10,32,32,32,32,111,102,32,105,109,112, + 111,114,116,108,105,98,32,102,114,97,109,101,115,32,116,104, + 97,116,32,101,110,100,32,119,105,116,104,32,97,32,99,97, + 108,108,32,116,111,32,116,104,105,115,32,102,117,110,99,116, + 105,111,110,10,10,32,32,32,32,85,115,101,32,105,116,32, + 105,110,115,116,101,97,100,32,111,102,32,97,32,110,111,114, + 109,97,108,32,99,97,108,108,32,105,110,32,112,108,97,99, + 101,115,32,119,104,101,114,101,32,105,110,99,108,117,100,105, + 110,103,32,116,104,101,32,105,109,112,111,114,116,108,105,98, + 10,32,32,32,32,102,114,97,109,101,115,32,105,110,116,114, + 111,100,117,99,101,115,32,117,110,119,97,110,116,101,100,32, + 110,111,105,115,101,32,105,110,116,111,32,116,104,101,32,116, + 114,97,99,101,98,97,99,107,32,40,101,46,103,46,32,119, + 104,101,110,32,101,120,101,99,117,116,105,110,103,10,32,32, + 32,32,109,111,100,117,108,101,32,99,111,100,101,41,10,32, + 32,32,32,114,10,0,0,0,41,3,218,1,102,114,54,0, + 0,0,90,4,107,119,100,115,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,25,95,99,97,108,108,95,119, + 105,116,104,95,102,114,97,109,101,115,95,114,101,109,111,118, + 101,100,211,0,0,0,115,2,0,0,0,0,8,114,66,0, + 0,0,114,37,0,0,0,41,1,218,9,118,101,114,98,111, + 115,105,116,121,99,1,0,0,0,0,0,0,0,1,0,0, + 0,3,0,0,0,4,0,0,0,71,0,0,0,115,54,0, + 0,0,116,0,106,1,106,2,124,1,107,5,114,50,124,0, + 160,3,100,1,161,1,115,30,100,2,124,0,23,0,125,0, + 116,4,124,0,106,5,124,2,142,0,116,0,106,6,100,3, + 141,2,1,0,100,4,83,0,41,5,122,61,80,114,105,110, + 116,32,116,104,101,32,109,101,115,115,97,103,101,32,116,111, + 32,115,116,100,101,114,114,32,105,102,32,45,118,47,80,89, + 84,72,79,78,86,69,82,66,79,83,69,32,105,115,32,116, + 117,114,110,101,100,32,111,110,46,41,2,250,1,35,122,7, + 105,109,112,111,114,116,32,122,2,35,32,41,1,90,4,102, + 105,108,101,78,41,7,114,15,0,0,0,218,5,102,108,97, + 103,115,218,7,118,101,114,98,111,115,101,218,10,115,116,97, + 114,116,115,119,105,116,104,218,5,112,114,105,110,116,218,6, + 102,111,114,109,97,116,218,6,115,116,100,101,114,114,41,3, + 218,7,109,101,115,115,97,103,101,114,67,0,0,0,114,54, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,218,16,95,118,101,114,98,111,115,101,95,109,101,115, + 115,97,103,101,222,0,0,0,115,8,0,0,0,0,2,12, + 1,10,1,8,1,114,76,0,0,0,99,1,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,3, + 0,0,0,115,26,0,0,0,135,0,102,1,100,1,100,2, + 132,8,125,1,116,0,124,1,136,0,131,2,1,0,124,1, + 83,0,41,3,122,49,68,101,99,111,114,97,116,111,114,32, + 116,111,32,118,101,114,105,102,121,32,116,104,101,32,110,97, + 109,101,100,32,109,111,100,117,108,101,32,105,115,32,98,117, + 105,108,116,45,105,110,46,99,2,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,4,0,0,0,19,0,0,0, + 115,38,0,0,0,124,1,116,0,106,1,107,7,114,28,116, + 2,124,1,155,2,100,1,157,2,124,1,100,2,141,2,130, + 1,136,0,124,0,124,1,131,2,83,0,41,3,78,250,25, + 32,105,115,32,110,111,116,32,97,32,98,117,105,108,116,45, + 
105,110,32,109,111,100,117,108,101,114,16,0,0,0,41,3, + 114,15,0,0,0,218,20,98,117,105,108,116,105,110,95,109, + 111,100,117,108,101,95,110,97,109,101,115,218,11,73,109,112, + 111,114,116,69,114,114,111,114,169,2,114,30,0,0,0,218, + 8,102,117,108,108,110,97,109,101,169,1,218,3,102,120,110, + 114,10,0,0,0,114,11,0,0,0,218,25,95,114,101,113, + 117,105,114,101,115,95,98,117,105,108,116,105,110,95,119,114, + 97,112,112,101,114,232,0,0,0,115,10,0,0,0,0,1, + 10,1,10,1,2,255,6,2,122,52,95,114,101,113,117,105, + 114,101,115,95,98,117,105,108,116,105,110,46,60,108,111,99, + 97,108,115,62,46,95,114,101,113,117,105,114,101,115,95,98, + 117,105,108,116,105,110,95,119,114,97,112,112,101,114,169,1, + 114,12,0,0,0,41,2,114,83,0,0,0,114,84,0,0, + 0,114,10,0,0,0,114,82,0,0,0,114,11,0,0,0, + 218,17,95,114,101,113,117,105,114,101,115,95,98,117,105,108, + 116,105,110,230,0,0,0,115,6,0,0,0,0,2,12,5, + 10,1,114,86,0,0,0,99,1,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,3,0,0,0,3,0,0,0, + 115,26,0,0,0,135,0,102,1,100,1,100,2,132,8,125, + 1,116,0,124,1,136,0,131,2,1,0,124,1,83,0,41, + 3,122,47,68,101,99,111,114,97,116,111,114,32,116,111,32, + 118,101,114,105,102,121,32,116,104,101,32,110,97,109,101,100, + 32,109,111,100,117,108,101,32,105,115,32,102,114,111,122,101, + 110,46,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,4,0,0,0,19,0,0,0,115,38,0,0,0, + 116,0,160,1,124,1,161,1,115,28,116,2,124,1,155,2, + 100,1,157,2,124,1,100,2,141,2,130,1,136,0,124,0, + 124,1,131,2,83,0,169,3,78,122,23,32,105,115,32,110, + 111,116,32,97,32,102,114,111,122,101,110,32,109,111,100,117, + 108,101,114,16,0,0,0,41,3,114,56,0,0,0,218,9, + 105,115,95,102,114,111,122,101,110,114,79,0,0,0,114,80, + 0,0,0,114,82,0,0,0,114,10,0,0,0,114,11,0, + 0,0,218,24,95,114,101,113,117,105,114,101,115,95,102,114, + 111,122,101,110,95,119,114,97,112,112,101,114,243,0,0,0, + 115,10,0,0,0,0,1,10,1,10,1,2,255,6,2,122, + 50,95,114,101,113,117,105,114,101,115,95,102,114,111,122,101, + 110,46,60,108,111,99,97,108,115,62,46,95,114,101,113,117, + 105,114,101,115,95,102,114,111,122,101,110,95,119,114,97,112, + 112,101,114,114,85,0,0,0,41,2,114,83,0,0,0,114, + 89,0,0,0,114,10,0,0,0,114,82,0,0,0,114,11, + 0,0,0,218,16,95,114,101,113,117,105,114,101,115,95,102, + 114,111,122,101,110,241,0,0,0,115,6,0,0,0,0,2, + 12,5,10,1,114,90,0,0,0,99,2,0,0,0,0,0, + 0,0,0,0,0,0,4,0,0,0,3,0,0,0,67,0, + 0,0,115,62,0,0,0,116,0,124,1,124,0,131,2,125, + 2,124,1,116,1,106,2,107,6,114,50,116,1,106,2,124, + 1,25,0,125,3,116,3,124,2,124,3,131,2,1,0,116, + 1,106,2,124,1,25,0,83,0,116,4,124,2,131,1,83, + 0,100,1,83,0,41,2,122,128,76,111,97,100,32,116,104, + 101,32,115,112,101,99,105,102,105,101,100,32,109,111,100,117, + 108,101,32,105,110,116,111,32,115,121,115,46,109,111,100,117, + 108,101,115,32,97,110,100,32,114,101,116,117,114,110,32,105, + 116,46,10,10,32,32,32,32,84,104,105,115,32,109,101,116, + 104,111,100,32,105,115,32,100,101,112,114,101,99,97,116,101, + 100,46,32,32,85,115,101,32,108,111,97,100,101,114,46,101, + 120,101,99,95,109,111,100,117,108,101,32,105,110,115,116,101, + 97,100,46,10,10,32,32,32,32,78,41,5,218,16,115,112, + 101,99,95,102,114,111,109,95,108,111,97,100,101,114,114,15, + 0,0,0,218,7,109,111,100,117,108,101,115,218,5,95,101, + 120,101,99,218,5,95,108,111,97,100,41,4,114,30,0,0, + 0,114,81,0,0,0,218,4,115,112,101,99,218,6,109,111, + 100,117,108,101,114,10,0,0,0,114,10,0,0,0,114,11, + 0,0,0,218,17,95,108,111,97,100,95,109,111,100,117,108, + 101,95,115,104,105,109,253,0,0,0,115,12,0,0,0,0, + 6,10,1,10,1,10,1,10,1,10,2,114,97,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,5,0,0, + 
0,8,0,0,0,67,0,0,0,115,240,0,0,0,116,0, + 124,0,100,1,100,0,131,3,125,1,116,1,124,1,100,2, + 131,2,114,56,122,12,124,1,160,2,124,0,161,1,87,0, + 83,0,4,0,116,3,107,10,114,54,1,0,1,0,1,0, + 89,0,110,2,88,0,122,10,124,0,106,4,125,2,87,0, + 110,20,4,0,116,5,107,10,114,86,1,0,1,0,1,0, + 89,0,110,18,88,0,124,2,100,0,107,9,114,104,116,6, + 124,2,131,1,83,0,122,10,124,0,106,7,125,3,87,0, + 110,24,4,0,116,5,107,10,114,138,1,0,1,0,1,0, + 100,3,125,3,89,0,110,2,88,0,122,10,124,0,106,8, + 125,4,87,0,110,66,4,0,116,5,107,10,114,216,1,0, + 1,0,1,0,124,1,100,0,107,8,114,190,100,4,124,3, + 155,2,100,5,157,3,6,0,89,0,83,0,100,4,124,3, + 155,2,100,6,124,1,155,2,100,7,157,5,6,0,89,0, + 83,0,89,0,110,20,88,0,100,4,124,3,155,2,100,8, + 124,4,155,2,100,5,157,5,83,0,100,0,83,0,41,9, + 78,218,10,95,95,108,111,97,100,101,114,95,95,218,11,109, + 111,100,117,108,101,95,114,101,112,114,250,1,63,250,8,60, + 109,111,100,117,108,101,32,250,1,62,250,2,32,40,250,2, + 41,62,250,6,32,102,114,111,109,32,41,9,114,6,0,0, + 0,114,4,0,0,0,114,99,0,0,0,218,9,69,120,99, + 101,112,116,105,111,110,218,8,95,95,115,112,101,99,95,95, + 218,14,65,116,116,114,105,98,117,116,101,69,114,114,111,114, + 218,22,95,109,111,100,117,108,101,95,114,101,112,114,95,102, + 114,111,109,95,115,112,101,99,114,1,0,0,0,218,8,95, + 95,102,105,108,101,95,95,41,5,114,96,0,0,0,218,6, + 108,111,97,100,101,114,114,95,0,0,0,114,17,0,0,0, + 218,8,102,105,108,101,110,97,109,101,114,10,0,0,0,114, + 10,0,0,0,114,11,0,0,0,218,12,95,109,111,100,117, + 108,101,95,114,101,112,114,13,1,0,0,115,46,0,0,0, + 0,2,12,1,10,4,2,1,12,1,14,1,6,1,2,1, + 10,1,14,1,6,2,8,1,8,4,2,1,10,1,14,1, + 10,1,2,1,10,1,14,1,8,1,16,2,28,2,114,113, + 0,0,0,99,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,4,0,0,0,64,0,0,0,115,114,0,0, + 0,101,0,90,1,100,0,90,2,100,1,90,3,100,2,100, + 2,100,2,100,3,156,3,100,4,100,5,132,2,90,4,100, + 6,100,7,132,0,90,5,100,8,100,9,132,0,90,6,101, + 7,100,10,100,11,132,0,131,1,90,8,101,8,106,9,100, + 12,100,11,132,0,131,1,90,8,101,7,100,13,100,14,132, + 0,131,1,90,10,101,7,100,15,100,16,132,0,131,1,90, + 11,101,11,106,9,100,17,100,16,132,0,131,1,90,11,100, + 2,83,0,41,18,218,10,77,111,100,117,108,101,83,112,101, + 99,97,208,5,0,0,84,104,101,32,115,112,101,99,105,102, + 105,99,97,116,105,111,110,32,102,111,114,32,97,32,109,111, + 100,117,108,101,44,32,117,115,101,100,32,102,111,114,32,108, + 111,97,100,105,110,103,46,10,10,32,32,32,32,65,32,109, + 111,100,117,108,101,39,115,32,115,112,101,99,32,105,115,32, + 116,104,101,32,115,111,117,114,99,101,32,102,111,114,32,105, + 110,102,111,114,109,97,116,105,111,110,32,97,98,111,117,116, + 32,116,104,101,32,109,111,100,117,108,101,46,32,32,70,111, + 114,10,32,32,32,32,100,97,116,97,32,97,115,115,111,99, + 105,97,116,101,100,32,119,105,116,104,32,116,104,101,32,109, + 111,100,117,108,101,44,32,105,110,99,108,117,100,105,110,103, + 32,115,111,117,114,99,101,44,32,117,115,101,32,116,104,101, + 32,115,112,101,99,39,115,10,32,32,32,32,108,111,97,100, + 101,114,46,10,10,32,32,32,32,96,110,97,109,101,96,32, + 105,115,32,116,104,101,32,97,98,115,111,108,117,116,101,32, + 110,97,109,101,32,111,102,32,116,104,101,32,109,111,100,117, + 108,101,46,32,32,96,108,111,97,100,101,114,96,32,105,115, + 32,116,104,101,32,108,111,97,100,101,114,10,32,32,32,32, + 116,111,32,117,115,101,32,119,104,101,110,32,108,111,97,100, + 105,110,103,32,116,104,101,32,109,111,100,117,108,101,46,32, + 32,96,112,97,114,101,110,116,96,32,105,115,32,116,104,101, + 32,110,97,109,101,32,111,102,32,116,104,101,10,32,32,32, + 
32,112,97,99,107,97,103,101,32,116,104,101,32,109,111,100, + 117,108,101,32,105,115,32,105,110,46,32,32,84,104,101,32, + 112,97,114,101,110,116,32,105,115,32,100,101,114,105,118,101, + 100,32,102,114,111,109,32,116,104,101,32,110,97,109,101,46, + 10,10,32,32,32,32,96,105,115,95,112,97,99,107,97,103, + 101,96,32,100,101,116,101,114,109,105,110,101,115,32,105,102, + 32,116,104,101,32,109,111,100,117,108,101,32,105,115,32,99, + 111,110,115,105,100,101,114,101,100,32,97,32,112,97,99,107, + 97,103,101,32,111,114,10,32,32,32,32,110,111,116,46,32, + 32,79,110,32,109,111,100,117,108,101,115,32,116,104,105,115, + 32,105,115,32,114,101,102,108,101,99,116,101,100,32,98,121, + 32,116,104,101,32,96,95,95,112,97,116,104,95,95,96,32, + 97,116,116,114,105,98,117,116,101,46,10,10,32,32,32,32, + 96,111,114,105,103,105,110,96,32,105,115,32,116,104,101,32, + 115,112,101,99,105,102,105,99,32,108,111,99,97,116,105,111, + 110,32,117,115,101,100,32,98,121,32,116,104,101,32,108,111, + 97,100,101,114,32,102,114,111,109,32,119,104,105,99,104,32, + 116,111,10,32,32,32,32,108,111,97,100,32,116,104,101,32, + 109,111,100,117,108,101,44,32,105,102,32,116,104,97,116,32, + 105,110,102,111,114,109,97,116,105,111,110,32,105,115,32,97, + 118,97,105,108,97,98,108,101,46,32,32,87,104,101,110,32, + 102,105,108,101,110,97,109,101,32,105,115,10,32,32,32,32, + 115,101,116,44,32,111,114,105,103,105,110,32,119,105,108,108, + 32,109,97,116,99,104,46,10,10,32,32,32,32,96,104,97, + 115,95,108,111,99,97,116,105,111,110,96,32,105,110,100,105, + 99,97,116,101,115,32,116,104,97,116,32,97,32,115,112,101, + 99,39,115,32,34,111,114,105,103,105,110,34,32,114,101,102, + 108,101,99,116,115,32,97,32,108,111,99,97,116,105,111,110, + 46,10,32,32,32,32,87,104,101,110,32,116,104,105,115,32, + 105,115,32,84,114,117,101,44,32,96,95,95,102,105,108,101, + 95,95,96,32,97,116,116,114,105,98,117,116,101,32,111,102, + 32,116,104,101,32,109,111,100,117,108,101,32,105,115,32,115, + 101,116,46,10,10,32,32,32,32,96,99,97,99,104,101,100, + 96,32,105,115,32,116,104,101,32,108,111,99,97,116,105,111, + 110,32,111,102,32,116,104,101,32,99,97,99,104,101,100,32, + 98,121,116,101,99,111,100,101,32,102,105,108,101,44,32,105, + 102,32,97,110,121,46,32,32,73,116,10,32,32,32,32,99, + 111,114,114,101,115,112,111,110,100,115,32,116,111,32,116,104, + 101,32,96,95,95,99,97,99,104,101,100,95,95,96,32,97, + 116,116,114,105,98,117,116,101,46,10,10,32,32,32,32,96, + 115,117,98,109,111,100,117,108,101,95,115,101,97,114,99,104, + 95,108,111,99,97,116,105,111,110,115,96,32,105,115,32,116, + 104,101,32,115,101,113,117,101,110,99,101,32,111,102,32,112, + 97,116,104,32,101,110,116,114,105,101,115,32,116,111,10,32, + 32,32,32,115,101,97,114,99,104,32,119,104,101,110,32,105, + 109,112,111,114,116,105,110,103,32,115,117,98,109,111,100,117, + 108,101,115,46,32,32,73,102,32,115,101,116,44,32,105,115, + 95,112,97,99,107,97,103,101,32,115,104,111,117,108,100,32, + 98,101,10,32,32,32,32,84,114,117,101,45,45,97,110,100, + 32,70,97,108,115,101,32,111,116,104,101,114,119,105,115,101, + 46,10,10,32,32,32,32,80,97,99,107,97,103,101,115,32, + 97,114,101,32,115,105,109,112,108,121,32,109,111,100,117,108, + 101,115,32,116,104,97,116,32,40,109,97,121,41,32,104,97, + 118,101,32,115,117,98,109,111,100,117,108,101,115,46,32,32, + 73,102,32,97,32,115,112,101,99,10,32,32,32,32,104,97, + 115,32,97,32,110,111,110,45,78,111,110,101,32,118,97,108, + 117,101,32,105,110,32,96,115,117,98,109,111,100,117,108,101, + 95,115,101,97,114,99,104,95,108,111,99,97,116,105,111,110, + 
115,96,44,32,116,104,101,32,105,109,112,111,114,116,10,32, + 32,32,32,115,121,115,116,101,109,32,119,105,108,108,32,99, + 111,110,115,105,100,101,114,32,109,111,100,117,108,101,115,32, + 108,111,97,100,101,100,32,102,114,111,109,32,116,104,101,32, + 115,112,101,99,32,97,115,32,112,97,99,107,97,103,101,115, + 46,10,10,32,32,32,32,79,110,108,121,32,102,105,110,100, + 101,114,115,32,40,115,101,101,32,105,109,112,111,114,116,108, + 105,98,46,97,98,99,46,77,101,116,97,80,97,116,104,70, + 105,110,100,101,114,32,97,110,100,10,32,32,32,32,105,109, + 112,111,114,116,108,105,98,46,97,98,99,46,80,97,116,104, + 69,110,116,114,121,70,105,110,100,101,114,41,32,115,104,111, + 117,108,100,32,109,111,100,105,102,121,32,77,111,100,117,108, + 101,83,112,101,99,32,105,110,115,116,97,110,99,101,115,46, + 10,10,32,32,32,32,78,41,3,218,6,111,114,105,103,105, + 110,218,12,108,111,97,100,101,114,95,115,116,97,116,101,218, + 10,105,115,95,112,97,99,107,97,103,101,99,3,0,0,0, + 0,0,0,0,3,0,0,0,6,0,0,0,2,0,0,0, + 67,0,0,0,115,54,0,0,0,124,1,124,0,95,0,124, + 2,124,0,95,1,124,3,124,0,95,2,124,4,124,0,95, + 3,124,5,114,32,103,0,110,2,100,0,124,0,95,4,100, + 1,124,0,95,5,100,0,124,0,95,6,100,0,83,0,41, + 2,78,70,41,7,114,17,0,0,0,114,111,0,0,0,114, + 115,0,0,0,114,116,0,0,0,218,26,115,117,98,109,111, + 100,117,108,101,95,115,101,97,114,99,104,95,108,111,99,97, + 116,105,111,110,115,218,13,95,115,101,116,95,102,105,108,101, + 97,116,116,114,218,7,95,99,97,99,104,101,100,41,6,114, + 30,0,0,0,114,17,0,0,0,114,111,0,0,0,114,115, + 0,0,0,114,116,0,0,0,114,117,0,0,0,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,114,31,0,0, + 0,86,1,0,0,115,14,0,0,0,0,2,6,1,6,1, + 6,1,6,1,14,3,6,1,122,19,77,111,100,117,108,101, + 83,112,101,99,46,95,95,105,110,105,116,95,95,99,1,0, + 0,0,0,0,0,0,0,0,0,0,2,0,0,0,5,0, + 0,0,67,0,0,0,115,106,0,0,0,100,1,124,0,106, + 0,155,2,157,2,100,2,124,0,106,1,155,2,157,2,103, + 2,125,1,124,0,106,2,100,0,107,9,114,52,124,1,160, + 3,100,3,124,0,106,2,155,2,157,2,161,1,1,0,124, + 0,106,4,100,0,107,9,114,80,124,1,160,3,100,4,124, + 0,106,4,155,0,157,2,161,1,1,0,124,0,106,5,106, + 6,155,0,100,5,100,6,160,7,124,1,161,1,155,0,100, + 7,157,4,83,0,41,8,78,122,5,110,97,109,101,61,122, + 7,108,111,97,100,101,114,61,122,7,111,114,105,103,105,110, + 61,122,27,115,117,98,109,111,100,117,108,101,95,115,101,97, + 114,99,104,95,108,111,99,97,116,105,111,110,115,61,250,1, + 40,122,2,44,32,250,1,41,41,8,114,17,0,0,0,114, + 111,0,0,0,114,115,0,0,0,218,6,97,112,112,101,110, + 100,114,118,0,0,0,218,9,95,95,99,108,97,115,115,95, + 95,114,1,0,0,0,218,4,106,111,105,110,41,2,114,30, + 0,0,0,114,54,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,114,47,0,0,0,98,1,0,0, + 115,16,0,0,0,0,1,10,1,10,255,4,2,10,1,18, + 1,10,1,18,1,122,19,77,111,100,117,108,101,83,112,101, + 99,46,95,95,114,101,112,114,95,95,99,2,0,0,0,0, + 0,0,0,0,0,0,0,3,0,0,0,8,0,0,0,67, + 0,0,0,115,108,0,0,0,124,0,106,0,125,2,122,72, + 124,0,106,1,124,1,106,1,107,2,111,76,124,0,106,2, + 124,1,106,2,107,2,111,76,124,0,106,3,124,1,106,3, + 107,2,111,76,124,2,124,1,106,0,107,2,111,76,124,0, + 106,4,124,1,106,4,107,2,111,76,124,0,106,5,124,1, + 106,5,107,2,87,0,83,0,4,0,116,6,107,10,114,102, + 1,0,1,0,1,0,116,7,6,0,89,0,83,0,88,0, + 100,0,83,0,114,13,0,0,0,41,8,114,118,0,0,0, + 114,17,0,0,0,114,111,0,0,0,114,115,0,0,0,218, + 6,99,97,99,104,101,100,218,12,104,97,115,95,108,111,99, + 97,116,105,111,110,114,108,0,0,0,218,14,78,111,116,73, + 109,112,108,101,109,101,110,116,101,100,41,3,114,30,0,0, + 0,90,5,111,116,104,101,114,90,4,115,109,115,108,114,10, + 
0,0,0,114,10,0,0,0,114,11,0,0,0,218,6,95, + 95,101,113,95,95,107,1,0,0,115,30,0,0,0,0,1, + 6,1,2,1,12,1,10,255,2,2,10,254,2,3,8,253, + 2,4,10,252,2,5,10,251,4,6,14,1,122,17,77,111, + 100,117,108,101,83,112,101,99,46,95,95,101,113,95,95,99, + 1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, + 3,0,0,0,67,0,0,0,115,58,0,0,0,124,0,106, + 0,100,0,107,8,114,52,124,0,106,1,100,0,107,9,114, + 52,124,0,106,2,114,52,116,3,100,0,107,8,114,38,116, + 4,130,1,116,3,160,5,124,0,106,1,161,1,124,0,95, + 0,124,0,106,0,83,0,114,13,0,0,0,41,6,114,120, + 0,0,0,114,115,0,0,0,114,119,0,0,0,218,19,95, + 98,111,111,116,115,116,114,97,112,95,101,120,116,101,114,110, + 97,108,218,19,78,111,116,73,109,112,108,101,109,101,110,116, + 101,100,69,114,114,111,114,90,11,95,103,101,116,95,99,97, + 99,104,101,100,114,46,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,114,126,0,0,0,119,1,0, + 0,115,12,0,0,0,0,2,10,1,16,1,8,1,4,1, + 14,1,122,17,77,111,100,117,108,101,83,112,101,99,46,99, + 97,99,104,101,100,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,2,0,0,0,67,0,0,0,115,10, + 0,0,0,124,1,124,0,95,0,100,0,83,0,114,13,0, + 0,0,41,1,114,120,0,0,0,41,2,114,30,0,0,0, + 114,126,0,0,0,114,10,0,0,0,114,10,0,0,0,114, + 11,0,0,0,114,126,0,0,0,128,1,0,0,115,2,0, + 0,0,0,2,99,1,0,0,0,0,0,0,0,0,0,0, + 0,1,0,0,0,3,0,0,0,67,0,0,0,115,36,0, + 0,0,124,0,106,0,100,1,107,8,114,26,124,0,106,1, + 160,2,100,2,161,1,100,3,25,0,83,0,124,0,106,1, + 83,0,100,1,83,0,41,4,122,32,84,104,101,32,110,97, + 109,101,32,111,102,32,116,104,101,32,109,111,100,117,108,101, + 39,115,32,112,97,114,101,110,116,46,78,218,1,46,114,22, + 0,0,0,41,3,114,118,0,0,0,114,17,0,0,0,218, + 10,114,112,97,114,116,105,116,105,111,110,114,46,0,0,0, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,218, + 6,112,97,114,101,110,116,132,1,0,0,115,6,0,0,0, + 0,3,10,1,16,2,122,17,77,111,100,117,108,101,83,112, + 101,99,46,112,97,114,101,110,116,99,1,0,0,0,0,0, + 0,0,0,0,0,0,1,0,0,0,1,0,0,0,67,0, + 0,0,115,6,0,0,0,124,0,106,0,83,0,114,13,0, + 0,0,41,1,114,119,0,0,0,114,46,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,11,0,0,0,114,127,0, + 0,0,140,1,0,0,115,2,0,0,0,0,2,122,23,77, + 111,100,117,108,101,83,112,101,99,46,104,97,115,95,108,111, + 99,97,116,105,111,110,99,2,0,0,0,0,0,0,0,0, + 0,0,0,2,0,0,0,2,0,0,0,67,0,0,0,115, + 14,0,0,0,116,0,124,1,131,1,124,0,95,1,100,0, + 83,0,114,13,0,0,0,41,2,218,4,98,111,111,108,114, + 119,0,0,0,41,2,114,30,0,0,0,218,5,118,97,108, + 117,101,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,114,127,0,0,0,144,1,0,0,115,2,0,0,0,0, + 2,41,12,114,1,0,0,0,114,0,0,0,0,114,2,0, + 0,0,114,3,0,0,0,114,31,0,0,0,114,47,0,0, + 0,114,129,0,0,0,218,8,112,114,111,112,101,114,116,121, + 114,126,0,0,0,218,6,115,101,116,116,101,114,114,134,0, + 0,0,114,127,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,10,0,0,0,114,11,0,0,0,114,114,0,0,0, + 49,1,0,0,115,32,0,0,0,8,1,4,36,4,1,2, + 255,12,12,8,9,8,12,2,1,10,8,4,1,10,3,2, + 1,10,7,2,1,10,3,4,1,114,114,0,0,0,169,2, + 114,115,0,0,0,114,117,0,0,0,99,2,0,0,0,0, + 0,0,0,2,0,0,0,6,0,0,0,8,0,0,0,67, + 0,0,0,115,154,0,0,0,116,0,124,1,100,1,131,2, + 114,74,116,1,100,2,107,8,114,22,116,2,130,1,116,1, + 106,3,125,4,124,3,100,2,107,8,114,48,124,4,124,0, + 124,1,100,3,141,2,83,0,124,3,114,56,103,0,110,2, + 100,2,125,5,124,4,124,0,124,1,124,5,100,4,141,3, + 83,0,124,3,100,2,107,8,114,138,116,0,124,1,100,5, + 131,2,114,134,122,14,124,1,160,4,124,0,161,1,125,3, + 87,0,113,138,4,0,116,5,107,10,114,130,1,0,1,0, + 1,0,100,2,125,3,89,0,113,138,88,0,110,4,100,6, + 125,3,116,6,124,0,124,1,124,2,124,3,100,7,141,4, + 83,0,41,8,122,53,82,101,116,117,114,110,32,97,32,109, + 
111,100,117,108,101,32,115,112,101,99,32,98,97,115,101,100, + 32,111,110,32,118,97,114,105,111,117,115,32,108,111,97,100, + 101,114,32,109,101,116,104,111,100,115,46,90,12,103,101,116, + 95,102,105,108,101,110,97,109,101,78,41,1,114,111,0,0, + 0,41,2,114,111,0,0,0,114,118,0,0,0,114,117,0, + 0,0,70,114,139,0,0,0,41,7,114,4,0,0,0,114, + 130,0,0,0,114,131,0,0,0,218,23,115,112,101,99,95, + 102,114,111,109,95,102,105,108,101,95,108,111,99,97,116,105, + 111,110,114,117,0,0,0,114,79,0,0,0,114,114,0,0, + 0,41,6,114,17,0,0,0,114,111,0,0,0,114,115,0, + 0,0,114,117,0,0,0,114,140,0,0,0,90,6,115,101, + 97,114,99,104,114,10,0,0,0,114,10,0,0,0,114,11, + 0,0,0,114,91,0,0,0,149,1,0,0,115,36,0,0, + 0,0,2,10,1,8,1,4,1,6,2,8,1,12,1,12, + 1,6,1,2,255,6,3,8,1,10,1,2,1,14,1,14, + 1,12,3,4,2,114,91,0,0,0,99,3,0,0,0,0, + 0,0,0,0,0,0,0,8,0,0,0,8,0,0,0,67, + 0,0,0,115,56,1,0,0,122,10,124,0,106,0,125,3, + 87,0,110,20,4,0,116,1,107,10,114,30,1,0,1,0, + 1,0,89,0,110,14,88,0,124,3,100,0,107,9,114,44, + 124,3,83,0,124,0,106,2,125,4,124,1,100,0,107,8, + 114,90,122,10,124,0,106,3,125,1,87,0,110,20,4,0, + 116,1,107,10,114,88,1,0,1,0,1,0,89,0,110,2, + 88,0,122,10,124,0,106,4,125,5,87,0,110,24,4,0, + 116,1,107,10,114,124,1,0,1,0,1,0,100,0,125,5, + 89,0,110,2,88,0,124,2,100,0,107,8,114,184,124,5, + 100,0,107,8,114,180,122,10,124,1,106,5,125,2,87,0, + 113,184,4,0,116,1,107,10,114,176,1,0,1,0,1,0, + 100,0,125,2,89,0,113,184,88,0,110,4,124,5,125,2, + 122,10,124,0,106,6,125,6,87,0,110,24,4,0,116,1, + 107,10,114,218,1,0,1,0,1,0,100,0,125,6,89,0, + 110,2,88,0,122,14,116,7,124,0,106,8,131,1,125,7, + 87,0,110,26,4,0,116,1,107,10,144,1,114,4,1,0, + 1,0,1,0,100,0,125,7,89,0,110,2,88,0,116,9, + 124,4,124,1,124,2,100,1,141,3,125,3,124,5,100,0, + 107,8,144,1,114,34,100,2,110,2,100,3,124,3,95,10, + 124,6,124,3,95,11,124,7,124,3,95,12,124,3,83,0, + 41,4,78,169,1,114,115,0,0,0,70,84,41,13,114,107, + 0,0,0,114,108,0,0,0,114,1,0,0,0,114,98,0, + 0,0,114,110,0,0,0,218,7,95,79,82,73,71,73,78, + 218,10,95,95,99,97,99,104,101,100,95,95,218,4,108,105, + 115,116,218,8,95,95,112,97,116,104,95,95,114,114,0,0, + 0,114,119,0,0,0,114,126,0,0,0,114,118,0,0,0, + 41,8,114,96,0,0,0,114,111,0,0,0,114,115,0,0, + 0,114,95,0,0,0,114,17,0,0,0,90,8,108,111,99, + 97,116,105,111,110,114,126,0,0,0,114,118,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,218,17, + 95,115,112,101,99,95,102,114,111,109,95,109,111,100,117,108, + 101,175,1,0,0,115,72,0,0,0,0,2,2,1,10,1, + 14,1,6,2,8,1,4,2,6,1,8,1,2,1,10,1, + 14,2,6,1,2,1,10,1,14,1,10,1,8,1,8,1, + 2,1,10,1,14,1,12,2,4,1,2,1,10,1,14,1, + 10,1,2,1,14,1,16,1,10,2,14,1,20,1,6,1, + 6,1,114,146,0,0,0,70,169,1,218,8,111,118,101,114, + 114,105,100,101,99,2,0,0,0,0,0,0,0,1,0,0, + 0,5,0,0,0,8,0,0,0,67,0,0,0,115,226,1, + 0,0,124,2,115,20,116,0,124,1,100,1,100,0,131,3, + 100,0,107,8,114,54,122,12,124,0,106,1,124,1,95,2, + 87,0,110,20,4,0,116,3,107,10,114,52,1,0,1,0, + 1,0,89,0,110,2,88,0,124,2,115,74,116,0,124,1, + 100,2,100,0,131,3,100,0,107,8,114,178,124,0,106,4, + 125,3,124,3,100,0,107,8,114,146,124,0,106,5,100,0, + 107,9,114,146,116,6,100,0,107,8,114,110,116,7,130,1, + 116,6,106,8,125,4,124,4,160,9,124,4,161,1,125,3, + 124,0,106,5,124,3,95,10,124,3,124,0,95,4,100,0, + 124,1,95,11,122,10,124,3,124,1,95,12,87,0,110,20, + 4,0,116,3,107,10,114,176,1,0,1,0,1,0,89,0, + 110,2,88,0,124,2,115,198,116,0,124,1,100,3,100,0, + 131,3,100,0,107,8,114,232,122,12,124,0,106,13,124,1, + 95,14,87,0,110,20,4,0,116,3,107,10,114,230,1,0, + 1,0,1,0,89,0,110,2,88,0,122,10,124,0,124,1, + 95,15,87,0,110,22,4,0,116,3,107,10,144,1,114,8, + 
1,0,1,0,1,0,89,0,110,2,88,0,124,2,144,1, + 115,34,116,0,124,1,100,4,100,0,131,3,100,0,107,8, + 144,1,114,82,124,0,106,5,100,0,107,9,144,1,114,82, + 122,12,124,0,106,5,124,1,95,16,87,0,110,22,4,0, + 116,3,107,10,144,1,114,80,1,0,1,0,1,0,89,0, + 110,2,88,0,124,0,106,17,144,1,114,222,124,2,144,1, + 115,114,116,0,124,1,100,5,100,0,131,3,100,0,107,8, + 144,1,114,150,122,12,124,0,106,18,124,1,95,11,87,0, + 110,22,4,0,116,3,107,10,144,1,114,148,1,0,1,0, + 1,0,89,0,110,2,88,0,124,2,144,1,115,174,116,0, + 124,1,100,6,100,0,131,3,100,0,107,8,144,1,114,222, + 124,0,106,19,100,0,107,9,144,1,114,222,122,12,124,0, + 106,19,124,1,95,20,87,0,110,22,4,0,116,3,107,10, + 144,1,114,220,1,0,1,0,1,0,89,0,110,2,88,0, + 124,1,83,0,41,7,78,114,1,0,0,0,114,98,0,0, + 0,218,11,95,95,112,97,99,107,97,103,101,95,95,114,145, + 0,0,0,114,110,0,0,0,114,143,0,0,0,41,21,114, + 6,0,0,0,114,17,0,0,0,114,1,0,0,0,114,108, + 0,0,0,114,111,0,0,0,114,118,0,0,0,114,130,0, + 0,0,114,131,0,0,0,218,16,95,78,97,109,101,115,112, + 97,99,101,76,111,97,100,101,114,218,7,95,95,110,101,119, + 95,95,90,5,95,112,97,116,104,114,110,0,0,0,114,98, + 0,0,0,114,134,0,0,0,114,149,0,0,0,114,107,0, + 0,0,114,145,0,0,0,114,127,0,0,0,114,115,0,0, + 0,114,126,0,0,0,114,143,0,0,0,41,5,114,95,0, + 0,0,114,96,0,0,0,114,148,0,0,0,114,111,0,0, + 0,114,150,0,0,0,114,10,0,0,0,114,10,0,0,0, + 114,11,0,0,0,218,18,95,105,110,105,116,95,109,111,100, + 117,108,101,95,97,116,116,114,115,220,1,0,0,115,96,0, + 0,0,0,4,20,1,2,1,12,1,14,1,6,2,20,1, + 6,1,8,2,10,1,8,1,4,1,6,2,10,1,8,1, + 6,11,6,1,2,1,10,1,14,1,6,2,20,1,2,1, + 12,1,14,1,6,2,2,1,10,1,16,1,6,2,24,1, + 12,1,2,1,12,1,16,1,6,2,8,1,24,1,2,1, + 12,1,16,1,6,2,24,1,12,1,2,1,12,1,16,1, + 6,1,114,152,0,0,0,99,1,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,3,0,0,0,67,0,0,0, + 115,82,0,0,0,100,1,125,1,116,0,124,0,106,1,100, + 2,131,2,114,30,124,0,106,1,160,2,124,0,161,1,125, + 1,110,20,116,0,124,0,106,1,100,3,131,2,114,50,116, + 3,100,4,131,1,130,1,124,1,100,1,107,8,114,68,116, + 4,124,0,106,5,131,1,125,1,116,6,124,0,124,1,131, + 2,1,0,124,1,83,0,41,5,122,43,67,114,101,97,116, + 101,32,97,32,109,111,100,117,108,101,32,98,97,115,101,100, + 32,111,110,32,116,104,101,32,112,114,111,118,105,100,101,100, + 32,115,112,101,99,46,78,218,13,99,114,101,97,116,101,95, + 109,111,100,117,108,101,218,11,101,120,101,99,95,109,111,100, + 117,108,101,122,66,108,111,97,100,101,114,115,32,116,104,97, + 116,32,100,101,102,105,110,101,32,101,120,101,99,95,109,111, + 100,117,108,101,40,41,32,109,117,115,116,32,97,108,115,111, + 32,100,101,102,105,110,101,32,99,114,101,97,116,101,95,109, + 111,100,117,108,101,40,41,41,7,114,4,0,0,0,114,111, + 0,0,0,114,153,0,0,0,114,79,0,0,0,114,18,0, + 0,0,114,17,0,0,0,114,152,0,0,0,169,2,114,95, + 0,0,0,114,96,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,16,109,111,100,117,108,101,95, + 102,114,111,109,95,115,112,101,99,36,2,0,0,115,18,0, + 0,0,0,3,4,1,12,3,14,1,12,1,8,2,8,1, + 10,1,10,1,114,156,0,0,0,99,1,0,0,0,0,0, + 0,0,0,0,0,0,2,0,0,0,5,0,0,0,67,0, + 0,0,115,126,0,0,0,124,0,106,0,100,1,107,8,114, + 14,100,2,110,4,124,0,106,0,125,1,124,0,106,1,100, + 1,107,8,114,74,124,0,106,2,100,1,107,8,114,52,100, + 3,124,1,155,2,100,4,157,3,83,0,100,3,124,1,155, + 2,100,5,124,0,106,2,155,2,100,6,157,5,83,0,110, + 48,124,0,106,3,114,100,100,3,124,1,155,2,100,7,124, + 0,106,1,155,2,100,4,157,5,83,0,100,3,124,0,106, + 0,155,2,100,5,124,0,106,1,155,0,100,6,157,5,83, + 0,100,1,83,0,41,8,122,38,82,101,116,117,114,110,32, + 116,104,101,32,114,101,112,114,32,116,111,32,117,115,101,32, + 
102,111,114,32,116,104,101,32,109,111,100,117,108,101,46,78, + 114,100,0,0,0,114,101,0,0,0,114,102,0,0,0,114, + 103,0,0,0,114,104,0,0,0,114,105,0,0,0,41,4, + 114,17,0,0,0,114,115,0,0,0,114,111,0,0,0,114, + 127,0,0,0,41,2,114,95,0,0,0,114,17,0,0,0, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,114, + 109,0,0,0,53,2,0,0,115,16,0,0,0,0,3,20, + 1,10,1,10,1,12,2,22,2,6,1,20,2,114,109,0, + 0,0,99,2,0,0,0,0,0,0,0,0,0,0,0,4, + 0,0,0,10,0,0,0,67,0,0,0,115,206,0,0,0, + 124,0,106,0,125,2,116,1,124,2,131,1,143,182,1,0, + 116,2,106,3,160,4,124,2,161,1,124,1,107,9,114,56, + 100,1,124,2,155,2,100,2,157,3,125,3,116,5,124,3, + 124,2,100,3,141,2,130,1,122,106,124,0,106,7,100,4, + 107,8,114,108,124,0,106,8,100,4,107,8,114,92,116,5, + 100,5,124,0,106,0,100,3,141,2,130,1,116,9,124,0, + 124,1,100,6,100,7,141,3,1,0,110,52,116,9,124,0, + 124,1,100,6,100,7,141,3,1,0,116,10,124,0,106,7, + 100,8,131,2,115,148,124,0,106,7,160,11,124,2,161,1, + 1,0,110,12,124,0,106,7,160,12,124,1,161,1,1,0, + 87,0,53,0,116,2,106,3,160,6,124,0,106,0,161,1, + 125,1,124,1,116,2,106,3,124,0,106,0,60,0,88,0, + 87,0,53,0,81,0,82,0,88,0,124,1,83,0,41,9, + 122,70,69,120,101,99,117,116,101,32,116,104,101,32,115,112, + 101,99,39,115,32,115,112,101,99,105,102,105,101,100,32,109, + 111,100,117,108,101,32,105,110,32,97,110,32,101,120,105,115, + 116,105,110,103,32,109,111,100,117,108,101,39,115,32,110,97, + 109,101,115,112,97,99,101,46,122,7,109,111,100,117,108,101, + 32,122,19,32,110,111,116,32,105,110,32,115,121,115,46,109, + 111,100,117,108,101,115,114,16,0,0,0,78,250,14,109,105, + 115,115,105,110,103,32,108,111,97,100,101,114,84,114,147,0, + 0,0,114,154,0,0,0,41,13,114,17,0,0,0,114,49, + 0,0,0,114,15,0,0,0,114,92,0,0,0,114,34,0, + 0,0,114,79,0,0,0,218,3,112,111,112,114,111,0,0, + 0,114,118,0,0,0,114,152,0,0,0,114,4,0,0,0, + 218,11,108,111,97,100,95,109,111,100,117,108,101,114,154,0, + 0,0,41,4,114,95,0,0,0,114,96,0,0,0,114,17, + 0,0,0,218,3,109,115,103,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,114,93,0,0,0,70,2,0,0, + 115,34,0,0,0,0,2,6,1,10,1,16,1,12,1,12, + 1,2,1,10,1,10,1,14,2,16,2,14,1,12,4,14, + 2,16,4,14,1,24,1,114,93,0,0,0,99,1,0,0, + 0,0,0,0,0,0,0,0,0,2,0,0,0,8,0,0, + 0,67,0,0,0,115,26,1,0,0,122,18,124,0,106,0, + 160,1,124,0,106,2,161,1,1,0,87,0,110,52,1,0, + 1,0,1,0,124,0,106,2,116,3,106,4,107,6,114,64, + 116,3,106,4,160,5,124,0,106,2,161,1,125,1,124,1, + 116,3,106,4,124,0,106,2,60,0,130,0,89,0,110,2, + 88,0,116,3,106,4,160,5,124,0,106,2,161,1,125,1, + 124,1,116,3,106,4,124,0,106,2,60,0,116,6,124,1, + 100,1,100,0,131,3,100,0,107,8,114,148,122,12,124,0, + 106,0,124,1,95,7,87,0,110,20,4,0,116,8,107,10, + 114,146,1,0,1,0,1,0,89,0,110,2,88,0,116,6, + 124,1,100,2,100,0,131,3,100,0,107,8,114,226,122,40, + 124,1,106,9,124,1,95,10,116,11,124,1,100,3,131,2, + 115,202,124,0,106,2,160,12,100,4,161,1,100,5,25,0, + 124,1,95,10,87,0,110,20,4,0,116,8,107,10,114,224, + 1,0,1,0,1,0,89,0,110,2,88,0,116,6,124,1, + 100,6,100,0,131,3,100,0,107,8,144,1,114,22,122,10, + 124,0,124,1,95,13,87,0,110,22,4,0,116,8,107,10, + 144,1,114,20,1,0,1,0,1,0,89,0,110,2,88,0, + 124,1,83,0,41,7,78,114,98,0,0,0,114,149,0,0, + 0,114,145,0,0,0,114,132,0,0,0,114,22,0,0,0, + 114,107,0,0,0,41,14,114,111,0,0,0,114,159,0,0, + 0,114,17,0,0,0,114,15,0,0,0,114,92,0,0,0, + 114,158,0,0,0,114,6,0,0,0,114,98,0,0,0,114, + 108,0,0,0,114,1,0,0,0,114,149,0,0,0,114,4, + 0,0,0,114,133,0,0,0,114,107,0,0,0,114,155,0, + 0,0,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,218,25,95,108,111,97,100,95,98,97,99,107,119,97,114, + 100,95,99,111,109,112,97,116,105,98,108,101,100,2,0,0, + 
115,54,0,0,0,0,4,2,1,18,1,6,1,12,1,14, + 1,12,1,8,3,14,1,12,1,16,1,2,1,12,1,14, + 1,6,1,16,1,2,4,8,1,10,1,22,1,14,1,6, + 1,18,1,2,1,10,1,16,1,6,1,114,161,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,11,0,0,0,67,0,0,0,115,220,0,0,0,124,0, + 106,0,100,0,107,9,114,30,116,1,124,0,106,0,100,1, + 131,2,115,30,116,2,124,0,131,1,83,0,116,3,124,0, + 131,1,125,1,100,2,124,0,95,4,122,162,124,1,116,5, + 106,6,124,0,106,7,60,0,122,52,124,0,106,0,100,0, + 107,8,114,96,124,0,106,8,100,0,107,8,114,108,116,9, + 100,4,124,0,106,7,100,5,141,2,130,1,110,12,124,0, + 106,0,160,10,124,1,161,1,1,0,87,0,110,50,1,0, + 1,0,1,0,122,14,116,5,106,6,124,0,106,7,61,0, + 87,0,110,20,4,0,116,11,107,10,114,152,1,0,1,0, + 1,0,89,0,110,2,88,0,130,0,89,0,110,2,88,0, + 116,5,106,6,160,12,124,0,106,7,161,1,125,1,124,1, + 116,5,106,6,124,0,106,7,60,0,116,13,100,6,124,0, + 106,7,124,0,106,0,131,3,1,0,87,0,53,0,100,3, + 124,0,95,4,88,0,124,1,83,0,41,7,78,114,154,0, + 0,0,84,70,114,157,0,0,0,114,16,0,0,0,122,18, + 105,109,112,111,114,116,32,123,33,114,125,32,35,32,123,33, + 114,125,41,14,114,111,0,0,0,114,4,0,0,0,114,161, + 0,0,0,114,156,0,0,0,90,13,95,105,110,105,116,105, + 97,108,105,122,105,110,103,114,15,0,0,0,114,92,0,0, + 0,114,17,0,0,0,114,118,0,0,0,114,79,0,0,0, + 114,154,0,0,0,114,62,0,0,0,114,158,0,0,0,114, + 76,0,0,0,114,155,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,218,14,95,108,111,97,100,95, + 117,110,108,111,99,107,101,100,137,2,0,0,115,46,0,0, + 0,0,2,10,2,12,1,8,2,8,5,6,1,2,1,12, + 1,2,1,10,1,10,1,16,3,16,1,6,1,2,1,14, + 1,14,1,6,1,8,5,14,1,12,1,20,2,8,2,114, + 162,0,0,0,99,1,0,0,0,0,0,0,0,0,0,0, + 0,1,0,0,0,10,0,0,0,67,0,0,0,115,42,0, + 0,0,116,0,124,0,106,1,131,1,143,22,1,0,116,2, + 124,0,131,1,87,0,2,0,53,0,81,0,82,0,163,0, + 83,0,81,0,82,0,88,0,100,1,83,0,41,2,122,191, + 82,101,116,117,114,110,32,97,32,110,101,119,32,109,111,100, + 117,108,101,32,111,98,106,101,99,116,44,32,108,111,97,100, + 101,100,32,98,121,32,116,104,101,32,115,112,101,99,39,115, + 32,108,111,97,100,101,114,46,10,10,32,32,32,32,84,104, + 101,32,109,111,100,117,108,101,32,105,115,32,110,111,116,32, + 97,100,100,101,100,32,116,111,32,105,116,115,32,112,97,114, + 101,110,116,46,10,10,32,32,32,32,73,102,32,97,32,109, + 111,100,117,108,101,32,105,115,32,97,108,114,101,97,100,121, + 32,105,110,32,115,121,115,46,109,111,100,117,108,101,115,44, + 32,116,104,97,116,32,101,120,105,115,116,105,110,103,32,109, + 111,100,117,108,101,32,103,101,116,115,10,32,32,32,32,99, + 108,111,98,98,101,114,101,100,46,10,10,32,32,32,32,78, + 41,3,114,49,0,0,0,114,17,0,0,0,114,162,0,0, + 0,41,1,114,95,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,114,94,0,0,0,179,2,0,0, + 115,4,0,0,0,0,9,12,1,114,94,0,0,0,99,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4, + 0,0,0,64,0,0,0,115,140,0,0,0,101,0,90,1, + 100,0,90,2,100,1,90,3,100,2,90,4,101,5,100,3, + 100,4,132,0,131,1,90,6,101,7,100,20,100,6,100,7, + 132,1,131,1,90,8,101,7,100,21,100,8,100,9,132,1, + 131,1,90,9,101,7,100,10,100,11,132,0,131,1,90,10, + 101,7,100,12,100,13,132,0,131,1,90,11,101,7,101,12, + 100,14,100,15,132,0,131,1,131,1,90,13,101,7,101,12, + 100,16,100,17,132,0,131,1,131,1,90,14,101,7,101,12, + 100,18,100,19,132,0,131,1,131,1,90,15,101,7,101,16, + 131,1,90,17,100,5,83,0,41,22,218,15,66,117,105,108, + 116,105,110,73,109,112,111,114,116,101,114,122,144,77,101,116, + 97,32,112,97,116,104,32,105,109,112,111,114,116,32,102,111, + 114,32,98,117,105,108,116,45,105,110,32,109,111,100,117,108, + 101,115,46,10,10,32,32,32,32,65,108,108,32,109,101,116, + 104,111,100,115,32,97,114,101,32,101,105,116,104,101,114,32, + 
99,108,97,115,115,32,111,114,32,115,116,97,116,105,99,32, + 109,101,116,104,111,100,115,32,116,111,32,97,118,111,105,100, + 32,116,104,101,32,110,101,101,100,32,116,111,10,32,32,32, + 32,105,110,115,116,97,110,116,105,97,116,101,32,116,104,101, + 32,99,108,97,115,115,46,10,10,32,32,32,32,122,8,98, + 117,105,108,116,45,105,110,99,1,0,0,0,0,0,0,0, + 0,0,0,0,1,0,0,0,5,0,0,0,67,0,0,0, + 115,22,0,0,0,100,1,124,0,106,0,155,2,100,2,116, + 1,106,2,155,0,100,3,157,5,83,0,169,4,122,115,82, + 101,116,117,114,110,32,114,101,112,114,32,102,111,114,32,116, + 104,101,32,109,111,100,117,108,101,46,10,10,32,32,32,32, + 32,32,32,32,84,104,101,32,109,101,116,104,111,100,32,105, + 115,32,100,101,112,114,101,99,97,116,101,100,46,32,32,84, + 104,101,32,105,109,112,111,114,116,32,109,97,99,104,105,110, + 101,114,121,32,100,111,101,115,32,116,104,101,32,106,111,98, + 32,105,116,115,101,108,102,46,10,10,32,32,32,32,32,32, + 32,32,114,101,0,0,0,114,103,0,0,0,114,104,0,0, + 0,41,3,114,1,0,0,0,114,163,0,0,0,114,142,0, + 0,0,41,1,114,96,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,114,99,0,0,0,205,2,0, + 0,115,2,0,0,0,0,7,122,27,66,117,105,108,116,105, + 110,73,109,112,111,114,116,101,114,46,109,111,100,117,108,101, + 95,114,101,112,114,78,99,4,0,0,0,0,0,0,0,0, + 0,0,0,4,0,0,0,5,0,0,0,67,0,0,0,115, + 46,0,0,0,124,2,100,0,107,9,114,12,100,0,83,0, + 116,0,160,1,124,1,161,1,114,38,116,2,124,1,124,0, + 124,0,106,3,100,1,141,3,83,0,100,0,83,0,100,0, + 83,0,169,2,78,114,141,0,0,0,41,4,114,56,0,0, + 0,90,10,105,115,95,98,117,105,108,116,105,110,114,91,0, + 0,0,114,142,0,0,0,169,4,218,3,99,108,115,114,81, + 0,0,0,218,4,112,97,116,104,218,6,116,97,114,103,101, + 116,114,10,0,0,0,114,10,0,0,0,114,11,0,0,0, + 218,9,102,105,110,100,95,115,112,101,99,214,2,0,0,115, + 10,0,0,0,0,2,8,1,4,1,10,1,16,2,122,25, + 66,117,105,108,116,105,110,73,109,112,111,114,116,101,114,46, + 102,105,110,100,95,115,112,101,99,99,3,0,0,0,0,0, + 0,0,0,0,0,0,4,0,0,0,4,0,0,0,67,0, + 0,0,115,30,0,0,0,124,0,160,0,124,1,124,2,161, + 2,125,3,124,3,100,1,107,9,114,26,124,3,106,1,83, + 0,100,1,83,0,41,2,122,175,70,105,110,100,32,116,104, + 101,32,98,117,105,108,116,45,105,110,32,109,111,100,117,108, + 101,46,10,10,32,32,32,32,32,32,32,32,73,102,32,39, + 112,97,116,104,39,32,105,115,32,101,118,101,114,32,115,112, + 101,99,105,102,105,101,100,32,116,104,101,110,32,116,104,101, + 32,115,101,97,114,99,104,32,105,115,32,99,111,110,115,105, + 100,101,114,101,100,32,97,32,102,97,105,108,117,114,101,46, + 10,10,32,32,32,32,32,32,32,32,84,104,105,115,32,109, + 101,116,104,111,100,32,105,115,32,100,101,112,114,101,99,97, + 116,101,100,46,32,32,85,115,101,32,102,105,110,100,95,115, + 112,101,99,40,41,32,105,110,115,116,101,97,100,46,10,10, + 32,32,32,32,32,32,32,32,78,41,2,114,170,0,0,0, + 114,111,0,0,0,41,4,114,167,0,0,0,114,81,0,0, + 0,114,168,0,0,0,114,95,0,0,0,114,10,0,0,0, + 114,10,0,0,0,114,11,0,0,0,218,11,102,105,110,100, + 95,109,111,100,117,108,101,223,2,0,0,115,4,0,0,0, + 0,9,12,1,122,27,66,117,105,108,116,105,110,73,109,112, + 111,114,116,101,114,46,102,105,110,100,95,109,111,100,117,108, + 101,99,2,0,0,0,0,0,0,0,0,0,0,0,2,0, + 0,0,4,0,0,0,67,0,0,0,115,46,0,0,0,124, + 1,106,0,116,1,106,2,107,7,114,34,116,3,124,1,106, + 0,155,2,100,1,157,2,124,1,106,0,100,2,141,2,130, + 1,116,4,116,5,106,6,124,1,131,2,83,0,41,3,122, + 24,67,114,101,97,116,101,32,97,32,98,117,105,108,116,45, + 105,110,32,109,111,100,117,108,101,114,77,0,0,0,114,16, + 0,0,0,41,7,114,17,0,0,0,114,15,0,0,0,114, + 78,0,0,0,114,79,0,0,0,114,66,0,0,0,114,56, + 
0,0,0,90,14,99,114,101,97,116,101,95,98,117,105,108, + 116,105,110,41,2,114,30,0,0,0,114,95,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,114,153, + 0,0,0,235,2,0,0,115,10,0,0,0,0,3,12,1, + 12,1,4,255,6,2,122,29,66,117,105,108,116,105,110,73, + 109,112,111,114,116,101,114,46,99,114,101,97,116,101,95,109, + 111,100,117,108,101,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,3,0,0,0,67,0,0,0,115,16, + 0,0,0,116,0,116,1,106,2,124,1,131,2,1,0,100, + 1,83,0,41,2,122,22,69,120,101,99,32,97,32,98,117, + 105,108,116,45,105,110,32,109,111,100,117,108,101,78,41,3, + 114,66,0,0,0,114,56,0,0,0,90,12,101,120,101,99, + 95,98,117,105,108,116,105,110,41,2,114,30,0,0,0,114, + 96,0,0,0,114,10,0,0,0,114,10,0,0,0,114,11, + 0,0,0,114,154,0,0,0,243,2,0,0,115,2,0,0, + 0,0,3,122,27,66,117,105,108,116,105,110,73,109,112,111, + 114,116,101,114,46,101,120,101,99,95,109,111,100,117,108,101, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,1,0,0,0,67,0,0,0,115,4,0,0,0,100,1, + 83,0,41,2,122,57,82,101,116,117,114,110,32,78,111,110, + 101,32,97,115,32,98,117,105,108,116,45,105,110,32,109,111, + 100,117,108,101,115,32,100,111,32,110,111,116,32,104,97,118, + 101,32,99,111,100,101,32,111,98,106,101,99,116,115,46,78, + 114,10,0,0,0,169,2,114,167,0,0,0,114,81,0,0, + 0,114,10,0,0,0,114,10,0,0,0,114,11,0,0,0, + 218,8,103,101,116,95,99,111,100,101,248,2,0,0,115,2, + 0,0,0,0,4,122,24,66,117,105,108,116,105,110,73,109, + 112,111,114,116,101,114,46,103,101,116,95,99,111,100,101,99, + 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, + 1,0,0,0,67,0,0,0,115,4,0,0,0,100,1,83, + 0,41,2,122,56,82,101,116,117,114,110,32,78,111,110,101, + 32,97,115,32,98,117,105,108,116,45,105,110,32,109,111,100, + 117,108,101,115,32,100,111,32,110,111,116,32,104,97,118,101, + 32,115,111,117,114,99,101,32,99,111,100,101,46,78,114,10, + 0,0,0,114,172,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,10,103,101,116,95,115,111,117, + 114,99,101,254,2,0,0,115,2,0,0,0,0,4,122,26, + 66,117,105,108,116,105,110,73,109,112,111,114,116,101,114,46, + 103,101,116,95,115,111,117,114,99,101,99,2,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,67, + 0,0,0,115,4,0,0,0,100,1,83,0,41,2,122,52, + 82,101,116,117,114,110,32,70,97,108,115,101,32,97,115,32, + 98,117,105,108,116,45,105,110,32,109,111,100,117,108,101,115, + 32,97,114,101,32,110,101,118,101,114,32,112,97,99,107,97, + 103,101,115,46,70,114,10,0,0,0,114,172,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,114,117, + 0,0,0,4,3,0,0,115,2,0,0,0,0,4,122,26, + 66,117,105,108,116,105,110,73,109,112,111,114,116,101,114,46, + 105,115,95,112,97,99,107,97,103,101,41,2,78,78,41,1, + 78,41,18,114,1,0,0,0,114,0,0,0,0,114,2,0, + 0,0,114,3,0,0,0,114,142,0,0,0,218,12,115,116, + 97,116,105,99,109,101,116,104,111,100,114,99,0,0,0,218, + 11,99,108,97,115,115,109,101,116,104,111,100,114,170,0,0, + 0,114,171,0,0,0,114,153,0,0,0,114,154,0,0,0, + 114,86,0,0,0,114,173,0,0,0,114,174,0,0,0,114, + 117,0,0,0,114,97,0,0,0,114,159,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,114,163,0,0,0,194,2,0,0,115,44,0,0,0, + 8,2,4,7,4,2,2,1,10,8,2,1,12,8,2,1, + 12,11,2,1,10,7,2,1,10,4,2,1,2,1,12,4, + 2,1,2,1,12,4,2,1,2,1,12,4,114,163,0,0, + 0,99,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,4,0,0,0,64,0,0,0,115,144,0,0,0,101, + 0,90,1,100,0,90,2,100,1,90,3,100,2,90,4,101, + 5,100,3,100,4,132,0,131,1,90,6,101,7,100,22,100, + 6,100,7,132,1,131,1,90,8,101,7,100,23,100,8,100, + 9,132,1,131,1,90,9,101,7,100,10,100,11,132,0,131, + 1,90,10,101,5,100,12,100,13,132,0,131,1,90,11,101, + 7,100,14,100,15,132,0,131,1,90,12,101,7,101,13,100, + 16,100,17,132,0,131,1,131,1,90,14,101,7,101,13,100, + 
18,100,19,132,0,131,1,131,1,90,15,101,7,101,13,100, + 20,100,21,132,0,131,1,131,1,90,16,100,5,83,0,41, + 24,218,14,70,114,111,122,101,110,73,109,112,111,114,116,101, + 114,122,142,77,101,116,97,32,112,97,116,104,32,105,109,112, + 111,114,116,32,102,111,114,32,102,114,111,122,101,110,32,109, + 111,100,117,108,101,115,46,10,10,32,32,32,32,65,108,108, + 32,109,101,116,104,111,100,115,32,97,114,101,32,101,105,116, + 104,101,114,32,99,108,97,115,115,32,111,114,32,115,116,97, + 116,105,99,32,109,101,116,104,111,100,115,32,116,111,32,97, + 118,111,105,100,32,116,104,101,32,110,101,101,100,32,116,111, + 10,32,32,32,32,105,110,115,116,97,110,116,105,97,116,101, + 32,116,104,101,32,99,108,97,115,115,46,10,10,32,32,32, + 32,90,6,102,114,111,122,101,110,99,1,0,0,0,0,0, + 0,0,0,0,0,0,1,0,0,0,5,0,0,0,67,0, + 0,0,115,22,0,0,0,100,1,124,0,106,0,155,2,100, + 2,116,1,106,2,155,0,100,3,157,5,83,0,114,164,0, + 0,0,41,3,114,1,0,0,0,114,177,0,0,0,114,142, + 0,0,0,41,1,218,1,109,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,114,99,0,0,0,24,3,0,0, + 115,2,0,0,0,0,7,122,26,70,114,111,122,101,110,73, + 109,112,111,114,116,101,114,46,109,111,100,117,108,101,95,114, + 101,112,114,78,99,4,0,0,0,0,0,0,0,0,0,0, + 0,4,0,0,0,5,0,0,0,67,0,0,0,115,34,0, + 0,0,116,0,160,1,124,1,161,1,114,26,116,2,124,1, + 124,0,124,0,106,3,100,1,141,3,83,0,100,0,83,0, + 100,0,83,0,114,165,0,0,0,41,4,114,56,0,0,0, + 114,88,0,0,0,114,91,0,0,0,114,142,0,0,0,114, + 166,0,0,0,114,10,0,0,0,114,10,0,0,0,114,11, + 0,0,0,114,170,0,0,0,33,3,0,0,115,6,0,0, + 0,0,2,10,1,16,2,122,24,70,114,111,122,101,110,73, + 109,112,111,114,116,101,114,46,102,105,110,100,95,115,112,101, + 99,99,3,0,0,0,0,0,0,0,0,0,0,0,3,0, + 0,0,3,0,0,0,67,0,0,0,115,18,0,0,0,116, + 0,160,1,124,1,161,1,114,14,124,0,83,0,100,1,83, + 0,41,2,122,93,70,105,110,100,32,97,32,102,114,111,122, + 101,110,32,109,111,100,117,108,101,46,10,10,32,32,32,32, + 32,32,32,32,84,104,105,115,32,109,101,116,104,111,100,32, + 105,115,32,100,101,112,114,101,99,97,116,101,100,46,32,32, + 85,115,101,32,102,105,110,100,95,115,112,101,99,40,41,32, + 105,110,115,116,101,97,100,46,10,10,32,32,32,32,32,32, + 32,32,78,41,2,114,56,0,0,0,114,88,0,0,0,41, + 3,114,167,0,0,0,114,81,0,0,0,114,168,0,0,0, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,114, + 171,0,0,0,40,3,0,0,115,2,0,0,0,0,7,122, + 26,70,114,111,122,101,110,73,109,112,111,114,116,101,114,46, + 102,105,110,100,95,109,111,100,117,108,101,99,2,0,0,0, + 0,0,0,0,0,0,0,0,2,0,0,0,1,0,0,0, + 67,0,0,0,115,4,0,0,0,100,1,83,0,41,2,122, + 42,85,115,101,32,100,101,102,97,117,108,116,32,115,101,109, + 97,110,116,105,99,115,32,102,111,114,32,109,111,100,117,108, + 101,32,99,114,101,97,116,105,111,110,46,78,114,10,0,0, + 0,41,2,114,167,0,0,0,114,95,0,0,0,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,114,153,0,0, + 0,49,3,0,0,115,2,0,0,0,0,2,122,28,70,114, + 111,122,101,110,73,109,112,111,114,116,101,114,46,99,114,101, + 97,116,101,95,109,111,100,117,108,101,99,1,0,0,0,0, + 0,0,0,0,0,0,0,3,0,0,0,4,0,0,0,67, + 0,0,0,115,64,0,0,0,124,0,106,0,106,1,125,1, + 116,2,160,3,124,1,161,1,115,36,116,4,124,1,155,2, + 100,1,157,2,124,1,100,2,141,2,130,1,116,5,116,2, + 106,6,124,1,131,2,125,2,116,7,124,2,124,0,106,8, + 131,2,1,0,100,0,83,0,114,87,0,0,0,41,9,114, + 107,0,0,0,114,17,0,0,0,114,56,0,0,0,114,88, + 0,0,0,114,79,0,0,0,114,66,0,0,0,218,17,103, + 101,116,95,102,114,111,122,101,110,95,111,98,106,101,99,116, + 218,4,101,120,101,99,114,7,0,0,0,41,3,114,96,0, + 0,0,114,17,0,0,0,218,4,99,111,100,101,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,114,154,0,0, + 0,53,3,0,0,115,10,0,0,0,0,2,8,1,10,1, + 
18,1,12,1,122,26,70,114,111,122,101,110,73,109,112,111, + 114,116,101,114,46,101,120,101,99,95,109,111,100,117,108,101, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,3,0,0,0,67,0,0,0,115,10,0,0,0,116,0, + 124,0,124,1,131,2,83,0,41,1,122,95,76,111,97,100, + 32,97,32,102,114,111,122,101,110,32,109,111,100,117,108,101, + 46,10,10,32,32,32,32,32,32,32,32,84,104,105,115,32, + 109,101,116,104,111,100,32,105,115,32,100,101,112,114,101,99, + 97,116,101,100,46,32,32,85,115,101,32,101,120,101,99,95, + 109,111,100,117,108,101,40,41,32,105,110,115,116,101,97,100, + 46,10,10,32,32,32,32,32,32,32,32,41,1,114,97,0, + 0,0,114,172,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,11,0,0,0,114,159,0,0,0,61,3,0,0,115, + 2,0,0,0,0,7,122,26,70,114,111,122,101,110,73,109, + 112,111,114,116,101,114,46,108,111,97,100,95,109,111,100,117, + 108,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,3,0,0,0,67,0,0,0,115,10,0,0,0, + 116,0,160,1,124,1,161,1,83,0,41,1,122,45,82,101, + 116,117,114,110,32,116,104,101,32,99,111,100,101,32,111,98, + 106,101,99,116,32,102,111,114,32,116,104,101,32,102,114,111, + 122,101,110,32,109,111,100,117,108,101,46,41,2,114,56,0, + 0,0,114,179,0,0,0,114,172,0,0,0,114,10,0,0, + 0,114,10,0,0,0,114,11,0,0,0,114,173,0,0,0, + 70,3,0,0,115,2,0,0,0,0,4,122,23,70,114,111, + 122,101,110,73,109,112,111,114,116,101,114,46,103,101,116,95, + 99,111,100,101,99,2,0,0,0,0,0,0,0,0,0,0, + 0,2,0,0,0,1,0,0,0,67,0,0,0,115,4,0, + 0,0,100,1,83,0,41,2,122,54,82,101,116,117,114,110, + 32,78,111,110,101,32,97,115,32,102,114,111,122,101,110,32, + 109,111,100,117,108,101,115,32,100,111,32,110,111,116,32,104, + 97,118,101,32,115,111,117,114,99,101,32,99,111,100,101,46, + 78,114,10,0,0,0,114,172,0,0,0,114,10,0,0,0, + 114,10,0,0,0,114,11,0,0,0,114,174,0,0,0,76, + 3,0,0,115,2,0,0,0,0,4,122,25,70,114,111,122, + 101,110,73,109,112,111,114,116,101,114,46,103,101,116,95,115, + 111,117,114,99,101,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,3,0,0,0,67,0,0,0,115,10, + 0,0,0,116,0,160,1,124,1,161,1,83,0,41,1,122, + 46,82,101,116,117,114,110,32,84,114,117,101,32,105,102,32, + 116,104,101,32,102,114,111,122,101,110,32,109,111,100,117,108, + 101,32,105,115,32,97,32,112,97,99,107,97,103,101,46,41, + 2,114,56,0,0,0,90,17,105,115,95,102,114,111,122,101, + 110,95,112,97,99,107,97,103,101,114,172,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,11,0,0,0,114,117,0, + 0,0,82,3,0,0,115,2,0,0,0,0,4,122,25,70, + 114,111,122,101,110,73,109,112,111,114,116,101,114,46,105,115, + 95,112,97,99,107,97,103,101,41,2,78,78,41,1,78,41, + 17,114,1,0,0,0,114,0,0,0,0,114,2,0,0,0, + 114,3,0,0,0,114,142,0,0,0,114,175,0,0,0,114, + 99,0,0,0,114,176,0,0,0,114,170,0,0,0,114,171, + 0,0,0,114,153,0,0,0,114,154,0,0,0,114,159,0, + 0,0,114,90,0,0,0,114,173,0,0,0,114,174,0,0, + 0,114,117,0,0,0,114,10,0,0,0,114,10,0,0,0, + 114,10,0,0,0,114,11,0,0,0,114,177,0,0,0,13, + 3,0,0,115,46,0,0,0,8,2,4,7,4,2,2,1, + 10,8,2,1,12,6,2,1,12,8,2,1,10,3,2,1, + 10,7,2,1,10,8,2,1,2,1,12,4,2,1,2,1, + 12,4,2,1,2,1,114,177,0,0,0,99,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, + 64,0,0,0,115,32,0,0,0,101,0,90,1,100,0,90, + 2,100,1,90,3,100,2,100,3,132,0,90,4,100,4,100, + 5,132,0,90,5,100,6,83,0,41,7,218,18,95,73,109, + 112,111,114,116,76,111,99,107,67,111,110,116,101,120,116,122, + 36,67,111,110,116,101,120,116,32,109,97,110,97,103,101,114, + 32,102,111,114,32,116,104,101,32,105,109,112,111,114,116,32, + 108,111,99,107,46,99,1,0,0,0,0,0,0,0,0,0, + 0,0,1,0,0,0,2,0,0,0,67,0,0,0,115,12, + 0,0,0,116,0,160,1,161,0,1,0,100,1,83,0,41, + 2,122,24,65,99,113,117,105,114,101,32,116,104,101,32,105, + 
109,112,111,114,116,32,108,111,99,107,46,78,41,2,114,56, + 0,0,0,114,57,0,0,0,114,46,0,0,0,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,114,53,0,0, + 0,95,3,0,0,115,2,0,0,0,0,2,122,28,95,73, + 109,112,111,114,116,76,111,99,107,67,111,110,116,101,120,116, + 46,95,95,101,110,116,101,114,95,95,99,4,0,0,0,0, + 0,0,0,0,0,0,0,4,0,0,0,2,0,0,0,67, + 0,0,0,115,12,0,0,0,116,0,160,1,161,0,1,0, + 100,1,83,0,41,2,122,60,82,101,108,101,97,115,101,32, + 116,104,101,32,105,109,112,111,114,116,32,108,111,99,107,32, + 114,101,103,97,114,100,108,101,115,115,32,111,102,32,97,110, + 121,32,114,97,105,115,101,100,32,101,120,99,101,112,116,105, + 111,110,115,46,78,41,2,114,56,0,0,0,114,58,0,0, + 0,41,4,114,30,0,0,0,218,8,101,120,99,95,116,121, + 112,101,218,9,101,120,99,95,118,97,108,117,101,218,13,101, + 120,99,95,116,114,97,99,101,98,97,99,107,114,10,0,0, + 0,114,10,0,0,0,114,11,0,0,0,114,55,0,0,0, + 99,3,0,0,115,2,0,0,0,0,2,122,27,95,73,109, + 112,111,114,116,76,111,99,107,67,111,110,116,101,120,116,46, + 95,95,101,120,105,116,95,95,78,41,6,114,1,0,0,0, + 114,0,0,0,0,114,2,0,0,0,114,3,0,0,0,114, + 53,0,0,0,114,55,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,11,0,0,0,114,182,0, + 0,0,91,3,0,0,115,6,0,0,0,8,2,4,2,8, + 4,114,182,0,0,0,99,3,0,0,0,0,0,0,0,0, + 0,0,0,5,0,0,0,5,0,0,0,67,0,0,0,115, + 66,0,0,0,124,1,160,0,100,1,124,2,100,2,24,0, + 161,2,125,3,116,1,124,3,131,1,124,2,107,0,114,36, + 116,2,100,3,131,1,130,1,124,3,100,4,25,0,125,4, + 124,0,114,62,124,4,155,0,100,1,124,0,155,0,157,3, + 83,0,124,4,83,0,41,5,122,50,82,101,115,111,108,118, + 101,32,97,32,114,101,108,97,116,105,118,101,32,109,111,100, + 117,108,101,32,110,97,109,101,32,116,111,32,97,110,32,97, + 98,115,111,108,117,116,101,32,111,110,101,46,114,132,0,0, + 0,114,37,0,0,0,122,50,97,116,116,101,109,112,116,101, + 100,32,114,101,108,97,116,105,118,101,32,105,109,112,111,114, + 116,32,98,101,121,111,110,100,32,116,111,112,45,108,101,118, + 101,108,32,112,97,99,107,97,103,101,114,22,0,0,0,41, + 3,218,6,114,115,112,108,105,116,218,3,108,101,110,114,79, + 0,0,0,41,5,114,17,0,0,0,218,7,112,97,99,107, + 97,103,101,218,5,108,101,118,101,108,90,4,98,105,116,115, + 90,4,98,97,115,101,114,10,0,0,0,114,10,0,0,0, + 114,11,0,0,0,218,13,95,114,101,115,111,108,118,101,95, + 110,97,109,101,104,3,0,0,115,10,0,0,0,0,2,16, + 1,12,1,8,1,8,1,114,190,0,0,0,99,3,0,0, + 0,0,0,0,0,0,0,0,0,4,0,0,0,4,0,0, + 0,67,0,0,0,115,34,0,0,0,124,0,160,0,124,1, + 124,2,161,2,125,3,124,3,100,0,107,8,114,24,100,0, + 83,0,116,1,124,1,124,3,131,2,83,0,114,13,0,0, + 0,41,2,114,171,0,0,0,114,91,0,0,0,41,4,218, + 6,102,105,110,100,101,114,114,17,0,0,0,114,168,0,0, + 0,114,111,0,0,0,114,10,0,0,0,114,10,0,0,0, + 114,11,0,0,0,218,17,95,102,105,110,100,95,115,112,101, + 99,95,108,101,103,97,99,121,113,3,0,0,115,8,0,0, + 0,0,3,12,1,8,1,4,1,114,192,0,0,0,99,3, + 0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,10, + 0,0,0,67,0,0,0,115,12,1,0,0,116,0,106,1, + 125,3,124,3,100,1,107,8,114,22,116,2,100,2,131,1, + 130,1,124,3,115,38,116,3,160,4,100,3,116,5,161,2, + 1,0,124,0,116,0,106,6,107,6,125,4,124,3,68,0, + 93,210,125,5,116,7,131,0,143,84,1,0,122,10,124,5, + 106,8,125,6,87,0,110,54,4,0,116,9,107,10,114,128, + 1,0,1,0,1,0,116,10,124,5,124,0,124,1,131,3, + 125,7,124,7,100,1,107,8,114,124,89,0,87,0,53,0, + 81,0,82,0,163,0,113,52,89,0,110,14,88,0,124,6, + 124,0,124,1,124,2,131,3,125,7,87,0,53,0,81,0, + 82,0,88,0,124,7,100,1,107,9,114,52,124,4,144,0, + 115,254,124,0,116,0,106,6,107,6,144,0,114,254,116,0, + 106,6,124,0,25,0,125,8,122,10,124,8,106,11,125,9, + 87,0,110,28,4,0,116,9,107,10,114,226,1,0,1,0, + 
1,0,124,7,6,0,89,0,2,0,1,0,83,0,88,0, + 124,9,100,1,107,8,114,244,124,7,2,0,1,0,83,0, + 124,9,2,0,1,0,83,0,113,52,124,7,2,0,1,0, + 83,0,113,52,100,1,83,0,41,4,122,21,70,105,110,100, + 32,97,32,109,111,100,117,108,101,39,115,32,115,112,101,99, + 46,78,122,53,115,121,115,46,109,101,116,97,95,112,97,116, + 104,32,105,115,32,78,111,110,101,44,32,80,121,116,104,111, + 110,32,105,115,32,108,105,107,101,108,121,32,115,104,117,116, + 116,105,110,103,32,100,111,119,110,122,22,115,121,115,46,109, + 101,116,97,95,112,97,116,104,32,105,115,32,101,109,112,116, + 121,41,12,114,15,0,0,0,218,9,109,101,116,97,95,112, + 97,116,104,114,79,0,0,0,218,9,95,119,97,114,110,105, + 110,103,115,218,4,119,97,114,110,218,13,73,109,112,111,114, + 116,87,97,114,110,105,110,103,114,92,0,0,0,114,182,0, + 0,0,114,170,0,0,0,114,108,0,0,0,114,192,0,0, + 0,114,107,0,0,0,41,10,114,17,0,0,0,114,168,0, + 0,0,114,169,0,0,0,114,193,0,0,0,90,9,105,115, + 95,114,101,108,111,97,100,114,191,0,0,0,114,170,0,0, + 0,114,95,0,0,0,114,96,0,0,0,114,107,0,0,0, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,218, + 10,95,102,105,110,100,95,115,112,101,99,122,3,0,0,115, + 54,0,0,0,0,2,6,1,8,2,8,3,4,1,12,5, + 10,1,8,1,8,1,2,1,10,1,14,1,12,1,8,1, + 20,2,22,1,8,2,18,1,10,1,2,1,10,1,14,4, + 14,2,8,1,8,2,10,2,10,2,114,197,0,0,0,99, + 3,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0, + 4,0,0,0,67,0,0,0,115,108,0,0,0,116,0,124, + 0,116,1,131,2,115,28,116,2,100,1,116,3,124,0,131, + 1,155,0,157,2,131,1,130,1,124,2,100,2,107,0,114, + 44,116,4,100,3,131,1,130,1,124,2,100,2,107,4,114, + 84,116,0,124,1,116,1,131,2,115,72,116,2,100,4,131, + 1,130,1,110,12,124,1,115,84,116,5,100,5,131,1,130, + 1,124,0,115,104,124,2,100,2,107,2,114,104,116,4,100, + 6,131,1,130,1,100,7,83,0,41,8,122,28,86,101,114, + 105,102,121,32,97,114,103,117,109,101,110,116,115,32,97,114, + 101,32,34,115,97,110,101,34,46,122,29,109,111,100,117,108, + 101,32,110,97,109,101,32,109,117,115,116,32,98,101,32,115, + 116,114,44,32,110,111,116,32,114,22,0,0,0,122,18,108, + 101,118,101,108,32,109,117,115,116,32,98,101,32,62,61,32, + 48,122,31,95,95,112,97,99,107,97,103,101,95,95,32,110, + 111,116,32,115,101,116,32,116,111,32,97,32,115,116,114,105, + 110,103,122,54,97,116,116,101,109,112,116,101,100,32,114,101, + 108,97,116,105,118,101,32,105,109,112,111,114,116,32,119,105, + 116,104,32,110,111,32,107,110,111,119,110,32,112,97,114,101, + 110,116,32,112,97,99,107,97,103,101,122,17,69,109,112,116, + 121,32,109,111,100,117,108,101,32,110,97,109,101,78,41,6, + 218,10,105,115,105,110,115,116,97,110,99,101,218,3,115,116, + 114,218,9,84,121,112,101,69,114,114,111,114,114,14,0,0, + 0,218,10,86,97,108,117,101,69,114,114,111,114,114,79,0, + 0,0,169,3,114,17,0,0,0,114,188,0,0,0,114,189, + 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,218,13,95,115,97,110,105,116,121,95,99,104,101,99, + 107,169,3,0,0,115,22,0,0,0,0,2,10,1,18,1, + 8,1,8,1,8,1,10,1,10,1,4,1,8,2,12,1, + 114,203,0,0,0,250,16,78,111,32,109,111,100,117,108,101, + 32,110,97,109,101,100,32,122,4,123,33,114,125,99,2,0, + 0,0,0,0,0,0,0,0,0,0,8,0,0,0,8,0, + 0,0,67,0,0,0,115,222,0,0,0,100,0,125,2,124, + 0,160,0,100,1,161,1,100,2,25,0,125,3,124,3,114, + 136,124,3,116,1,106,2,107,7,114,42,116,3,124,1,124, + 3,131,2,1,0,124,0,116,1,106,2,107,6,114,62,116, + 1,106,2,124,0,25,0,83,0,116,1,106,2,124,3,25, + 0,125,4,122,10,124,4,106,4,125,2,87,0,110,52,4, + 0,116,5,107,10,114,134,1,0,1,0,1,0,100,3,124, + 0,155,2,100,4,124,3,155,2,100,5,157,5,125,5,116, + 6,124,5,124,0,100,6,141,2,100,0,130,2,89,0,110, + 2,88,0,116,7,124,0,124,2,131,2,125,6,124,6,100, + 
0,107,8,114,174,116,6,100,3,124,0,155,2,157,2,124, + 0,100,6,141,2,130,1,110,8,116,8,124,6,131,1,125, + 7,124,3,114,218,116,1,106,2,124,3,25,0,125,4,116, + 9,124,4,124,0,160,0,100,1,161,1,100,7,25,0,124, + 7,131,3,1,0,124,7,83,0,41,8,78,114,132,0,0, + 0,114,22,0,0,0,114,204,0,0,0,122,2,59,32,122, + 17,32,105,115,32,110,111,116,32,97,32,112,97,99,107,97, + 103,101,114,16,0,0,0,233,2,0,0,0,41,10,114,133, + 0,0,0,114,15,0,0,0,114,92,0,0,0,114,66,0, + 0,0,114,145,0,0,0,114,108,0,0,0,218,19,77,111, + 100,117,108,101,78,111,116,70,111,117,110,100,69,114,114,111, + 114,114,197,0,0,0,114,162,0,0,0,114,5,0,0,0, + 41,8,114,17,0,0,0,218,7,105,109,112,111,114,116,95, + 114,168,0,0,0,114,134,0,0,0,90,13,112,97,114,101, + 110,116,95,109,111,100,117,108,101,114,160,0,0,0,114,95, + 0,0,0,114,96,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,23,95,102,105,110,100,95,97, + 110,100,95,108,111,97,100,95,117,110,108,111,99,107,101,100, + 188,3,0,0,115,42,0,0,0,0,1,4,1,14,1,4, + 1,10,1,10,2,10,1,10,1,10,1,2,1,10,1,14, + 1,18,1,20,1,10,1,8,1,20,2,8,1,4,2,10, + 1,22,1,114,208,0,0,0,99,2,0,0,0,0,0,0, + 0,0,0,0,0,4,0,0,0,10,0,0,0,67,0,0, + 0,115,108,0,0,0,116,0,124,0,131,1,143,50,1,0, + 116,1,106,2,160,3,124,0,116,4,161,2,125,2,124,2, + 116,4,107,8,114,54,116,5,124,0,124,1,131,2,87,0, + 2,0,53,0,81,0,82,0,163,0,83,0,87,0,53,0, + 81,0,82,0,88,0,124,2,100,1,107,8,114,96,100,2, + 124,0,155,0,100,3,157,3,125,3,116,6,124,3,124,0, + 100,4,141,2,130,1,116,7,124,0,131,1,1,0,124,2, + 83,0,41,5,122,25,70,105,110,100,32,97,110,100,32,108, + 111,97,100,32,116,104,101,32,109,111,100,117,108,101,46,78, + 122,10,105,109,112,111,114,116,32,111,102,32,122,28,32,104, + 97,108,116,101,100,59,32,78,111,110,101,32,105,110,32,115, + 121,115,46,109,111,100,117,108,101,115,114,16,0,0,0,41, + 8,114,49,0,0,0,114,15,0,0,0,114,92,0,0,0, + 114,34,0,0,0,218,14,95,78,69,69,68,83,95,76,79, + 65,68,73,78,71,114,208,0,0,0,114,206,0,0,0,114, + 64,0,0,0,41,4,114,17,0,0,0,114,207,0,0,0, + 114,96,0,0,0,114,75,0,0,0,114,10,0,0,0,114, + 10,0,0,0,114,11,0,0,0,218,14,95,102,105,110,100, + 95,97,110,100,95,108,111,97,100,218,3,0,0,115,18,0, + 0,0,0,2,10,1,14,1,8,1,32,2,8,1,12,2, + 12,2,8,1,114,210,0,0,0,114,22,0,0,0,99,3, + 0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,4, + 0,0,0,67,0,0,0,115,42,0,0,0,116,0,124,0, + 124,1,124,2,131,3,1,0,124,2,100,1,107,4,114,32, + 116,1,124,0,124,1,124,2,131,3,125,0,116,2,124,0, + 116,3,131,2,83,0,41,2,97,50,1,0,0,73,109,112, + 111,114,116,32,97,110,100,32,114,101,116,117,114,110,32,116, + 104,101,32,109,111,100,117,108,101,32,98,97,115,101,100,32, + 111,110,32,105,116,115,32,110,97,109,101,44,32,116,104,101, + 32,112,97,99,107,97,103,101,32,116,104,101,32,99,97,108, + 108,32,105,115,10,32,32,32,32,98,101,105,110,103,32,109, + 97,100,101,32,102,114,111,109,44,32,97,110,100,32,116,104, + 101,32,108,101,118,101,108,32,97,100,106,117,115,116,109,101, + 110,116,46,10,10,32,32,32,32,84,104,105,115,32,102,117, + 110,99,116,105,111,110,32,114,101,112,114,101,115,101,110,116, + 115,32,116,104,101,32,103,114,101,97,116,101,115,116,32,99, + 111,109,109,111,110,32,100,101,110,111,109,105,110,97,116,111, + 114,32,111,102,32,102,117,110,99,116,105,111,110,97,108,105, + 116,121,10,32,32,32,32,98,101,116,119,101,101,110,32,105, + 109,112,111,114,116,95,109,111,100,117,108,101,32,97,110,100, + 32,95,95,105,109,112,111,114,116,95,95,46,32,84,104,105, + 115,32,105,110,99,108,117,100,101,115,32,115,101,116,116,105, + 110,103,32,95,95,112,97,99,107,97,103,101,95,95,32,105, + 102,10,32,32,32,32,116,104,101,32,108,111,97,100,101,114, + 
32,100,105,100,32,110,111,116,46,10,10,32,32,32,32,114, + 22,0,0,0,41,4,114,203,0,0,0,114,190,0,0,0, + 114,210,0,0,0,218,11,95,103,99,100,95,105,109,112,111, + 114,116,114,202,0,0,0,114,10,0,0,0,114,10,0,0, + 0,114,11,0,0,0,114,211,0,0,0,234,3,0,0,115, + 8,0,0,0,0,9,12,1,8,1,12,1,114,211,0,0, + 0,169,1,218,9,114,101,99,117,114,115,105,118,101,99,3, + 0,0,0,0,0,0,0,1,0,0,0,8,0,0,0,11, + 0,0,0,67,0,0,0,115,228,0,0,0,124,1,68,0, + 93,218,125,4,116,0,124,4,116,1,131,2,115,66,124,3, + 114,34,124,0,106,2,100,1,23,0,125,5,110,4,100,2, + 125,5,116,3,100,3,124,5,155,0,100,4,116,4,124,4, + 131,1,106,2,155,0,157,4,131,1,130,1,113,4,124,4, + 100,5,107,2,114,108,124,3,115,222,116,5,124,0,100,6, + 131,2,114,222,116,6,124,0,124,0,106,7,124,2,100,7, + 100,8,141,4,1,0,113,4,116,5,124,0,124,4,131,2, + 115,4,124,0,106,2,155,0,100,9,124,4,155,0,157,3, + 125,6,122,14,116,8,124,2,124,6,131,2,1,0,87,0, + 113,4,4,0,116,9,107,10,114,220,1,0,125,7,1,0, + 122,42,124,7,106,10,124,6,107,2,114,202,116,11,106,12, + 160,13,124,6,116,14,161,2,100,10,107,9,114,202,87,0, + 89,0,162,8,113,4,130,0,87,0,53,0,100,10,125,7, + 126,7,88,0,89,0,113,4,88,0,113,4,124,0,83,0, + 41,11,122,238,70,105,103,117,114,101,32,111,117,116,32,119, + 104,97,116,32,95,95,105,109,112,111,114,116,95,95,32,115, + 104,111,117,108,100,32,114,101,116,117,114,110,46,10,10,32, + 32,32,32,84,104,101,32,105,109,112,111,114,116,95,32,112, + 97,114,97,109,101,116,101,114,32,105,115,32,97,32,99,97, + 108,108,97,98,108,101,32,119,104,105,99,104,32,116,97,107, + 101,115,32,116,104,101,32,110,97,109,101,32,111,102,32,109, + 111,100,117,108,101,32,116,111,10,32,32,32,32,105,109,112, + 111,114,116,46,32,73,116,32,105,115,32,114,101,113,117,105, + 114,101,100,32,116,111,32,100,101,99,111,117,112,108,101,32, + 116,104,101,32,102,117,110,99,116,105,111,110,32,102,114,111, + 109,32,97,115,115,117,109,105,110,103,32,105,109,112,111,114, + 116,108,105,98,39,115,10,32,32,32,32,105,109,112,111,114, + 116,32,105,109,112,108,101,109,101,110,116,97,116,105,111,110, + 32,105,115,32,100,101,115,105,114,101,100,46,10,10,32,32, + 32,32,122,8,46,95,95,97,108,108,95,95,122,13,96,96, + 102,114,111,109,32,108,105,115,116,39,39,122,8,73,116,101, + 109,32,105,110,32,122,18,32,109,117,115,116,32,98,101,32, + 115,116,114,44,32,110,111,116,32,250,1,42,218,7,95,95, + 97,108,108,95,95,84,114,212,0,0,0,114,132,0,0,0, + 78,41,15,114,198,0,0,0,114,199,0,0,0,114,1,0, + 0,0,114,200,0,0,0,114,14,0,0,0,114,4,0,0, + 0,218,16,95,104,97,110,100,108,101,95,102,114,111,109,108, + 105,115,116,114,215,0,0,0,114,66,0,0,0,114,206,0, + 0,0,114,17,0,0,0,114,15,0,0,0,114,92,0,0, + 0,114,34,0,0,0,114,209,0,0,0,41,8,114,96,0, + 0,0,218,8,102,114,111,109,108,105,115,116,114,207,0,0, + 0,114,213,0,0,0,218,1,120,90,5,119,104,101,114,101, + 90,9,102,114,111,109,95,110,97,109,101,90,3,101,120,99, + 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,114, + 216,0,0,0,249,3,0,0,115,44,0,0,0,0,10,8, + 1,10,1,4,1,12,2,4,1,28,2,8,1,14,1,10, + 1,2,255,8,2,10,1,16,1,2,1,14,1,16,4,10, + 1,16,255,2,2,8,1,22,1,114,216,0,0,0,99,1, + 0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,6, + 0,0,0,67,0,0,0,115,146,0,0,0,124,0,160,0, + 100,1,161,1,125,1,124,0,160,0,100,2,161,1,125,2, + 124,1,100,3,107,9,114,82,124,2,100,3,107,9,114,78, + 124,1,124,2,106,1,107,3,114,78,116,2,106,3,100,4, + 124,1,155,2,100,5,124,2,106,1,155,2,100,6,157,5, + 116,4,100,7,100,8,141,3,1,0,124,1,83,0,124,2, + 100,3,107,9,114,96,124,2,106,1,83,0,116,2,106,3, + 100,9,116,4,100,7,100,8,141,3,1,0,124,0,100,10, + 25,0,125,1,100,11,124,0,107,7,114,142,124,1,160,5, + 
100,12,161,1,100,13,25,0,125,1,124,1,83,0,41,14, + 122,167,67,97,108,99,117,108,97,116,101,32,119,104,97,116, + 32,95,95,112,97,99,107,97,103,101,95,95,32,115,104,111, + 117,108,100,32,98,101,46,10,10,32,32,32,32,95,95,112, + 97,99,107,97,103,101,95,95,32,105,115,32,110,111,116,32, + 103,117,97,114,97,110,116,101,101,100,32,116,111,32,98,101, + 32,100,101,102,105,110,101,100,32,111,114,32,99,111,117,108, + 100,32,98,101,32,115,101,116,32,116,111,32,78,111,110,101, + 10,32,32,32,32,116,111,32,114,101,112,114,101,115,101,110, + 116,32,116,104,97,116,32,105,116,115,32,112,114,111,112,101, + 114,32,118,97,108,117,101,32,105,115,32,117,110,107,110,111, + 119,110,46,10,10,32,32,32,32,114,149,0,0,0,114,107, + 0,0,0,78,122,32,95,95,112,97,99,107,97,103,101,95, + 95,32,33,61,32,95,95,115,112,101,99,95,95,46,112,97, + 114,101,110,116,32,40,122,4,32,33,61,32,114,122,0,0, + 0,233,3,0,0,0,41,1,90,10,115,116,97,99,107,108, + 101,118,101,108,122,89,99,97,110,39,116,32,114,101,115,111, + 108,118,101,32,112,97,99,107,97,103,101,32,102,114,111,109, + 32,95,95,115,112,101,99,95,95,32,111,114,32,95,95,112, + 97,99,107,97,103,101,95,95,44,32,102,97,108,108,105,110, + 103,32,98,97,99,107,32,111,110,32,95,95,110,97,109,101, + 95,95,32,97,110,100,32,95,95,112,97,116,104,95,95,114, + 1,0,0,0,114,145,0,0,0,114,132,0,0,0,114,22, + 0,0,0,41,6,114,34,0,0,0,114,134,0,0,0,114, + 194,0,0,0,114,195,0,0,0,114,196,0,0,0,114,133, + 0,0,0,41,3,218,7,103,108,111,98,97,108,115,114,188, + 0,0,0,114,95,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,11,0,0,0,218,17,95,99,97,108,99,95,95, + 95,112,97,99,107,97,103,101,95,95,30,4,0,0,115,38, + 0,0,0,0,7,10,1,10,1,8,1,18,1,22,2,2, + 0,2,254,6,3,4,1,8,1,6,2,6,2,2,0,2, + 254,6,3,8,1,8,1,14,1,114,221,0,0,0,114,10, + 0,0,0,99,5,0,0,0,0,0,0,0,0,0,0,0, + 9,0,0,0,5,0,0,0,67,0,0,0,115,180,0,0, + 0,124,4,100,1,107,2,114,18,116,0,124,0,131,1,125, + 5,110,36,124,1,100,2,107,9,114,30,124,1,110,2,105, + 0,125,6,116,1,124,6,131,1,125,7,116,0,124,0,124, + 7,124,4,131,3,125,5,124,3,115,150,124,4,100,1,107, + 2,114,84,116,0,124,0,160,2,100,3,161,1,100,1,25, + 0,131,1,83,0,124,0,115,92,124,5,83,0,116,3,124, + 0,131,1,116,3,124,0,160,2,100,3,161,1,100,1,25, + 0,131,1,24,0,125,8,116,4,106,5,124,5,106,6,100, + 2,116,3,124,5,106,6,131,1,124,8,24,0,133,2,25, + 0,25,0,83,0,110,26,116,7,124,5,100,4,131,2,114, + 172,116,8,124,5,124,3,116,0,131,3,83,0,124,5,83, + 0,100,2,83,0,41,5,97,215,1,0,0,73,109,112,111, + 114,116,32,97,32,109,111,100,117,108,101,46,10,10,32,32, + 32,32,84,104,101,32,39,103,108,111,98,97,108,115,39,32, + 97,114,103,117,109,101,110,116,32,105,115,32,117,115,101,100, + 32,116,111,32,105,110,102,101,114,32,119,104,101,114,101,32, + 116,104,101,32,105,109,112,111,114,116,32,105,115,32,111,99, + 99,117,114,114,105,110,103,32,102,114,111,109,10,32,32,32, + 32,116,111,32,104,97,110,100,108,101,32,114,101,108,97,116, + 105,118,101,32,105,109,112,111,114,116,115,46,32,84,104,101, + 32,39,108,111,99,97,108,115,39,32,97,114,103,117,109,101, + 110,116,32,105,115,32,105,103,110,111,114,101,100,46,32,84, + 104,101,10,32,32,32,32,39,102,114,111,109,108,105,115,116, + 39,32,97,114,103,117,109,101,110,116,32,115,112,101,99,105, + 102,105,101,115,32,119,104,97,116,32,115,104,111,117,108,100, + 32,101,120,105,115,116,32,97,115,32,97,116,116,114,105,98, + 117,116,101,115,32,111,110,32,116,104,101,32,109,111,100,117, + 108,101,10,32,32,32,32,98,101,105,110,103,32,105,109,112, + 111,114,116,101,100,32,40,101,46,103,46,32,96,96,102,114, + 111,109,32,109,111,100,117,108,101,32,105,109,112,111,114,116, + 
32,60,102,114,111,109,108,105,115,116,62,96,96,41,46,32, + 32,84,104,101,32,39,108,101,118,101,108,39,10,32,32,32, + 32,97,114,103,117,109,101,110,116,32,114,101,112,114,101,115, + 101,110,116,115,32,116,104,101,32,112,97,99,107,97,103,101, + 32,108,111,99,97,116,105,111,110,32,116,111,32,105,109,112, + 111,114,116,32,102,114,111,109,32,105,110,32,97,32,114,101, + 108,97,116,105,118,101,10,32,32,32,32,105,109,112,111,114, + 116,32,40,101,46,103,46,32,96,96,102,114,111,109,32,46, + 46,112,107,103,32,105,109,112,111,114,116,32,109,111,100,96, + 96,32,119,111,117,108,100,32,104,97,118,101,32,97,32,39, + 108,101,118,101,108,39,32,111,102,32,50,41,46,10,10,32, + 32,32,32,114,22,0,0,0,78,114,132,0,0,0,114,145, + 0,0,0,41,9,114,211,0,0,0,114,221,0,0,0,218, + 9,112,97,114,116,105,116,105,111,110,114,187,0,0,0,114, + 15,0,0,0,114,92,0,0,0,114,1,0,0,0,114,4, + 0,0,0,114,216,0,0,0,41,9,114,17,0,0,0,114, + 220,0,0,0,218,6,108,111,99,97,108,115,114,217,0,0, + 0,114,189,0,0,0,114,96,0,0,0,90,8,103,108,111, + 98,97,108,115,95,114,188,0,0,0,90,7,99,117,116,95, + 111,102,102,114,10,0,0,0,114,10,0,0,0,114,11,0, + 0,0,218,10,95,95,105,109,112,111,114,116,95,95,57,4, + 0,0,115,30,0,0,0,0,11,8,1,10,2,16,1,8, + 1,12,1,4,3,8,1,18,1,4,1,4,4,26,3,32, + 1,10,1,12,2,114,224,0,0,0,99,1,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, + 0,0,0,115,38,0,0,0,116,0,160,1,124,0,161,1, + 125,1,124,1,100,0,107,8,114,30,116,2,100,1,124,0, + 23,0,131,1,130,1,116,3,124,1,131,1,83,0,41,2, + 78,122,25,110,111,32,98,117,105,108,116,45,105,110,32,109, + 111,100,117,108,101,32,110,97,109,101,100,32,41,4,114,163, + 0,0,0,114,170,0,0,0,114,79,0,0,0,114,162,0, + 0,0,41,2,114,17,0,0,0,114,95,0,0,0,114,10, + 0,0,0,114,10,0,0,0,114,11,0,0,0,218,18,95, + 98,117,105,108,116,105,110,95,102,114,111,109,95,110,97,109, + 101,94,4,0,0,115,8,0,0,0,0,1,10,1,8,1, + 12,1,114,225,0,0,0,99,2,0,0,0,0,0,0,0, + 0,0,0,0,10,0,0,0,5,0,0,0,67,0,0,0, + 115,166,0,0,0,124,1,97,0,124,0,97,1,116,2,116, + 1,131,1,125,2,116,1,106,3,160,4,161,0,68,0,93, + 72,92,2,125,3,125,4,116,5,124,4,124,2,131,2,114, + 26,124,3,116,1,106,6,107,6,114,60,116,7,125,5,110, + 18,116,0,160,8,124,3,161,1,114,26,116,9,125,5,110, + 2,113,26,116,10,124,4,124,5,131,2,125,6,116,11,124, + 6,124,4,131,2,1,0,113,26,116,1,106,3,116,12,25, + 0,125,7,100,1,68,0,93,46,125,8,124,8,116,1,106, + 3,107,7,114,138,116,13,124,8,131,1,125,9,110,10,116, + 1,106,3,124,8,25,0,125,9,116,14,124,7,124,8,124, + 9,131,3,1,0,113,114,100,2,83,0,41,3,122,250,83, + 101,116,117,112,32,105,109,112,111,114,116,108,105,98,32,98, + 121,32,105,109,112,111,114,116,105,110,103,32,110,101,101,100, + 101,100,32,98,117,105,108,116,45,105,110,32,109,111,100,117, + 108,101,115,32,97,110,100,32,105,110,106,101,99,116,105,110, + 103,32,116,104,101,109,10,32,32,32,32,105,110,116,111,32, + 116,104,101,32,103,108,111,98,97,108,32,110,97,109,101,115, + 112,97,99,101,46,10,10,32,32,32,32,65,115,32,115,121, + 115,32,105,115,32,110,101,101,100,101,100,32,102,111,114,32, + 115,121,115,46,109,111,100,117,108,101,115,32,97,99,99,101, + 115,115,32,97,110,100,32,95,105,109,112,32,105,115,32,110, + 101,101,100,101,100,32,116,111,32,108,111,97,100,32,98,117, + 105,108,116,45,105,110,10,32,32,32,32,109,111,100,117,108, + 101,115,44,32,116,104,111,115,101,32,116,119,111,32,109,111, + 100,117,108,101,115,32,109,117,115,116,32,98,101,32,101,120, + 112,108,105,99,105,116,108,121,32,112,97,115,115,101,100,32, + 105,110,46,10,10,32,32,32,32,41,3,114,23,0,0,0, + 114,194,0,0,0,114,63,0,0,0,78,41,15,114,56,0, + 0,0,114,15,0,0,0,114,14,0,0,0,114,92,0,0, + 
0,218,5,105,116,101,109,115,114,198,0,0,0,114,78,0, + 0,0,114,163,0,0,0,114,88,0,0,0,114,177,0,0, + 0,114,146,0,0,0,114,152,0,0,0,114,1,0,0,0, + 114,225,0,0,0,114,5,0,0,0,41,10,218,10,115,121, + 115,95,109,111,100,117,108,101,218,11,95,105,109,112,95,109, + 111,100,117,108,101,90,11,109,111,100,117,108,101,95,116,121, + 112,101,114,17,0,0,0,114,96,0,0,0,114,111,0,0, + 0,114,95,0,0,0,90,11,115,101,108,102,95,109,111,100, + 117,108,101,90,12,98,117,105,108,116,105,110,95,110,97,109, + 101,90,14,98,117,105,108,116,105,110,95,109,111,100,117,108, + 101,114,10,0,0,0,114,10,0,0,0,114,11,0,0,0, + 218,6,95,115,101,116,117,112,101,4,0,0,115,36,0,0, + 0,0,9,4,1,4,3,8,1,18,1,10,1,10,1,6, + 1,10,1,6,2,2,1,10,1,12,3,10,1,8,1,10, + 1,10,2,10,1,114,229,0,0,0,99,2,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, + 0,0,0,115,38,0,0,0,116,0,124,0,124,1,131,2, + 1,0,116,1,106,2,160,3,116,4,161,1,1,0,116,1, + 106,2,160,3,116,5,161,1,1,0,100,1,83,0,41,2, + 122,48,73,110,115,116,97,108,108,32,105,109,112,111,114,116, + 101,114,115,32,102,111,114,32,98,117,105,108,116,105,110,32, + 97,110,100,32,102,114,111,122,101,110,32,109,111,100,117,108, + 101,115,78,41,6,114,229,0,0,0,114,15,0,0,0,114, + 193,0,0,0,114,123,0,0,0,114,163,0,0,0,114,177, + 0,0,0,41,2,114,227,0,0,0,114,228,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,218,8, + 95,105,110,115,116,97,108,108,136,4,0,0,115,6,0,0, + 0,0,2,10,2,12,1,114,230,0,0,0,99,0,0,0, + 0,0,0,0,0,0,0,0,0,1,0,0,0,4,0,0, + 0,67,0,0,0,115,32,0,0,0,100,1,100,2,108,0, + 125,0,124,0,97,1,124,0,160,2,116,3,106,4,116,5, + 25,0,161,1,1,0,100,2,83,0,41,3,122,57,73,110, + 115,116,97,108,108,32,105,109,112,111,114,116,101,114,115,32, + 116,104,97,116,32,114,101,113,117,105,114,101,32,101,120,116, + 101,114,110,97,108,32,102,105,108,101,115,121,115,116,101,109, + 32,97,99,99,101,115,115,114,22,0,0,0,78,41,6,218, + 26,95,102,114,111,122,101,110,95,105,109,112,111,114,116,108, + 105,98,95,101,120,116,101,114,110,97,108,114,130,0,0,0, + 114,230,0,0,0,114,15,0,0,0,114,92,0,0,0,114, + 1,0,0,0,41,1,114,231,0,0,0,114,10,0,0,0, + 114,10,0,0,0,114,11,0,0,0,218,27,95,105,110,115, + 116,97,108,108,95,101,120,116,101,114,110,97,108,95,105,109, + 112,111,114,116,101,114,115,144,4,0,0,115,6,0,0,0, + 0,3,8,1,4,1,114,232,0,0,0,41,2,78,78,41, + 1,78,41,2,78,114,22,0,0,0,41,4,78,78,114,10, + 0,0,0,114,22,0,0,0,41,50,114,3,0,0,0,114, + 130,0,0,0,114,12,0,0,0,114,18,0,0,0,114,59, + 0,0,0,114,33,0,0,0,114,42,0,0,0,114,19,0, + 0,0,114,20,0,0,0,114,48,0,0,0,114,49,0,0, + 0,114,52,0,0,0,114,64,0,0,0,114,66,0,0,0, + 114,76,0,0,0,114,86,0,0,0,114,90,0,0,0,114, + 97,0,0,0,114,113,0,0,0,114,114,0,0,0,114,91, + 0,0,0,114,146,0,0,0,114,152,0,0,0,114,156,0, + 0,0,114,109,0,0,0,114,93,0,0,0,114,161,0,0, + 0,114,162,0,0,0,114,94,0,0,0,114,163,0,0,0, + 114,177,0,0,0,114,182,0,0,0,114,190,0,0,0,114, + 192,0,0,0,114,197,0,0,0,114,203,0,0,0,90,15, + 95,69,82,82,95,77,83,71,95,80,82,69,70,73,88,90, + 8,95,69,82,82,95,77,83,71,114,208,0,0,0,218,6, + 111,98,106,101,99,116,114,209,0,0,0,114,210,0,0,0, + 114,211,0,0,0,114,216,0,0,0,114,221,0,0,0,114, + 224,0,0,0,114,225,0,0,0,114,229,0,0,0,114,230, + 0,0,0,114,232,0,0,0,114,10,0,0,0,114,10,0, + 0,0,114,10,0,0,0,114,11,0,0,0,218,8,60,109, + 111,100,117,108,101,62,1,0,0,0,115,94,0,0,0,4, + 24,4,2,8,8,8,8,4,2,4,3,16,4,14,68,14, + 21,14,16,8,37,8,17,8,11,14,8,8,11,8,12,8, + 16,8,36,14,100,16,26,10,45,14,72,8,17,8,17,8, + 30,8,37,8,42,8,15,14,75,14,78,14,13,8,9,8, + 9,10,47,8,16,4,1,8,2,8,27,6,3,8,16,10, + 15,14,37,8,27,10,37,8,7,8,35,8,8, +}; diff --git a/Python/initconfig.c 
b/Python/initconfig.c index bbc2ebb09fd039..67f6777d3b1d9e 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -241,7 +241,7 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS #define FROM_STRING(STR) \ ((STR != NULL) ? \ PyUnicode_FromString(STR) \ - : (Py_INCREF(Py_None), Py_None)) + : Py_NewRef(Py_None)) #define SET_ITEM_STR(VAR) \ SET_ITEM(#VAR, FROM_STRING(VAR)) @@ -780,7 +780,6 @@ _PyConfig_InitCompatConfig(PyConfig *config) config->check_hash_pycs_mode = NULL; config->pathconfig_warnings = -1; config->_init_main = 1; - config->_isolated_interpreter = 0; #ifdef MS_WINDOWS config->legacy_windows_stdio = -1; #endif @@ -1015,7 +1014,6 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_WSTR_ATTR(check_hash_pycs_mode); COPY_ATTR(pathconfig_warnings); COPY_ATTR(_init_main); - COPY_ATTR(_isolated_interpreter); COPY_ATTR(use_frozen_modules); COPY_ATTR(safe_path); COPY_WSTRLIST(orig_argv); @@ -1056,7 +1054,7 @@ _PyConfig_AsDict(const PyConfig *config) #define FROM_WSTRING(STR) \ ((STR != NULL) ? \ PyUnicode_FromWideChar(STR, -1) \ - : (Py_INCREF(Py_None), Py_None)) + : Py_NewRef(Py_None)) #define SET_ITEM_WSTR(ATTR) \ SET_ITEM(#ATTR, FROM_WSTRING(config->ATTR)) #define SET_ITEM_WSTRLIST(LIST) \ @@ -1123,7 +1121,6 @@ _PyConfig_AsDict(const PyConfig *config) SET_ITEM_WSTR(check_hash_pycs_mode); SET_ITEM_INT(pathconfig_warnings); SET_ITEM_INT(_init_main); - SET_ITEM_INT(_isolated_interpreter); SET_ITEM_WSTRLIST(orig_argv); SET_ITEM_INT(use_frozen_modules); SET_ITEM_INT(safe_path); @@ -1418,7 +1415,6 @@ _PyConfig_FromDict(PyConfig *config, PyObject *dict) GET_UINT(_install_importlib); GET_UINT(_init_main); - GET_UINT(_isolated_interpreter); GET_UINT(use_frozen_modules); GET_UINT(safe_path); GET_UINT(_is_python_build); diff --git a/Python/marshal.c b/Python/marshal.c index 90a44050918006..5f392d9e1ecfff 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -34,6 +34,8 @@ module marshal */ #if defined(MS_WINDOWS) #define MAX_MARSHAL_STACK_DEPTH 1000 +#elif defined(__wasi__) +#define MAX_MARSHAL_STACK_DEPTH 1500 #else #define MAX_MARSHAL_STACK_DEPTH 2000 #endif @@ -324,8 +326,8 @@ w_ref(PyObject *v, char *flag, WFILE *p) goto err; } w = (int)s; - Py_INCREF(v); - if (_Py_hashtable_set(p->hashtable, v, (void *)(uintptr_t)w) < 0) { + if (_Py_hashtable_set(p->hashtable, Py_NewRef(v), + (void *)(uintptr_t)w) < 0) { Py_DECREF(v); goto err; } @@ -949,8 +951,7 @@ r_ref_insert(PyObject *o, Py_ssize_t idx, int flag, RFILE *p) { if (o != NULL && flag) { /* currently only FLAG_REF is defined */ PyObject *tmp = PyList_GET_ITEM(p->refs, idx); - Py_INCREF(o); - PyList_SET_ITEM(p->refs, idx, o); + PyList_SET_ITEM(p->refs, idx, Py_NewRef(o)); Py_DECREF(tmp); } return o; @@ -1013,28 +1014,23 @@ r_object(RFILE *p) break; case TYPE_NONE: - Py_INCREF(Py_None); - retval = Py_None; + retval = Py_NewRef(Py_None); break; case TYPE_STOPITER: - Py_INCREF(PyExc_StopIteration); - retval = PyExc_StopIteration; + retval = Py_NewRef(PyExc_StopIteration); break; case TYPE_ELLIPSIS: - Py_INCREF(Py_Ellipsis); - retval = Py_Ellipsis; + retval = Py_NewRef(Py_Ellipsis); break; case TYPE_FALSE: - Py_INCREF(Py_False); - retval = Py_False; + retval = Py_NewRef(Py_False); break; case TYPE_TRUE: - Py_INCREF(Py_True); - retval = Py_True; + retval = Py_NewRef(Py_True); break; case TYPE_INT: @@ -1221,8 +1217,7 @@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for tuple"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } PyTuple_SET_ITEM(v, i, v2); @@ -1248,8 +1243,7 
@@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for list"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } PyList_SET_ITEM(v, i, v2); @@ -1281,8 +1275,7 @@ r_object(RFILE *p) Py_DECREF(val); } if (PyErr_Occurred()) { - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); } retval = v; break; @@ -1326,8 +1319,7 @@ r_object(RFILE *p) if (!PyErr_Occurred()) PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for set"); - Py_DECREF(v); - v = NULL; + Py_SETREF(v, NULL); break; } if (PySet_Add(v, v2) == -1) { @@ -1484,8 +1476,7 @@ r_object(RFILE *p) PyErr_SetString(PyExc_ValueError, "bad marshal data (invalid reference)"); break; } - Py_INCREF(v); - retval = v; + retval = Py_NewRef(v); break; default: diff --git a/Python/modsupport.c b/Python/modsupport.c index 8655daa1fc5e0e..b9a10dc157e7e5 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -10,9 +10,6 @@ typedef double va_double; static PyObject *va_build_value(const char *, va_list, int); static PyObject **va_build_stack(PyObject **small_stack, Py_ssize_t small_stack_len, const char *, va_list, int, Py_ssize_t*); -/* Package context -- the full module name for package imports */ -const char *_Py_PackageContext = NULL; - int _Py_convert_optional_to_ssize_t(PyObject *obj, void *result) @@ -359,8 +356,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (u == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) @@ -410,8 +406,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (str == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) { @@ -446,8 +441,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) else n = -1; if (str == NULL) { - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); } else { if (n < 0) { diff --git a/Python/mysnprintf.c b/Python/mysnprintf.c index cd69198011e3c9..2a505d14f82c99 100644 --- a/Python/mysnprintf.c +++ b/Python/mysnprintf.c @@ -9,6 +9,7 @@ would have been written had the buffer not been too small, and to set the last byte of the buffer to \0. At least MS _vsnprintf returns a negative value instead, and fills the entire buffer with non-\0 data. + Unlike C99, our wrappers do not support passing a null buffer. The wrappers ensure that str[size-1] is always \0 upon return. 
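/* Illustrative sketch, not from the patch itself: the marshal.c and
 * modsupport.c hunks above replace the manual "Py_INCREF then assign" and
 * "Py_DECREF then NULL" patterns with Py_NewRef() and Py_SETREF().  A minimal
 * stand-alone model of that equivalence, assuming a recent Python.h where
 * Py_NewRef() (3.10+) and Py_SETREF() are available; the helper names below
 * are invented for illustration only. */
#include <Python.h>

static PyObject *
give_none_old_style(void)
{
    PyObject *v = Py_None;   /* borrowed reference */
    Py_INCREF(v);            /* turn it into a new strong reference */
    return v;
}

static PyObject *
give_none_new_style(void)
{
    /* Py_NewRef() increments the refcount and returns its argument,
       so this behaves exactly like the helper above. */
    return Py_NewRef(Py_None);
}

int
main(void)
{
    Py_Initialize();
    PyObject *a = give_none_old_style();
    PyObject *b = give_none_new_style();
    /* Py_SETREF(dst, src) stores src into dst and releases the old value of
       dst, which is why "Py_DECREF(v); v = NULL;" becomes Py_SETREF(v, NULL)
       in r_object(). */
    Py_SETREF(a, NULL);
    Py_DECREF(b);
    return Py_FinalizeEx() < 0 ? 1 : 0;
}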
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index c1ff367d4fd38d..3aba4e7556a65f 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -2,42 +2,41 @@ static void *opcode_targets[256] = { &&TARGET_CACHE, &&TARGET_POP_TOP, &&TARGET_PUSH_NULL, - &&TARGET_BINARY_OP_ADAPTIVE, + &&TARGET_INTERPRETER_EXIT, + &&TARGET_END_FOR, &&TARGET_BINARY_OP_ADD_FLOAT, &&TARGET_BINARY_OP_ADD_INT, &&TARGET_BINARY_OP_ADD_UNICODE, &&TARGET_BINARY_OP_INPLACE_ADD_UNICODE, - &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_NOP, &&TARGET_UNARY_POSITIVE, &&TARGET_UNARY_NEGATIVE, &&TARGET_UNARY_NOT, + &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_BINARY_OP_MULTIPLY_INT, - &&TARGET_BINARY_OP_SUBTRACT_FLOAT, &&TARGET_UNARY_INVERT, + &&TARGET_BINARY_OP_SUBTRACT_FLOAT, &&TARGET_BINARY_OP_SUBTRACT_INT, - &&TARGET_BINARY_SUBSCR_ADAPTIVE, &&TARGET_BINARY_SUBSCR_DICT, &&TARGET_BINARY_SUBSCR_GETITEM, &&TARGET_BINARY_SUBSCR_LIST_INT, &&TARGET_BINARY_SUBSCR_TUPLE_INT, - &&TARGET_CALL_ADAPTIVE, &&TARGET_CALL_PY_EXACT_ARGS, &&TARGET_CALL_PY_WITH_DEFAULTS, + &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, &&TARGET_BINARY_SUBSCR, &&TARGET_BINARY_SLICE, &&TARGET_STORE_SLICE, - &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, &&TARGET_CALL_BUILTIN_CLASS, + &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, &&TARGET_GET_LEN, &&TARGET_MATCH_MAPPING, &&TARGET_MATCH_SEQUENCE, &&TARGET_MATCH_KEYS, - &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, + &&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, &&TARGET_PUSH_EXC_INFO, &&TARGET_CHECK_EXC_MATCH, &&TARGET_CHECK_EG_MATCH, - &&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, &&TARGET_CALL_NO_KW_BUILTIN_FAST, &&TARGET_CALL_NO_KW_BUILTIN_O, &&TARGET_CALL_NO_KW_ISINSTANCE, @@ -48,6 +47,7 @@ static void *opcode_targets[256] = { &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O, &&TARGET_CALL_NO_KW_STR_1, &&TARGET_CALL_NO_KW_TUPLE_1, + &&TARGET_CALL_NO_KW_TYPE_1, &&TARGET_WITH_EXCEPT_START, &&TARGET_GET_AITER, &&TARGET_GET_ANEXT, @@ -55,37 +55,37 @@ static void *opcode_targets[256] = { &&TARGET_BEFORE_WITH, &&TARGET_END_ASYNC_FOR, &&TARGET_CLEANUP_THROW, - &&TARGET_CALL_NO_KW_TYPE_1, - &&TARGET_COMPARE_OP_ADAPTIVE, &&TARGET_COMPARE_OP_FLOAT_JUMP, &&TARGET_COMPARE_OP_INT_JUMP, - &&TARGET_STORE_SUBSCR, - &&TARGET_DELETE_SUBSCR, &&TARGET_COMPARE_OP_STR_JUMP, - &&TARGET_EXTENDED_ARG_QUICK, - &&TARGET_FOR_ITER_ADAPTIVE, &&TARGET_FOR_ITER_LIST, + &&TARGET_STORE_SUBSCR, + &&TARGET_DELETE_SUBSCR, &&TARGET_FOR_ITER_RANGE, - &&TARGET_JUMP_BACKWARD_QUICK, + &&TARGET_STOPITERATION_ERROR, + &&TARGET_FOR_ITER_GEN, + &&TARGET_LOAD_ATTR_CLASS, + &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, + &&TARGET_LOAD_ATTR_INSTANCE_VALUE, &&TARGET_GET_ITER, &&TARGET_GET_YIELD_FROM_ITER, &&TARGET_PRINT_EXPR, &&TARGET_LOAD_BUILD_CLASS, - &&TARGET_LOAD_ATTR_ADAPTIVE, - &&TARGET_LOAD_ATTR_CLASS, - &&TARGET_LOAD_ASSERTION_ERROR, - &&TARGET_RETURN_GENERATOR, - &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, - &&TARGET_LOAD_ATTR_INSTANCE_VALUE, &&TARGET_LOAD_ATTR_MODULE, &&TARGET_LOAD_ATTR_PROPERTY, + &&TARGET_LOAD_ASSERTION_ERROR, + &&TARGET_RETURN_GENERATOR, &&TARGET_LOAD_ATTR_SLOT, &&TARGET_LOAD_ATTR_WITH_HINT, + &&TARGET_LOAD_ATTR_METHOD_LAZY_DICT, + &&TARGET_LOAD_ATTR_METHOD_NO_DICT, + &&TARGET_LOAD_ATTR_METHOD_WITH_DICT, + &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES, &&TARGET_LIST_TO_TUPLE, &&TARGET_RETURN_VALUE, &&TARGET_IMPORT_STAR, &&TARGET_SETUP_ANNOTATIONS, - &&TARGET_LOAD_ATTR_METHOD_LAZY_DICT, + &&TARGET_LOAD_CONST__LOAD_FAST, &&TARGET_ASYNC_GEN_WRAP, &&TARGET_PREP_RERAISE_STAR, &&TARGET_POP_EXCEPT, @@ -112,7 +112,7 @@ static void 
*opcode_targets[256] = { &&TARGET_JUMP_FORWARD, &&TARGET_JUMP_IF_FALSE_OR_POP, &&TARGET_JUMP_IF_TRUE_OR_POP, - &&TARGET_LOAD_ATTR_METHOD_NO_DICT, + &&TARGET_LOAD_FAST__LOAD_CONST, &&TARGET_POP_JUMP_IF_FALSE, &&TARGET_POP_JUMP_IF_TRUE, &&TARGET_LOAD_GLOBAL, @@ -120,7 +120,7 @@ static void *opcode_targets[256] = { &&TARGET_CONTAINS_OP, &&TARGET_RERAISE, &&TARGET_COPY, - &&TARGET_LOAD_ATTR_METHOD_WITH_DICT, + &&TARGET_LOAD_FAST__LOAD_FAST, &&TARGET_BINARY_OP, &&TARGET_SEND, &&TARGET_LOAD_FAST, @@ -140,9 +140,9 @@ static void *opcode_targets[256] = { &&TARGET_STORE_DEREF, &&TARGET_DELETE_DEREF, &&TARGET_JUMP_BACKWARD, - &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES, + &&TARGET_LOAD_GLOBAL_BUILTIN, &&TARGET_CALL_FUNCTION_EX, - &&TARGET_LOAD_CONST__LOAD_FAST, + &&TARGET_LOAD_GLOBAL_MODULE, &&TARGET_EXTENDED_ARG, &&TARGET_LIST_APPEND, &&TARGET_SET_ADD, @@ -152,35 +152,35 @@ static void *opcode_targets[256] = { &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, - &&TARGET_LOAD_FAST__LOAD_CONST, - &&TARGET_LOAD_FAST__LOAD_FAST, + &&TARGET_STORE_ATTR_INSTANCE_VALUE, + &&TARGET_STORE_ATTR_SLOT, &&TARGET_FORMAT_VALUE, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, - &&TARGET_LOAD_GLOBAL_ADAPTIVE, - &&TARGET_LOAD_GLOBAL_BUILTIN, - &&TARGET_LOAD_GLOBAL_MODULE, - &&TARGET_RESUME_QUICK, - &&TARGET_LIST_EXTEND, - &&TARGET_SET_UPDATE, - &&TARGET_DICT_MERGE, - &&TARGET_DICT_UPDATE, - &&TARGET_STORE_ATTR_ADAPTIVE, - &&TARGET_STORE_ATTR_INSTANCE_VALUE, - &&TARGET_STORE_ATTR_SLOT, &&TARGET_STORE_ATTR_WITH_HINT, &&TARGET_STORE_FAST__LOAD_FAST, - &&TARGET_CALL, - &&TARGET_KW_NAMES, &&TARGET_STORE_FAST__STORE_FAST, - &&TARGET_STORE_SUBSCR_ADAPTIVE, &&TARGET_STORE_SUBSCR_DICT, + &&TARGET_LIST_EXTEND, + &&TARGET_SET_UPDATE, + &&TARGET_DICT_MERGE, + &&TARGET_DICT_UPDATE, &&TARGET_STORE_SUBSCR_LIST_INT, - &&TARGET_UNPACK_SEQUENCE_ADAPTIVE, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_UNPACK_SEQUENCE_TUPLE, &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, &&_unknown_opcode, + &&TARGET_CALL, + &&TARGET_KW_NAMES, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, + &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, diff --git a/Python/pathconfig.c b/Python/pathconfig.c index 69b7e10a3b0288..be0f97c4b204a9 100644 --- a/Python/pathconfig.c +++ b/Python/pathconfig.c @@ -261,6 +261,8 @@ Py_SetPythonHome(const wchar_t *home) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.home); + _Py_path_config.home = NULL; + if (has_value) { _Py_path_config.home = _PyMem_RawWcsdup(home); } @@ -282,6 +284,8 @@ Py_SetProgramName(const wchar_t *program_name) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.program_name); + _Py_path_config.program_name = NULL; + if (has_value) { _Py_path_config.program_name = _PyMem_RawWcsdup(program_name); } @@ -302,6 +306,8 @@ _Py_SetProgramFullPath(const wchar_t *program_full_path) _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); PyMem_RawFree(_Py_path_config.program_full_path); + _Py_path_config.program_full_path = NULL; + if (has_value) { _Py_path_config.program_full_path = _PyMem_RawWcsdup(program_full_path); } diff --git a/Objects/perf_trampoline.c b/Python/perf_trampoline.c similarity index 100% rename from Objects/perf_trampoline.c rename to Python/perf_trampoline.c diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index c550f13bc14820..8209132ebc6c27 100644 --- 
a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -10,6 +10,7 @@ #include "pycore_fileutils.h" // _Py_ResetForceASCII() #include "pycore_floatobject.h" // _PyFloat_InitTypes() #include "pycore_genobject.h" // _PyAsyncGen_Fini() +#include "pycore_global_objects_fini_generated.h" // "_PyStaticObjects_CheckRefcnt() #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_list.h" // _PyList_Fini() @@ -28,6 +29,7 @@ #include "pycore_tuple.h" // _PyTuple_InitTypes() #include "pycore_typeobject.h" // _PyTypes_InitTypes() #include "pycore_unicodeobject.h" // _PyUnicode_InitTypes() +#include "opcode.h" extern void _PyIO_Fini(void); @@ -75,13 +77,15 @@ static PyStatus init_sys_streams(PyThreadState *tstate); static void wait_for_thread_shutdown(PyThreadState *tstate); static void call_ll_exitfuncs(_PyRuntimeState *runtime); -int _Py_UnhandledKeyboardInterrupt = 0; - /* The following places the `_PyRuntime` structure in a location that can be * found without any external information. This is meant to ease access to the * interpreter state for various runtime debugging tools, but is *not* an * officially supported feature */ +/* Suppress deprecation warning for PyBytesObject.ob_shash */ +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + #if defined(MS_WINDOWS) #pragma section("PyRuntime", read, write) @@ -95,14 +99,11 @@ __attribute__(( #endif -/* Suppress deprecation warning for PyBytesObject.ob_shash */ -_Py_COMP_DIAG_PUSH -_Py_COMP_DIAG_IGNORE_DEPR_DECLS _PyRuntimeState _PyRuntime #if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) __attribute__ ((section (".PyRuntime"))) #endif -= _PyRuntimeState_INIT; += _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP static int runtime_initialized = 0; @@ -480,6 +481,8 @@ interpreter_update_config(PyThreadState *tstate, int only_update_path_config) } } + tstate->interp->long_state.max_str_digits = config->int_max_str_digits; + // Update the sys module for the new configuration if (_PySys_UpdateConfig(tstate) < 0) { return -1; @@ -596,11 +599,18 @@ pycore_init_runtime(_PyRuntimeState *runtime, */ _PyRuntimeState_SetFinalizing(runtime, NULL); + _Py_InitVersion(); + status = _Py_HashRandomization_Init(config); if (_PyStatus_EXCEPTION(status)) { return status; } + status = _PyImport_Init(); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + status = _PyInterpreterState_Enable(runtime); if (_PyStatus_EXCEPTION(status)) { return status; @@ -609,6 +619,28 @@ pycore_init_runtime(_PyRuntimeState *runtime, } +static void +init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *config) +{ + assert(interp->feature_flags == 0); + + if (config->allow_fork) { + interp->feature_flags |= Py_RTFLAGS_FORK; + } + if (config->allow_exec) { + interp->feature_flags |= Py_RTFLAGS_EXEC; + } + // Note that fork+exec is always allowed. 
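/* Illustrative sketch, not from the patch: init_interp_settings() folds the
 * boolean fields of _PyInterpreterConfig into the single
 * interp->feature_flags bitmask, and later code queries it with a bitwise
 * AND, as the new _PyInterpreterState_HasFeature() in the pystate.c hunk
 * below does.  A stand-alone model; the flag names and values here are
 * invented stand-ins for the Py_RTFLAGS_* constants. */
#include <assert.h>

#define SKETCH_RTFLAGS_FORK    (1UL << 0)
#define SKETCH_RTFLAGS_THREADS (1UL << 1)

struct sketch_config { int allow_fork; int allow_threads; };

static unsigned long
sketch_init_settings(const struct sketch_config *cfg)
{
    unsigned long flags = 0;
    if (cfg->allow_fork)    { flags |= SKETCH_RTFLAGS_FORK; }
    if (cfg->allow_threads) { flags |= SKETCH_RTFLAGS_THREADS; }
    return flags;
}

static int
sketch_has_feature(unsigned long flags, unsigned long feature)
{
    return (flags & feature) != 0;   /* same test as _PyInterpreterState_HasFeature */
}

int
main(void)
{
    struct sketch_config cfg = { .allow_fork = 1, .allow_threads = 0 };
    unsigned long flags = sketch_init_settings(&cfg);
    assert(sketch_has_feature(flags, SKETCH_RTFLAGS_FORK));
    assert(!sketch_has_feature(flags, SKETCH_RTFLAGS_THREADS));
    return 0;
}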
+ + if (config->allow_threads) { + interp->feature_flags |= Py_RTFLAGS_THREADS; + } + if (config->allow_daemon_threads) { + interp->feature_flags |= Py_RTFLAGS_DAEMON_THREADS; + } +} + + static PyStatus init_interp_create_gil(PyThreadState *tstate) { @@ -636,7 +668,7 @@ init_interp_create_gil(PyThreadState *tstate) static PyStatus pycore_create_interpreter(_PyRuntimeState *runtime, - const PyConfig *config, + const PyConfig *src_config, PyThreadState **tstate_p) { /* Auto-thread-state API */ @@ -651,11 +683,14 @@ pycore_create_interpreter(_PyRuntimeState *runtime, } assert(_Py_IsMainInterpreter(interp)); - status = _PyConfig_Copy(&interp->config, config); + status = _PyConfig_Copy(&interp->config, src_config); if (_PyStatus_EXCEPTION(status)) { return status; } + const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; + init_interp_settings(interp, &config); + PyThreadState *tstate = PyThreadState_New(interp); if (tstate == NULL) { return _PyStatus_ERR("can't make first thread"); @@ -751,6 +786,21 @@ pycore_init_types(PyInterpreterState *interp) return _PyStatus_OK(); } +static const uint8_t INTERPRETER_TRAMPOLINE_INSTRUCTIONS[] = { + /* Put a NOP at the start, so that the IP points into + * the code, rather than before it */ + NOP, 0, + INTERPRETER_EXIT, 0, + /* RESUME at end makes sure that the frame appears incomplete */ + RESUME, 0 +}; + +static const _PyShimCodeDef INTERPRETER_TRAMPOLINE_CODEDEF = { + INTERPRETER_TRAMPOLINE_INSTRUCTIONS, + sizeof(INTERPRETER_TRAMPOLINE_INSTRUCTIONS), + 1, + "" +}; static PyStatus pycore_init_builtins(PyThreadState *tstate) @@ -770,8 +820,7 @@ pycore_init_builtins(PyThreadState *tstate) if (builtins_dict == NULL) { goto error; } - Py_INCREF(builtins_dict); - interp->builtins = builtins_dict; + interp->builtins = Py_NewRef(builtins_dict); PyObject *isinstance = PyDict_GetItem(builtins_dict, &_Py_ID(isinstance)); assert(isinstance); @@ -785,7 +834,10 @@ pycore_init_builtins(PyThreadState *tstate) PyObject *object__getattribute__ = _PyType_Lookup(&PyBaseObject_Type, &_Py_ID(__getattribute__)); assert(object__getattribute__); interp->callable_cache.object__getattribute__ = object__getattribute__; - + interp->interpreter_trampoline = _Py_MakeShimCode(&INTERPRETER_TRAMPOLINE_CODEDEF); + if (interp->interpreter_trampoline == NULL) { + return _PyStatus_ERR("failed to create interpreter trampoline."); + } if (_PyBuiltins_AddExceptions(bimod) < 0) { return _PyStatus_ERR("failed to add exceptions to builtins"); } @@ -1292,6 +1344,7 @@ Py_InitializeEx(int install_sigs) config.install_signal_handlers = install_sigs; status = Py_InitializeFromConfig(&config); + PyConfig_Clear(&config); if (_PyStatus_EXCEPTION(status)) { Py_ExitStatusException(status); } @@ -1697,7 +1750,7 @@ finalize_interp_types(PyInterpreterState *interp) _PyUnicode_Fini(interp); _PyFloat_Fini(interp); #ifdef Py_DEBUG - _PyStaticObjects_CheckRefcnt(); + _PyStaticObjects_CheckRefcnt(interp); #endif } @@ -1959,7 +2012,7 @@ Py_Finalize(void) */ static PyStatus -new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) +new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) { PyStatus status; @@ -1993,23 +2046,23 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) PyThreadState *save_tstate = PyThreadState_Swap(tstate); /* Copy the current interpreter config into the new interpreter */ - const PyConfig *config; + const PyConfig *src_config; if (save_tstate != NULL) { - config = _PyInterpreterState_GetConfig(save_tstate->interp); + 
src_config = _PyInterpreterState_GetConfig(save_tstate->interp); } else { /* No current thread state, copy from the main interpreter */ PyInterpreterState *main_interp = _PyInterpreterState_Main(); - config = _PyInterpreterState_GetConfig(main_interp); + src_config = _PyInterpreterState_GetConfig(main_interp); } - - status = _PyConfig_Copy(&interp->config, config); + status = _PyConfig_Copy(&interp->config, src_config); if (_PyStatus_EXCEPTION(status)) { goto error; } - interp->config._isolated_interpreter = isolated_subinterpreter; + + init_interp_settings(interp, config); status = init_interp_create_gil(tstate); if (_PyStatus_EXCEPTION(status)) { @@ -2043,21 +2096,21 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) } PyThreadState * -_Py_NewInterpreter(int isolated_subinterpreter) +_Py_NewInterpreterFromConfig(const _PyInterpreterConfig *config) { PyThreadState *tstate = NULL; - PyStatus status = new_interpreter(&tstate, isolated_subinterpreter); + PyStatus status = new_interpreter(&tstate, config); if (_PyStatus_EXCEPTION(status)) { Py_ExitStatusException(status); } return tstate; - } PyThreadState * Py_NewInterpreter(void) { - return _Py_NewInterpreter(0); + const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; + return _Py_NewInterpreterFromConfig(&config); } /* Delete an interpreter and its last thread. This requires that the @@ -2260,8 +2313,7 @@ create_stdio(const PyConfig *config, PyObject* io, goto error; } else { - raw = buf; - Py_INCREF(raw); + raw = Py_NewRef(buf); } #ifdef MS_WINDOWS @@ -2523,8 +2575,7 @@ _Py_FatalError_PrintExc(PyThreadState *tstate) _PyErr_NormalizeException(tstate, &exception, &v, &tb); if (tb == NULL) { - tb = Py_None; - Py_INCREF(tb); + tb = Py_NewRef(Py_None); } PyException_SetTraceback(v, tb); if (exception == NULL) { diff --git a/Python/pystate.c b/Python/pystate.c index 50ae0ce682170b..19fd9a6ae4497b 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -51,8 +51,11 @@ static void _PyThreadState_Delete(PyThreadState *tstate, int check_current); _Py_COMP_DIAG_PUSH _Py_COMP_DIAG_IGNORE_DEPR_DECLS /* We use "initial" if the runtime gets re-used - (e.g. Py_Finalize() followed by Py_Initialize(). */ -static const _PyRuntimeState initial = _PyRuntimeState_INIT; + (e.g. Py_Finalize() followed by Py_Initialize(). + Note that we initialize "initial" relative to _PyRuntime, + to ensure pre-initialized pointers point to the active + runtime state (and not "initial"). */ +static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP static int @@ -134,8 +137,8 @@ init_runtime(_PyRuntimeState *runtime, // Set it to the ID of the main thread of the main interpreter. runtime->main_thread = PyThread_get_thread_ident(); - runtime->unicode_ids.next_index = unicode_next_index; - runtime->unicode_ids.lock = unicode_ids_mutex; + runtime->unicode_state.ids.next_index = unicode_next_index; + runtime->unicode_state.ids.lock = unicode_ids_mutex; runtime->_initialized = 1; } @@ -151,7 +154,7 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) _Py_AuditHookEntry *audit_hook_head = runtime->audit_hook_head; // bpo-42882: Preserve next_index value if Py_Initialize()/Py_Finalize() // is called multiple times. 
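/* Illustrative sketch, not from the patch: the bpo-42882 comment refers to
 * embedders that initialize and finalize the runtime more than once in a
 * single process; the statically templated "initial" state and the preserved
 * unicode id counter exist so such cycles keep working.  A minimal embedding
 * loop using only the documented C API: */
#include <Python.h>

int
main(void)
{
    for (int i = 0; i < 2; i++) {
        Py_Initialize();
        PyRun_SimpleString("print('hello from init/fini cycle')");
        if (Py_FinalizeEx() < 0) {
            return 1;
        }
    }
    return 0;
}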
- Py_ssize_t unicode_next_index = runtime->unicode_ids.next_index; + Py_ssize_t unicode_next_index = runtime->unicode_state.ids.next_index; PyThread_type_lock lock1, lock2, lock3, lock4; if (alloc_for_runtime(&lock1, &lock2, &lock3, &lock4) != 0) { @@ -183,7 +186,7 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) FREE_LOCK(runtime->interpreters.mutex); FREE_LOCK(runtime->xidregistry.mutex); - FREE_LOCK(runtime->unicode_ids.lock); + FREE_LOCK(runtime->unicode_state.ids.lock); FREE_LOCK(runtime->getargs.mutex); #undef FREE_LOCK @@ -206,7 +209,7 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) int reinit_interp = _PyThread_at_fork_reinit(&runtime->interpreters.mutex); int reinit_xidregistry = _PyThread_at_fork_reinit(&runtime->xidregistry.mutex); - int reinit_unicode_ids = _PyThread_at_fork_reinit(&runtime->unicode_ids.lock); + int reinit_unicode_ids = _PyThread_at_fork_reinit(&runtime->unicode_state.ids.lock); int reinit_getargs = _PyThread_at_fork_reinit(&runtime->getargs.mutex); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); @@ -272,7 +275,9 @@ alloc_interpreter(void) static void free_interpreter(PyInterpreterState *interp) { - if (!interp->_static) { + // The main interpreter is statically allocated so + // should not be freed. + if (interp != &_PyRuntime._main_interpreter) { PyMem_RawFree(interp); } } @@ -450,6 +455,16 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) PyDict_Clear(interp->builtins); Py_CLEAR(interp->sysdict); Py_CLEAR(interp->builtins); + Py_CLEAR(interp->interpreter_trampoline); + + for (int i=0; i < DICT_MAX_WATCHERS; i++) { + interp->dict_state.watchers[i] = NULL; + } + + for (int i=0; i < FUNC_MAX_WATCHERS; i++) { + interp->func_watchers[i] = NULL; + } + interp->active_func_watchers = 0; // XXX Once we have one allocator per interpreter (i.e. // per-interpreter GC) we must ensure that all of the interpreter's @@ -750,7 +765,9 @@ alloc_threadstate(void) static void free_threadstate(PyThreadState *tstate) { - if (!tstate->_static) { + // The initial thread state of the interpreter is allocated + // as part of the interpreter state so should not be freed. + if (tstate != &tstate->interp->_initial_thread) { PyMem_RawFree(tstate); } } @@ -861,7 +878,9 @@ PyThreadState * PyThreadState_New(PyInterpreterState *interp) { PyThreadState *tstate = new_threadstate(interp); - _PyThreadState_SetCurrent(tstate); + if (tstate) { + _PyThreadState_SetCurrent(tstate); + } return tstate; } @@ -932,9 +951,8 @@ _PyState_AddModule(PyThreadState *tstate, PyObject* module, PyModuleDef* def) } } - Py_INCREF(module); return PyList_SetItem(interp->modules_by_index, - def->m_base.m_index, module); + def->m_base.m_index, Py_NewRef(module)); } int @@ -982,8 +1000,7 @@ PyState_RemoveModule(PyModuleDef* def) Py_FatalError("Module index out of bounds."); } - Py_INCREF(Py_None); - return PyList_SetItem(interp->modules_by_index, index, Py_None); + return PyList_SetItem(interp->modules_by_index, index, Py_NewRef(Py_None)); } // Used by finalize_modules() @@ -1287,8 +1304,7 @@ PyThreadState_GetFrame(PyThreadState *tstate) if (frame == NULL) { PyErr_Clear(); } - Py_XINCREF(frame); - return frame; + return (PyFrameObject*)Py_XNewRef(frame); } @@ -1334,8 +1350,7 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) * the decref. 
*/ PyObject *old_exc = tstate->async_exc; - Py_XINCREF(exc); - tstate->async_exc = exc; + tstate->async_exc = Py_XNewRef(exc); HEAD_UNLOCK(runtime); Py_XDECREF(old_exc); @@ -1537,16 +1552,16 @@ _PyGILState_Init(_PyRuntimeState *runtime) PyStatus _PyGILState_SetTstate(PyThreadState *tstate) { + /* must init with valid states */ + assert(tstate != NULL); + assert(tstate->interp != NULL); + if (!_Py_IsMainInterpreter(tstate->interp)) { /* Currently, PyGILState is shared by all interpreters. The main * interpreter is responsible to initialize it. */ return _PyStatus_OK(); } - /* must init with valid states */ - assert(tstate != NULL); - assert(tstate->interp != NULL); - struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; gilstate->autoInterpreterState = tstate->interp; @@ -2015,8 +2030,7 @@ _bytes_shared(PyObject *obj, _PyCrossInterpreterData *data) return -1; } data->data = (void *)shared; - Py_INCREF(obj); - data->obj = obj; // Will be "released" (decref'ed) when data released. + data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released. data->new_object = _new_bytes_object; data->free = PyMem_Free; return 0; @@ -2043,8 +2057,7 @@ _str_shared(PyObject *obj, _PyCrossInterpreterData *data) shared->buffer = PyUnicode_DATA(obj); shared->len = PyUnicode_GET_LENGTH(obj); data->data = (void *)shared; - Py_INCREF(obj); - data->obj = obj; // Will be "released" (decref'ed) when data released. + data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released. data->new_object = _new_str_object; data->free = PyMem_Free; return 0; @@ -2081,8 +2094,7 @@ static PyObject * _new_none_object(_PyCrossInterpreterData *data) { // XXX Singleton refcounts are problematic across interpreters... - Py_INCREF(Py_None); - return Py_None; + return Py_NewRef(Py_None); } static int @@ -2173,6 +2185,14 @@ _Py_GetConfig(void) return _PyInterpreterState_GetConfig(tstate->interp); } + +int +_PyInterpreterState_HasFeature(PyInterpreterState *interp, unsigned long feature) +{ + return ((interp->feature_flags & feature) != 0); +} + + #define MINIMUM_OVERHEAD 1000 static PyObject ** diff --git a/Python/pythonrun.c b/Python/pythonrun.c index acb1330b85fb75..35292b6478a833 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -515,8 +515,7 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, if (v == Py_None) { Py_DECREF(v); _Py_DECLARE_STR(anon_string, ""); - *filename = &_Py_STR(anon_string); - Py_INCREF(*filename); + *filename = Py_NewRef(&_Py_STR(anon_string)); } else { *filename = v; @@ -719,8 +718,7 @@ _Py_HandleSystemExit(int *exitcode_p) /* The error code should be in the `code' attribute. */ PyObject *code = PyObject_GetAttr(value, &_Py_ID(code)); if (code) { - Py_DECREF(value); - value = code; + Py_SETREF(value, code); if (value == Py_None) goto done; } @@ -786,8 +784,7 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars) _PyErr_NormalizeException(tstate, &exception, &v, &tb); if (tb == NULL) { - tb = Py_None; - Py_INCREF(tb); + tb = Py_NewRef(Py_None); } PyException_SetTraceback(v, tb); if (exception == NULL) { @@ -833,12 +830,10 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars) to be NULL. However PyErr_Display() can't tolerate NULLs, so just be safe. 
*/ if (exception2 == NULL) { - exception2 = Py_None; - Py_INCREF(exception2); + exception2 = Py_NewRef(Py_None); } if (v2 == NULL) { - v2 = Py_None; - Py_INCREF(v2); + v2 = Py_NewRef(Py_None); } fflush(stdout); PySys_WriteStderr("Error in sys.excepthook:\n"); @@ -1107,16 +1102,9 @@ print_exception_suggestions(struct exception_print_context *ctx, PyObject *f = ctx->file; PyObject *suggestions = _Py_Offer_Suggestions(value); if (suggestions) { - // Add a trailer ". Did you mean: (...)?" - if (PyFile_WriteString(". Did you mean: '", f) < 0) { - goto error; - } if (PyFile_WriteObject(suggestions, f, Py_PRINT_RAW) < 0) { goto error; } - if (PyFile_WriteString("'?", f) < 0) { - goto error; - } Py_DECREF(suggestions); } else if (PyErr_Occurred()) { @@ -1698,7 +1686,8 @@ run_eval_code_obj(PyThreadState *tstate, PyCodeObject *co, PyObject *globals, Py * uncaught exception to trigger an unexplained signal exit from a future * Py_Main() based one. */ - _Py_UnhandledKeyboardInterrupt = 0; + // XXX Isn't this dealt with by the move to _PyRuntimeState? + _PyRuntime.signals.unhandled_keyboard_interrupt = 0; /* Set globals['__builtins__'] if it doesn't exist */ if (globals != NULL && _PyDict_GetItemStringWithError(globals, "__builtins__") == NULL) { @@ -1712,7 +1701,7 @@ run_eval_code_obj(PyThreadState *tstate, PyCodeObject *co, PyObject *globals, Py v = PyEval_EvalCode((PyObject*)co, globals, locals); if (!v && _PyErr_Occurred(tstate) == PyExc_KeyboardInterrupt) { - _Py_UnhandledKeyboardInterrupt = 1; + _PyRuntime.signals.unhandled_keyboard_interrupt = 1; } return v; } diff --git a/Python/specialize.c b/Python/specialize.c index b7c321e4878b98..cd09b188b7fa97 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -16,22 +16,6 @@ * ./adaptive.md */ -/* Map from opcode to adaptive opcode. - Values of zero are ignored. */ -uint8_t _PyOpcode_Adaptive[256] = { - [LOAD_ATTR] = LOAD_ATTR_ADAPTIVE, - [LOAD_GLOBAL] = LOAD_GLOBAL_ADAPTIVE, - [BINARY_SUBSCR] = BINARY_SUBSCR_ADAPTIVE, - [STORE_SUBSCR] = STORE_SUBSCR_ADAPTIVE, - [CALL] = CALL_ADAPTIVE, - [STORE_ATTR] = STORE_ATTR_ADAPTIVE, - [BINARY_OP] = BINARY_OP_ADAPTIVE, - [COMPARE_OP] = COMPARE_OP_ADAPTIVE, - [UNPACK_SEQUENCE] = UNPACK_SEQUENCE_ADAPTIVE, - [FOR_ITER] = FOR_ITER_ADAPTIVE, -}; - -Py_ssize_t _Py_QuickenedCount = 0; #ifdef Py_STATS PyStats _py_stats_struct = { 0 }; PyStats *_py_stats = &_py_stats_struct; @@ -144,7 +128,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats) fprintf(out, "opcode[%d].specializable : 1\n", BINARY_SLICE); fprintf(out, "opcode[%d].specializable : 1\n", STORE_SLICE); for (int i = 0; i < 256; i++) { - if (_PyOpcode_Adaptive[i]) { + if (_PyOpcode_Caches[i]) { fprintf(out, "opcode[%d].specializable : 1\n", i); } PRINT_STAT(i, specialization.success); @@ -276,82 +260,42 @@ do { \ #define SPECIALIZATION_FAIL(opcode, kind) ((void)0) #endif -// Insert adaptive instructions and superinstructions. This cannot fail. +// Initialize warmup counters and insert superinstructions. This cannot fail. 
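/* Illustrative sketch, not from the patch: the rewritten _PyCode_Quicken()
 * below matches adjacent instruction pairs by packing them into one integer
 * with "previous_opcode << 8 | opcode" and switching on the packed value to
 * pick a superinstruction.  A stand-alone model of that pairing trick; the
 * opcode numbers here are made up in place of LOAD_FAST/LOAD_CONST etc. */
#include <stdio.h>

enum { SK_LOAD_FAST = 1, SK_LOAD_CONST = 2, SK_STORE_FAST = 3 };

static const char *
sketch_superinstruction(int previous_opcode, int opcode)
{
    switch (previous_opcode << 8 | opcode) {
        case SK_LOAD_CONST << 8 | SK_LOAD_FAST:  return "LOAD_CONST__LOAD_FAST";
        case SK_LOAD_FAST  << 8 | SK_LOAD_CONST: return "LOAD_FAST__LOAD_CONST";
        case SK_LOAD_FAST  << 8 | SK_LOAD_FAST:  return "LOAD_FAST__LOAD_FAST";
        case SK_STORE_FAST << 8 | SK_STORE_FAST: return "STORE_FAST__STORE_FAST";
        default:                                 return NULL;  /* no fusion */
    }
}

int
main(void)
{
    const char *fused = sketch_superinstruction(SK_LOAD_CONST, SK_LOAD_FAST);
    printf("%s\n", fused ? fused : "keep as-is");
    return 0;
}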
void _PyCode_Quicken(PyCodeObject *code) { - _Py_QuickenedCount++; - int previous_opcode = -1; + int previous_opcode = 0; _Py_CODEUNIT *instructions = _PyCode_CODE(code); for (int i = 0; i < Py_SIZE(code); i++) { - int opcode = _Py_OPCODE(instructions[i]); - uint8_t adaptive_opcode = _PyOpcode_Adaptive[opcode]; - if (adaptive_opcode) { - _Py_SET_OPCODE(instructions[i], adaptive_opcode); - // Make sure the adaptive counter is zero: - assert(instructions[i + 1] == 0); - previous_opcode = -1; - i += _PyOpcode_Caches[opcode]; - } - else { - assert(!_PyOpcode_Caches[opcode]); - switch (opcode) { - case EXTENDED_ARG: - _Py_SET_OPCODE(instructions[i], EXTENDED_ARG_QUICK); - break; - case JUMP_BACKWARD: - _Py_SET_OPCODE(instructions[i], JUMP_BACKWARD_QUICK); - break; - case RESUME: - _Py_SET_OPCODE(instructions[i], RESUME_QUICK); - break; - case LOAD_FAST: - switch(previous_opcode) { - case LOAD_FAST: - _Py_SET_OPCODE(instructions[i - 1], - LOAD_FAST__LOAD_FAST); - break; - case STORE_FAST: - _Py_SET_OPCODE(instructions[i - 1], - STORE_FAST__LOAD_FAST); - break; - case LOAD_CONST: - _Py_SET_OPCODE(instructions[i - 1], - LOAD_CONST__LOAD_FAST); - break; - } - break; - case STORE_FAST: - if (previous_opcode == STORE_FAST) { - _Py_SET_OPCODE(instructions[i - 1], - STORE_FAST__STORE_FAST); - } - break; - case LOAD_CONST: - if (previous_opcode == LOAD_FAST) { - _Py_SET_OPCODE(instructions[i - 1], - LOAD_FAST__LOAD_CONST); - } - break; - } - previous_opcode = opcode; + int opcode = _PyOpcode_Deopt[_Py_OPCODE(instructions[i])]; + int caches = _PyOpcode_Caches[opcode]; + if (caches) { + instructions[i + 1] = adaptive_counter_warmup(); + previous_opcode = 0; + i += caches; + continue; + } + switch (previous_opcode << 8 | opcode) { + case LOAD_CONST << 8 | LOAD_FAST: + _Py_SET_OPCODE(instructions[i - 1], LOAD_CONST__LOAD_FAST); + break; + case LOAD_FAST << 8 | LOAD_CONST: + _Py_SET_OPCODE(instructions[i - 1], LOAD_FAST__LOAD_CONST); + break; + case LOAD_FAST << 8 | LOAD_FAST: + _Py_SET_OPCODE(instructions[i - 1], LOAD_FAST__LOAD_FAST); + break; + case STORE_FAST << 8 | LOAD_FAST: + _Py_SET_OPCODE(instructions[i - 1], STORE_FAST__LOAD_FAST); + break; + case STORE_FAST << 8 | STORE_FAST: + _Py_SET_OPCODE(instructions[i - 1], STORE_FAST__STORE_FAST); + break; } + previous_opcode = opcode; } } -static inline int -miss_counter_start(void) { - /* Starting value for the counter. - * This value needs to be not too low, otherwise - * it would cause excessive de-optimization. - * Neither should it be too high, or that would delay - * de-optimization excessively when it is needed. - * A value around 50 seems to work, and we choose a - * prime number to avoid artifacts. - */ - return 53; -} - #define SIMPLE_FUNCTION 0 /* Common */ @@ -719,32 +663,33 @@ static int specialize_attr_loadmethod(PyObject* owner, _Py_CODEUNIT* instr, PyOb PyObject* descr, DescriptorClassification kind); static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name); -int +void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { assert(_PyOpcode_Caches[LOAD_ATTR] == INLINE_CACHE_ENTRIES_LOAD_ATTR); _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); + PyTypeObject *type = Py_TYPE(owner); + if (!_PyType_IsReady(type)) { + // We *might* not really need this check, but we inherited it from + // PyObject_GenericGetAttr and friends... 
and this way we still do the + // right thing if someone forgets to call PyType_Ready(type): + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); + goto fail; + } if (PyModule_CheckExact(owner)) { - int err = specialize_module_load_attr(owner, instr, name, LOAD_ATTR, - LOAD_ATTR_MODULE); - if (err) { + if (specialize_module_load_attr(owner, instr, name, LOAD_ATTR, + LOAD_ATTR_MODULE)) + { goto fail; } goto success; } if (PyType_Check(owner)) { - int err = specialize_class_load_attr(owner, instr, name); - if (err) { + if (specialize_class_load_attr(owner, instr, name)) { goto fail; } goto success; } - PyTypeObject *type = Py_TYPE(owner); - if (type->tp_dict == NULL) { - if (PyType_Ready(type) < 0) { - return -1; - } - } PyObject *descr = NULL; DescriptorClassification kind = analyze_descriptor(type, name, &descr, 0); assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN); @@ -796,6 +741,10 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) PyMemberDescrObject *member = (PyMemberDescrObject *)descr; struct PyMemberDef *dmem = member->d_member; Py_ssize_t offset = dmem->offset; + if (!PyObject_TypeCheck(owner, member->d_common.d_type)) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR); + goto fail; + } if (dmem->flags & PY_AUDIT_READ) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_AUDITED_SLOT); goto fail; @@ -855,34 +804,36 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) case ABSENT: break; } - int err = specialize_dict_access( - owner, instr, type, kind, name, - LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT - ); - if (err < 0) { - return -1; - } - if (err) { + if (specialize_dict_access(owner, instr, type, kind, name, LOAD_ATTR, + LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT)) + { goto success; } fail: STAT_INC(LOAD_ATTR, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, LOAD_ATTR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(LOAD_ATTR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } -int +void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) { assert(_PyOpcode_Caches[STORE_ATTR] == INLINE_CACHE_ENTRIES_STORE_ATTR); _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); PyTypeObject *type = Py_TYPE(owner); + if (!_PyType_IsReady(type)) { + // We *might* not really need this check, but we inherited it from + // PyObject_GenericSetAttr and friends... 
and this way we still do the + // right thing if someone forgets to call PyType_Ready(type): + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); + goto fail; + } if (PyModule_CheckExact(owner)) { SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_OVERRIDDEN); goto fail; @@ -904,6 +855,10 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) PyMemberDescrObject *member = (PyMemberDescrObject *)descr; struct PyMemberDef *dmem = member->d_member; Py_ssize_t offset = dmem->offset; + if (!PyObject_TypeCheck(owner, member->d_common.d_type)) { + SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_EXPECTED_ERROR); + goto fail; + } if (dmem->flags & READONLY) { SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_ATTR_READ_ONLY); goto fail; @@ -937,27 +892,21 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) case ABSENT: break; } - - int err = specialize_dict_access( - owner, instr, type, kind, name, - STORE_ATTR, STORE_ATTR_INSTANCE_VALUE, STORE_ATTR_WITH_HINT - ); - if (err < 0) { - return -1; - } - if (err) { + if (specialize_dict_access(owner, instr, type, kind, name, STORE_ATTR, + STORE_ATTR_INSTANCE_VALUE, STORE_ATTR_WITH_HINT)) + { goto success; } fail: STAT_INC(STORE_ATTR, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, STORE_ATTR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(STORE_ATTR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } @@ -1136,7 +1085,7 @@ PyObject *descr, DescriptorClassification kind) return 0; } -int +void _Py_Specialize_LoadGlobal( PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name) @@ -1209,13 +1158,13 @@ _Py_Specialize_LoadGlobal( fail: STAT_INC(LOAD_GLOBAL, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, LOAD_GLOBAL); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(LOAD_GLOBAL, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -1303,7 +1252,7 @@ function_get_version(PyObject *o, int opcode) return version; } -int +void _Py_Specialize_BinarySubscr( PyObject *container, PyObject *sub, _Py_CODEUNIT *instr) { @@ -1369,16 +1318,16 @@ _Py_Specialize_BinarySubscr( fail: STAT_INC(BINARY_SUBSCR, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, BINARY_SUBSCR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(BINARY_SUBSCR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } -int +void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr) { _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + 1); @@ -1473,20 +1422,19 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins fail: STAT_INC(STORE_SUBSCR, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, STORE_SUBSCR); cache->counter = adaptive_counter_backoff(cache->counter); - return 0; + return; success: STAT_INC(STORE_SUBSCR, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); - return 0; + cache->counter = adaptive_counter_cooldown(); } static int specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); 
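/* Illustrative sketch, not from the patch: the specializer epilogues above
 * now all follow the same shape -- on failure the instruction is rewritten
 * back to its generic opcode and the inline-cache counter enters a backoff
 * period, on success it enters a cooldown period before the next attempt.
 * A simplified stand-alone model; the real counters are packed differently,
 * and the helper and constants below are invented for illustration. */
#include <stdio.h>

#define SK_COOLDOWN 64          /* runs before re-checking a successful cache */
static int sk_backoff = 1;      /* grows after each failed specialization */

static int
sk_counter_after(int success)
{
    if (success) {
        sk_backoff = 1;
        return SK_COOLDOWN;
    }
    if (sk_backoff < (1 << 12)) {
        sk_backoff *= 2;        /* exponential backoff, capped */
    }
    return sk_backoff;
}

int
main(void)
{
    printf("after failure: wait %d runs\n", sk_counter_after(0));
    printf("after failure: wait %d runs\n", sk_counter_after(0));
    printf("after success: wait %d runs\n", sk_counter_after(1));
    return 0;
}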
PyTypeObject *tp = _PyType_CAST(callable); if (tp->tp_new == PyBaseObject_Type.tp_new) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_PYTHON_CLASS); @@ -1548,7 +1496,6 @@ static int specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); if (kwnames) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_KWNAMES); return -1; @@ -1600,7 +1547,6 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames, bool bound_method) { _PyCallCache *cache = (_PyCallCache *)(instr + 1); - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); PyCodeObject *code = (PyCodeObject *)func->func_code; int kind = function_kind(code); /* Don't specialize if PEP 523 is active */ @@ -1655,7 +1601,6 @@ static int specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { - assert(_Py_OPCODE(*instr) == CALL_ADAPTIVE); if (PyCFunction_GET_FUNCTION(callable) == NULL) { return 1; } @@ -1752,7 +1697,7 @@ call_fail_kind(PyObject *callable) /* TODO: - Specialize calling classes. */ -int +void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames) { @@ -1790,14 +1735,14 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, if (fail) { STAT_INC(CALL, failure); assert(!PyErr_Occurred()); + _Py_SET_OPCODE(*instr, CALL); cache->counter = adaptive_counter_backoff(cache->counter); } else { STAT_INC(CALL, success); assert(!PyErr_Occurred()); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } - return 0; } #ifdef Py_STATS @@ -1930,24 +1875,15 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, goto success; } break; -#ifndef Py_STATS - default: - // These operators don't have any available specializations. 
Rather - // than repeatedly attempting to specialize them, just convert them - // back to BINARY_OP (unless we're collecting stats, where it's more - // important to get accurate hit counts for the unadaptive version - // and each of the different failure types): - _Py_SET_OPCODE(*instr, BINARY_OP); - return; -#endif } SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); STAT_INC(BINARY_OP, failure); + _Py_SET_OPCODE(*instr, BINARY_OP); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(BINARY_OP, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } @@ -2006,23 +1942,13 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP); _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1); int next_opcode = _Py_OPCODE(instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1]); - if (next_opcode != POP_JUMP_IF_FALSE && - next_opcode != POP_JUMP_IF_TRUE) { - // Can't ever combine, so don't don't bother being adaptive (unless - // we're collecting stats, where it's more important to get accurate hit - // counts for the unadaptive version and each of the different failure - // types): -#ifndef Py_STATS - _Py_SET_OPCODE(*instr, COMPARE_OP); - return; -#else + if (next_opcode != POP_JUMP_IF_FALSE && next_opcode != POP_JUMP_IF_TRUE) { if (next_opcode == EXTENDED_ARG) { SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_EXTENDED_ARG); goto failure; } SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_NOT_FOLLOWED_BY_COND_JUMP); goto failure; -#endif } assert(oparg <= Py_GE); int when_to_jump_mask = compare_masks[oparg]; @@ -2063,11 +1989,12 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); failure: STAT_INC(COMPARE_OP, failure); + _Py_SET_OPCODE(*instr, COMPARE_OP); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(COMPARE_OP, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -2113,11 +2040,12 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg) SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq)); failure: STAT_INC(UNPACK_SEQUENCE, failure); + _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE); cache->counter = adaptive_counter_backoff(cache->counter); return; success: STAT_INC(UNPACK_SEQUENCE, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } #ifdef Py_STATS @@ -2193,7 +2121,7 @@ int #endif void -_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr) +_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg) { assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER); _PyForIterCache *cache = (_PyForIterCache *)(instr + 1); @@ -2208,16 +2136,18 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr) _Py_SET_OPCODE(*instr, FOR_ITER_RANGE); goto success; } - else { - SPECIALIZATION_FAIL(FOR_ITER, - _PySpecialization_ClassifyIterator(iter)); - goto failure; + else if (tp == &PyGen_Type && oparg <= SHRT_MAX) { + assert(_Py_OPCODE(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1]) == END_FOR); + _Py_SET_OPCODE(*instr, FOR_ITER_GEN); + goto success; } -failure: + SPECIALIZATION_FAIL(FOR_ITER, + _PySpecialization_ClassifyIterator(iter)); STAT_INC(FOR_ITER, failure); + _Py_SET_OPCODE(*instr, FOR_ITER); cache->counter 
= adaptive_counter_backoff(cache->counter); return; success: STAT_INC(FOR_ITER, success); - cache->counter = miss_counter_start(); + cache->counter = adaptive_counter_cooldown(); } diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index b28156608d1b80..4e7dfb14d19dec 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -1,4 +1,4 @@ -// Auto-generated by Tools/scripts/generate_stdlib_module_names.py. +// Auto-generated by Tools/build/generate_stdlib_module_names.py. // List used to create sys.stdlib_module_names. static const char* _Py_stdlib_module_names[] = { @@ -58,6 +58,7 @@ static const char* _Py_stdlib_module_names[] = { "_py_abc", "_pydecimal", "_pyio", +"_pylong", "_queue", "_random", "_scproxy", @@ -95,9 +96,7 @@ static const char* _Py_stdlib_module_names[] = { "argparse", "array", "ast", -"asynchat", "asyncio", -"asyncore", "atexit", "audioop", "base64", @@ -135,7 +134,6 @@ static const char* _Py_stdlib_module_names[] = { "decimal", "difflib", "dis", -"distutils", "doctest", "email", "encodings", diff --git a/Python/structmember.c b/Python/structmember.c index c7e318811d82b8..1b8be28dcf2eb2 100644 --- a/Python/structmember.c +++ b/Python/structmember.c @@ -49,8 +49,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) break; case T_STRING: if (*(char**)addr == NULL) { - Py_INCREF(Py_None); - v = Py_None; + v = Py_NewRef(Py_None); } else v = PyUnicode_FromString(*(char**)addr); @@ -75,7 +74,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) PyErr_Format(PyExc_AttributeError, "'%.200s' object has no attribute '%s'", tp->tp_name, l->name); - } + } Py_XINCREF(v); break; case T_LONGLONG: @@ -85,8 +84,7 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) v = PyLong_FromUnsignedLongLong(*(unsigned long long *)addr); break; case T_NONE: - v = Py_None; - Py_INCREF(v); + v = Py_NewRef(Py_None); break; default: PyErr_SetString(PyExc_SystemError, "bad memberdescr type"); @@ -247,9 +245,8 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) break; case T_OBJECT: case T_OBJECT_EX: - Py_XINCREF(v); oldv = *(PyObject **)addr; - *(PyObject **)addr = v; + *(PyObject **)addr = Py_XNewRef(v); Py_XDECREF(oldv); break; case T_CHAR: { diff --git a/Python/suggestions.c b/Python/suggestions.c index c336ec8ffffc9c..ad645c7d96fa57 100644 --- a/Python/suggestions.c +++ b/Python/suggestions.c @@ -1,8 +1,11 @@ #include "Python.h" #include "pycore_frame.h" +#include "pycore_runtime.h" // _PyRuntime +#include "pycore_global_objects.h" // _Py_ID() #include "pycore_pyerrors.h" #include "pycore_code.h" // _PyCode_GetVarnames() +#include "stdlib_module_names.h" // _Py_stdlib_module_names #define MAX_CANDIDATE_ITEMS 750 #define MAX_STRING_SIZE 40 @@ -170,12 +173,11 @@ calculate_suggestions(PyObject *dir, suggestion_distance = current_distance; } } - Py_XINCREF(suggestion); - return suggestion; + return Py_XNewRef(suggestion); } static PyObject * -offer_suggestions_for_attribute_error(PyAttributeErrorObject *exc) +get_suggestions_for_attribute_error(PyAttributeErrorObject *exc) { PyObject *name = exc->name; // borrowed reference PyObject *obj = exc->obj; // borrowed reference @@ -195,35 +197,25 @@ offer_suggestions_for_attribute_error(PyAttributeErrorObject *exc) return suggestions; } - static PyObject * -offer_suggestions_for_name_error(PyNameErrorObject *exc) +offer_suggestions_for_attribute_error(PyAttributeErrorObject *exc) { - PyObject *name = exc->name; // borrowed reference - PyTracebackObject *traceback = (PyTracebackObject *) 
exc->traceback; // borrowed reference - // Abort if we don't have a variable name or we have an invalid one - // or if we don't have a traceback to work with - if (name == NULL || !PyUnicode_CheckExact(name) || - traceback == NULL || !Py_IS_TYPE(traceback, &PyTraceBack_Type) - ) { + PyObject* suggestion = get_suggestions_for_attribute_error(exc); + if (suggestion == NULL) { return NULL; } + // Add a trailer ". Did you mean: (...)?" + PyObject* result = PyUnicode_FromFormat(". Did you mean: %R?", suggestion); + Py_DECREF(suggestion); + return result; +} - // Move to the traceback of the exception - while (1) { - PyTracebackObject *next = traceback->tb_next; - if (next == NULL || !Py_IS_TYPE(next, &PyTraceBack_Type)) { - break; - } - else { - traceback = next; - } - } - - PyFrameObject *frame = traceback->tb_frame; - assert(frame != NULL); +static PyObject * +get_suggestions_for_name_error(PyObject* name, PyFrameObject* frame) +{ PyCodeObject *code = PyFrame_GetCode(frame); assert(code != NULL && code->co_localsplusnames != NULL); + PyObject *varnames = _PyCode_GetVarnames(code); if (varnames == NULL) { return NULL; @@ -235,6 +227,24 @@ offer_suggestions_for_name_error(PyNameErrorObject *exc) return NULL; } + // Are we inside a method and the instance has an attribute called 'name'? + if (PySequence_Contains(dir, &_Py_ID(self)) > 0) { + PyObject* locals = PyFrame_GetLocals(frame); + if (!locals) { + goto error; + } + PyObject* self = PyDict_GetItem(locals, &_Py_ID(self)); /* borrowed */ + Py_DECREF(locals); + if (!self) { + goto error; + } + + if (PyObject_HasAttr(self, name)) { + Py_DECREF(dir); + return PyUnicode_FromFormat("self.%S", name); + } + } + PyObject *suggestions = calculate_suggestions(dir, name); Py_DECREF(dir); if (suggestions != NULL) { @@ -259,6 +269,102 @@ offer_suggestions_for_name_error(PyNameErrorObject *exc) Py_DECREF(dir); return suggestions; + +error: + Py_DECREF(dir); + return NULL; +} + +static bool +is_name_stdlib_module(PyObject* name) +{ + const char* the_name = PyUnicode_AsUTF8(name); + Py_ssize_t len = Py_ARRAY_LENGTH(_Py_stdlib_module_names); + for (Py_ssize_t i = 0; i < len; i++) { + if (strcmp(the_name, _Py_stdlib_module_names[i]) == 0) { + return 1; + } + } + return 0; +} + +static PyObject * +offer_suggestions_for_name_error(PyNameErrorObject *exc) +{ + PyObject *name = exc->name; // borrowed reference + PyTracebackObject *traceback = (PyTracebackObject *) exc->traceback; // borrowed reference + // Abort if we don't have a variable name or we have an invalid one + // or if we don't have a traceback to work with + if (name == NULL || !PyUnicode_CheckExact(name) || + traceback == NULL || !Py_IS_TYPE(traceback, &PyTraceBack_Type) + ) { + return NULL; + } + + // Move to the traceback of the exception + while (1) { + PyTracebackObject *next = traceback->tb_next; + if (next == NULL || !Py_IS_TYPE(next, &PyTraceBack_Type)) { + break; + } + else { + traceback = next; + } + } + + PyFrameObject *frame = traceback->tb_frame; + assert(frame != NULL); + + PyObject* suggestion = get_suggestions_for_name_error(name, frame); + bool is_stdlib_module = is_name_stdlib_module(name); + + if (suggestion == NULL && !is_stdlib_module) { + return NULL; + } + + // Add a trailer ". Did you mean: (...)?" + PyObject* result = NULL; + if (!is_stdlib_module) { + result = PyUnicode_FromFormat(". Did you mean: %R?", suggestion); + } else if (suggestion == NULL) { + result = PyUnicode_FromFormat(". Did you forget to import %R?", name); + } else { + result = PyUnicode_FromFormat(". 
Did you mean: %R? Or did you forget to import %R?", suggestion, name); + } + Py_XDECREF(suggestion); + return result; +} + +static PyObject * +offer_suggestions_for_import_error(PyImportErrorObject *exc) +{ + PyObject *mod_name = exc->name; // borrowed reference + PyObject *name = exc->name_from; // borrowed reference + if (name == NULL || mod_name == NULL || name == Py_None || + !PyUnicode_CheckExact(name) || !PyUnicode_CheckExact(mod_name)) { + return NULL; + } + + PyObject* mod = PyImport_GetModule(mod_name); + if (mod == NULL) { + return NULL; + } + + PyObject *dir = PyObject_Dir(mod); + Py_DECREF(mod); + if (dir == NULL) { + return NULL; + } + + PyObject *suggestion = calculate_suggestions(dir, name); + Py_DECREF(dir); + if (!suggestion) { + return NULL; + } + + PyObject* result = PyUnicode_FromFormat(". Did you mean: %R?", suggestion); + Py_DECREF(suggestion); + return result; } // Offer suggestions for a given exception. Returns a python string object containing the @@ -273,6 +379,8 @@ _Py_Offer_Suggestions(PyObject *exception) result = offer_suggestions_for_attribute_error((PyAttributeErrorObject *) exception); } else if (Py_IS_TYPE(exception, (PyTypeObject*)PyExc_NameError)) { result = offer_suggestions_for_name_error((PyNameErrorObject *) exception); + } else if (Py_IS_TYPE(exception, (PyTypeObject*)PyExc_ImportError)) { + result = offer_suggestions_for_import_error((PyImportErrorObject *) exception); } return result; } diff --git a/Python/symtable.c b/Python/symtable.c index 342f5a080d3ddc..fb2bb7d83835de 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -74,8 +74,7 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_table = st; ste->ste_id = k; /* ste owns reference to k */ - Py_INCREF(name); - ste->ste_name = name; + ste->ste_name = Py_NewRef(name); ste->ste_symbols = NULL; ste->ste_varnames = NULL; @@ -286,8 +285,7 @@ _PySymtable_Build(mod_ty mod, PyObject *filename, PyFutureFeatures *future) _PySymtable_Free(st); return NULL; } - Py_INCREF(filename); - st->st_filename = filename; + st->st_filename = Py_NewRef(filename); st->st_future = future; /* Setup recursion depth check counters */ @@ -375,17 +373,17 @@ PySymtable_Lookup(struct symtable *st, void *key) if (k == NULL) return NULL; v = PyDict_GetItemWithError(st->st_blocks, k); + Py_DECREF(k); + if (v) { assert(PySTEntry_Check(v)); - Py_INCREF(v); } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_KeyError, "unknown symbol table entry"); } - Py_DECREF(k); - return (PySTEntryObject *)v; + return (PySTEntryObject *)Py_XNewRef(v); } long @@ -1949,8 +1947,7 @@ symtable_visit_alias(struct symtable *st, alias_ty a) return 0; } else { - store_name = name; - Py_INCREF(store_name); + store_name = Py_NewRef(name); } if (!_PyUnicode_EqualToASCIIString(name, "*")) { int r = symtable_add_def(st, store_name, DEF_IMPORT, LOCATION(a)); @@ -2144,14 +2141,13 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, _PyArena_Free(arena); return NULL; } - PyFutureFeatures *future = _PyFuture_FromAST(mod, filename); - if (future == NULL) { + PyFutureFeatures future; + if (!_PyFuture_FromAST(mod, filename, &future)) { _PyArena_Free(arena); return NULL; } - future->ff_features |= flags->cf_flags; - st = _PySymtable_Build(mod, filename, future); - PyObject_Free((void *)future); + future.ff_features |= flags->cf_flags; + st = _PySymtable_Build(mod, filename, &future); _PyArena_Free(arena); return st; } diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 2c66415ee3d3f4..88f806e616f27e 100644 
--- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -17,7 +17,6 @@ Data members: #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_SetAsyncGenFinalizer() -#include "pycore_code.h" // _Py_QuickenedCount #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_initconfig.h" // _PyStatus_EXCEPTION() #include "pycore_long.h" // _PY_LONG_MAX_STR_DIGITS_THRESHOLD @@ -199,8 +198,7 @@ sys_audit_tstate(PyThreadState *ts, const char *event, eventArgs = _Py_VaBuildValue_SizeT(argFormat, vargs); if (eventArgs && !PyTuple_Check(eventArgs)) { PyObject *argTuple = PyTuple_Pack(1, eventArgs); - Py_DECREF(eventArgs); - eventArgs = argTuple; + Py_SETREF(eventArgs, argTuple); } } else { @@ -432,6 +430,8 @@ sys_addaudithook_impl(PyObject *module, PyObject *hook) if (interp->audit_hooks == NULL) { return NULL; } + /* Avoid having our list of hooks show up in the GC module */ + PyObject_GC_UnTrack(interp->audit_hooks); } if (PyList_Append(interp->audit_hooks, hook) < 0) { @@ -839,8 +839,7 @@ sys_getdefaultencoding_impl(PyObject *module) { _Py_DECLARE_STR(utf_8, "utf-8"); PyObject *ret = &_Py_STR(utf_8); - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } /*[clinic input] @@ -1069,8 +1068,7 @@ sys_gettrace_impl(PyObject *module) if (temp == NULL) temp = Py_None; - Py_INCREF(temp); - return temp; + return Py_NewRef(temp); } static PyObject * @@ -1143,8 +1141,7 @@ sys_getprofile_impl(PyObject *module) if (temp == NULL) temp = Py_None; - Py_INCREF(temp); - return temp; + return Py_NewRef(temp); } @@ -1369,11 +1366,8 @@ sys_get_asyncgen_hooks_impl(PyObject *module) finalizer = Py_None; } - Py_INCREF(firstiter); - PyStructSequence_SET_ITEM(res, 0, firstiter); - - Py_INCREF(finalizer); - PyStructSequence_SET_ITEM(res, 1, finalizer); + PyStructSequence_SET_ITEM(res, 0, Py_NewRef(firstiter)); + PyStructSequence_SET_ITEM(res, 1, Py_NewRef(finalizer)); return res; } @@ -1717,7 +1711,7 @@ sys_get_int_max_str_digits_impl(PyObject *module) /*[clinic end generated code: output=0042f5e8ae0e8631 input=8dab13e2023e60d5]*/ { PyInterpreterState *interp = _PyInterpreterState_GET(); - return PyLong_FromLong(interp->config.int_max_str_digits); + return PyLong_FromLong(interp->long_state.max_str_digits); } /*[clinic input] @@ -1734,7 +1728,7 @@ sys_set_int_max_str_digits_impl(PyObject *module, int maxdigits) { PyThreadState *tstate = _PyThreadState_GET(); if ((!maxdigits) || (maxdigits >= _PY_LONG_MAX_STR_DIGITS_THRESHOLD)) { - tstate->interp->config.int_max_str_digits = maxdigits; + tstate->interp->long_state.max_str_digits = maxdigits; Py_RETURN_NONE; } else { PyErr_Format( @@ -1806,8 +1800,7 @@ sys_getsizeof(PyObject *self, PyObject *args, PyObject *kwds) /* Has a default value been given */ if (dflt != NULL && _PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { _PyErr_Clear(tstate); - Py_INCREF(dflt); - return dflt; + return Py_NewRef(dflt); } else return NULL; @@ -1855,17 +1848,6 @@ sys_gettotalrefcount_impl(PyObject *module) #endif /* Py_REF_DEBUG */ -/*[clinic input] -sys._getquickenedcount -> Py_ssize_t -[clinic start generated code]*/ - -static Py_ssize_t -sys__getquickenedcount_impl(PyObject *module) -/*[clinic end generated code: output=1ab259e7f91248a2 input=249d448159eca912]*/ -{ - return _Py_QuickenedCount; -} - /*[clinic input] sys.getallocatedblocks -> Py_ssize_t @@ -2127,12 +2109,12 @@ sys.activate_stack_trampoline backend: str / -Activate the perf profiler trampoline. +Activate stack profiler trampoline *backend*. 
[clinic start generated code]*/ static PyObject * sys_activate_stack_trampoline_impl(PyObject *module, const char *backend) -/*[clinic end generated code: output=5783cdeb51874b43 input=b09020e3a17c78c5]*/ +/*[clinic end generated code: output=5783cdeb51874b43 input=a12df928758a82b4]*/ { #ifdef PY_HAVE_PERF_TRAMPOLINE if (strcmp(backend, "perf") == 0) { @@ -2163,12 +2145,14 @@ sys_activate_stack_trampoline_impl(PyObject *module, const char *backend) /*[clinic input] sys.deactivate_stack_trampoline -Dectivate the perf profiler trampoline. +Deactivate the current stack profiler trampoline backend. + +If no stack profiler is activated, this function has no effect. [clinic start generated code]*/ static PyObject * sys_deactivate_stack_trampoline_impl(PyObject *module) -/*[clinic end generated code: output=b50da25465df0ef1 input=491f4fc1ed615736]*/ +/*[clinic end generated code: output=b50da25465df0ef1 input=9f629a6be9fe7fc8]*/ { if (_PyPerfTrampoline_Init(0) < 0) { return NULL; @@ -2179,12 +2163,12 @@ sys_deactivate_stack_trampoline_impl(PyObject *module) /*[clinic input] sys.is_stack_trampoline_active -Returns *True* if the perf profiler trampoline is active. +Return *True* if a stack profiler trampoline is active. [clinic start generated code]*/ static PyObject * sys_is_stack_trampoline_active_impl(PyObject *module) -/*[clinic end generated code: output=ab2746de0ad9d293 input=061fa5776ac9dd59]*/ +/*[clinic end generated code: output=ab2746de0ad9d293 input=29616b7bf6a0b703]*/ { #ifdef PY_HAVE_PERF_TRAMPOLINE if (_PyIsPerfTrampolineActive()) { @@ -2214,7 +2198,6 @@ static PyMethodDef sys_methods[] = { SYS_GETALLOCATEDBLOCKS_METHODDEF SYS_GETFILESYSTEMENCODING_METHODDEF SYS_GETFILESYSTEMENCODEERRORS_METHODDEF - SYS__GETQUICKENEDCOUNT_METHODDEF #ifdef Py_TRACE_REFS {"getobjects", _Py_GetObjects, METH_VARARGS}, #endif @@ -2270,8 +2253,9 @@ list_builtin_module_names(void) if (list == NULL) { return NULL; } - for (Py_ssize_t i = 0; PyImport_Inittab[i].name != NULL; i++) { - PyObject *name = PyUnicode_FromString(PyImport_Inittab[i].name); + struct _inittab *inittab = _PyRuntime.imports.inittab; + for (Py_ssize_t i = 0; inittab[i].name != NULL; i++) { + PyObject *name = PyUnicode_FromString(inittab[i].name); if (name == NULL) { goto error; } @@ -2583,8 +2567,7 @@ _PySys_AddXOptionWithError(const wchar_t *s) const wchar_t *name_end = wcschr(s, L'='); if (!name_end) { name = PyUnicode_FromWideChar(s, -1); - value = Py_True; - Py_INCREF(value); + value = Py_NewRef(Py_True); } else { name = PyUnicode_FromWideChar(s, name_end - s); @@ -3039,8 +3022,7 @@ make_emscripten_info(void) } PyStructSequence_SET_ITEM(emscripten_info, pos++, oua); } else { - Py_INCREF(Py_None); - PyStructSequence_SET_ITEM(emscripten_info, pos++, Py_None); + PyStructSequence_SET_ITEM(emscripten_info, pos++, Py_NewRef(Py_None)); } #define SetBoolItem(flag) \ @@ -3237,8 +3219,7 @@ sys_add_xoption(PyObject *opts, const wchar_t *s) const wchar_t *name_end = wcschr(s, L'='); if (!name_end) { name = PyUnicode_FromWideChar(s, -1); - value = Py_True; - Py_INCREF(value); + value = Py_NewRef(Py_True); } else { name = PyUnicode_FromWideChar(s, name_end - s); @@ -3415,8 +3396,7 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p) if (sysdict == NULL) { goto error; } - Py_INCREF(sysdict); - interp->sysdict = sysdict; + interp->sysdict = Py_NewRef(sysdict); if (PyDict_SetItemString(sysdict, "modules", interp->modules) < 0) { goto error; diff --git a/Python/thread.c b/Python/thread.c index 8c8a4e81895eb6..3c1e78ed1bca83 100644 --- 
a/Python/thread.c +++ b/Python/thread.c @@ -207,8 +207,7 @@ PyThread_GetInfo(void) if (value == NULL) #endif { - Py_INCREF(Py_None); - value = Py_None; + value = Py_NewRef(Py_None); } PyStructSequence_SET_ITEM(threadinfo, pos++, value); return threadinfo; diff --git a/Python/traceback.c b/Python/traceback.c index de658b9103180e..da26c9b260a3bd 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -52,10 +52,8 @@ tb_create_raw(PyTracebackObject *next, PyFrameObject *frame, int lasti, } tb = PyObject_GC_New(PyTracebackObject, &PyTraceBack_Type); if (tb != NULL) { - Py_XINCREF(next); - tb->tb_next = next; - Py_XINCREF(frame); - tb->tb_frame = frame; + tb->tb_next = (PyTracebackObject*)Py_XNewRef(next); + tb->tb_frame = (PyFrameObject*)Py_XNewRef(frame); tb->tb_lasti = lasti; tb->tb_lineno = lineno; PyObject_GC_Track(tb); @@ -106,8 +104,7 @@ tb_next_get(PyTracebackObject *self, void *Py_UNUSED(_)) if (!ret) { ret = Py_None; } - Py_INCREF(ret); - return ret; + return Py_NewRef(ret); } static int @@ -139,10 +136,7 @@ tb_next_set(PyTracebackObject *self, PyObject *new_next, void *Py_UNUSED(_)) cursor = cursor->tb_next; } - PyObject *old_next = (PyObject*)self->tb_next; - Py_XINCREF(new_next); - self->tb_next = (PyTracebackObject *)new_next; - Py_XDECREF(old_next); + Py_XSETREF(self->tb_next, (PyTracebackObject *)Py_XNewRef(new_next)); return 0; } @@ -522,8 +516,7 @@ display_source_line_with_margin(PyObject *f, PyObject *filename, int lineno, int } if (line) { - Py_INCREF(lineobj); - *line = lineobj; + *line = Py_NewRef(lineobj); } /* remove the indentation of the line */ @@ -538,8 +531,7 @@ display_source_line_with_margin(PyObject *f, PyObject *filename, int lineno, int PyObject *truncated; truncated = PyUnicode_Substring(lineobj, i, PyUnicode_GET_LENGTH(lineobj)); if (truncated) { - Py_DECREF(lineobj); - lineobj = truncated; + Py_SETREF(lineobj, truncated); } else { PyErr_Clear(); } @@ -705,8 +697,13 @@ extract_anchors_from_line(PyObject *filename, PyObject *line, done: if (res > 0) { - *left_anchor += start_offset; - *right_anchor += start_offset; + // Normalize the AST offsets to byte offsets and adjust them with the + // start of the actual line (instead of the source code segment). + assert(segment != NULL); + assert(*left_anchor >= 0); + assert(*right_anchor >= 0); + *left_anchor = _PyPegen_byte_offset_to_character_offset(segment, *left_anchor) + start_offset; + *right_anchor = _PyPegen_byte_offset_to_character_offset(segment, *right_anchor) + start_offset; } Py_XDECREF(segment); if (arena) { @@ -1229,6 +1226,15 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) if (frame == NULL) { break; } + if (frame->owner == FRAME_OWNED_BY_CSTACK) { + /* Trampoline frame */ + frame = frame->previous; + } + if (frame == NULL) { + break; + } + /* Can't have more than one shim frame in a row */ + assert(frame->owner != FRAME_OWNED_BY_CSTACK); depth++; } } diff --git a/README.rst b/README.rst index 4ae5dfd1bb2525..1fa019473643d9 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.12.0 alpha 0 +This is Python version 3.12.0 alpha 2 ===================================== .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg @@ -65,7 +65,7 @@ Building a complete Python installation requires the use of various additional third-party libraries, depending on your build platform and configure options. Not all standard library modules are buildable or useable on all platforms. 
Refer to the -`Install dependencies `_ +`Install dependencies `_ section of the `Developer Guide`_ for current detailed information on dependencies for various Linux distributions and macOS. @@ -135,7 +135,7 @@ What's New We have a comprehensive overview of the changes in the `What's New in Python 3.12 `_ document. For a more detailed change log, read `Misc/NEWS -`_, but a full +`_, but a full accounting of changes can only be gleaned from the `commit history `_. @@ -189,7 +189,7 @@ your environment, you can `file a bug report `_ and include relevant output from that command to show the issue. -See `Running & Writing Tests `_ +See `Running & Writing Tests `_ for more on running tests. Installing multiple versions diff --git a/Tools/scripts/check_extension_modules.py b/Tools/build/check_extension_modules.py similarity index 100% rename from Tools/scripts/check_extension_modules.py rename to Tools/build/check_extension_modules.py diff --git a/Tools/scripts/deepfreeze.py b/Tools/build/deepfreeze.py similarity index 95% rename from Tools/scripts/deepfreeze.py rename to Tools/build/deepfreeze.py index d9c6030fc17c07..2eef649437a680 100644 --- a/Tools/scripts/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -114,9 +114,8 @@ def __init__(self, file: TextIO) -> None: self.file = file self.cache: Dict[tuple[type, object, str], str] = {} self.hits, self.misses = 0, 0 - self.patchups: list[str] = [] - self.deallocs: list[str] = [] - self.interns: list[str] = [] + self.finis: list[str] = [] + self.inits: list[str] = [] self.write('#include "Python.h"') self.write('#include "internal/pycore_gc.h"') self.write('#include "internal/pycore_code.h"') @@ -257,7 +256,6 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_names = {co_names},") self.write(f".co_exceptiontable = {co_exceptiontable},") self.field(code, "co_flags") - self.write(".co_warmup = QUICKENING_INITIAL_WARMUP_VALUE,") self.write("._co_linearray_entry_size = 0,") self.field(code, "co_argcount") self.field(code, "co_posonlyargcount") @@ -276,7 +274,7 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_name = {co_name},") self.write(f".co_qualname = {co_qualname},") self.write(f".co_linetable = {co_linetable},") - self.write(f"._co_code = NULL,") + self.write(f"._co_cached = NULL,") self.write("._co_linearray = NULL,") self.write(f".co_code_adaptive = {co_code_adaptive},") for i, op in enumerate(code.co_code[::2]): @@ -284,8 +282,8 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f"._co_firsttraceable = {i},") break name_as_code = f"(PyCodeObject *)&{name}" - self.deallocs.append(f"_PyStaticCode_Dealloc({name_as_code});") - self.interns.append(f"_PyStaticCode_InternStrings({name_as_code})") + self.finis.append(f"_PyStaticCode_Fini({name_as_code});") + self.inits.append(f"_PyStaticCode_Init({name_as_code})") return f"& {name}.ob_base.ob_base" def generate_tuple(self, name: str, t: Tuple[object, ...]) -> str: @@ -373,11 +371,7 @@ def generate_frozenset(self, name: str, fs: FrozenSet[object]) -> str: def generate_file(self, module: str, code: object)-> None: module = module.replace(".", "_") self.generate(f"{module}_toplevel", code) - with self.block(f"static void {module}_do_patchups(void)"): - for p in self.patchups: - self.write(p) - self.patchups.clear() - self.write(EPILOGUE.replace("%%NAME%%", module)) + self.write(EPILOGUE.format(name=module)) def generate(self, name: str, obj: object) -> str: # Use repr() in the key to distinguish -0.0 from 
+0.0 @@ -421,11 +415,10 @@ def generate(self, name: str, obj: object) -> str: EPILOGUE = """ PyObject * -_Py_get_%%NAME%%_toplevel(void) -{ - %%NAME%%_do_patchups(); - return Py_NewRef((PyObject *) &%%NAME%%_toplevel); -} +_Py_get_{name}_toplevel(void) +{{ + return Py_NewRef((PyObject *) &{name}_toplevel); +}} """ FROZEN_COMMENT_C = "/* Auto-generated by Programs/_freeze_module.c */" @@ -461,10 +454,10 @@ def generate(args: list[str], output: TextIO) -> None: code = compile(fd.read(), f"", "exec") printer.generate_file(modname, code) with printer.block(f"void\n_Py_Deepfreeze_Fini(void)"): - for p in printer.deallocs: + for p in printer.finis: printer.write(p) with printer.block(f"int\n_Py_Deepfreeze_Init(void)"): - for p in printer.interns: + for p in printer.inits: with printer.block(f"if ({p} < 0)"): printer.write("return -1;") printer.write("return 0;") diff --git a/Tools/scripts/freeze_modules.py b/Tools/build/freeze_modules.py similarity index 99% rename from Tools/scripts/freeze_modules.py rename to Tools/build/freeze_modules.py index aa1e4fe2ea0f44..810224b28f2faa 100644 --- a/Tools/scripts/freeze_modules.py +++ b/Tools/build/freeze_modules.py @@ -581,7 +581,7 @@ def regen_makefile(modules): frozenfiles = [] rules = [''] deepfreezerules = ["Python/deepfreeze/deepfreeze.c: $(DEEPFREEZE_DEPS)", - "\t$(PYTHON_FOR_FREEZE) $(srcdir)/Tools/scripts/deepfreeze.py \\"] + "\t$(PYTHON_FOR_FREEZE) $(srcdir)/Tools/build/deepfreeze.py \\"] for src in _iter_sources(modules): frozen_header = relpath_for_posix_display(src.frozenfile, ROOT_DIR) frozenfiles.append(f'\t\t{frozen_header} \\') @@ -646,7 +646,7 @@ def regen_pcbuild(modules): projlines = [] filterlines = [] corelines = [] - deepfreezerules = ['\t None: identifiers, strings = get_identifiers_and_strings() generate_global_strings(identifiers, strings) - generate_runtime_init(identifiers, strings) + generated_immortal_objects = generate_runtime_init(identifiers, strings) + generate_static_strings_initializer(identifiers, strings) + generate_global_object_finalizers(generated_immortal_objects) if __name__ == '__main__': diff --git a/Tools/scripts/generate_levenshtein_examples.py b/Tools/build/generate_levenshtein_examples.py similarity index 97% rename from Tools/scripts/generate_levenshtein_examples.py rename to Tools/build/generate_levenshtein_examples.py index 5a8360fff7315a..778eb458c541c0 100644 --- a/Tools/scripts/generate_levenshtein_examples.py +++ b/Tools/build/generate_levenshtein_examples.py @@ -1,7 +1,7 @@ """Generate 10,000 unique examples for the Levenshtein short-circuit tests.""" import argparse -from functools import cache +from functools import lru_cache import json import os.path from random import choices, randrange @@ -22,7 +22,7 @@ def _substitution_cost(ch_a, ch_b): return _MOVE_COST -@cache +@lru_cache(None) def levenshtein(a, b): if not a or not b: return (len(a) + len(b)) * _MOVE_COST diff --git a/Tools/scripts/generate_opcode_h.py b/Tools/build/generate_opcode_h.py similarity index 97% rename from Tools/scripts/generate_opcode_h.py rename to Tools/build/generate_opcode_h.py index 9ff264af9cdc2b..174573a3c64b08 100644 --- a/Tools/scripts/generate_opcode_h.py +++ b/Tools/build/generate_opcode_h.py @@ -3,7 +3,7 @@ import sys import tokenize -SCRIPT_NAME = "Tools/scripts/generate_opcode_h.py" +SCRIPT_NAME = "Tools/build/generate_opcode_h.py" PYTHON_OPCODE = "Lib/opcode.py" header = f""" @@ -108,7 +108,7 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna opname_including_specialized[255] = 
'DO_TRACING' used[255] = True - with (open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj): + with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj: fobj.write(header) iobj.write(internal_header) diff --git a/Tools/scripts/generate_re_casefix.py b/Tools/build/generate_re_casefix.py similarity index 95% rename from Tools/scripts/generate_re_casefix.py rename to Tools/build/generate_re_casefix.py index 625b0658d97d1b..b57ac07426c27c 100755 --- a/Tools/scripts/generate_re_casefix.py +++ b/Tools/build/generate_re_casefix.py @@ -5,6 +5,8 @@ import sys import unicodedata +SCRIPT_NAME = 'Tools/build/generate_re_casefix.py' + def update_file(file, content): try: with open(file, 'r', encoding='utf-8') as fobj: @@ -16,8 +18,8 @@ def update_file(file, content): fobj.write(content) return True -re_casefix_template = """\ -# Auto-generated by Tools/scripts/generate_re_casefix.py. +re_casefix_template = f"""\ +# Auto-generated by {SCRIPT_NAME}. # Maps the code of lowercased character to codes of different lowercased # characters which have the same uppercase. diff --git a/Tools/scripts/generate_sre_constants.py b/Tools/build/generate_sre_constants.py similarity index 94% rename from Tools/scripts/generate_sre_constants.py rename to Tools/build/generate_sre_constants.py index 72715076d29ab6..abea069c8bc0c5 100755 --- a/Tools/scripts/generate_sre_constants.py +++ b/Tools/build/generate_sre_constants.py @@ -1,6 +1,8 @@ #! /usr/bin/env python3 # This script generates Modules/_sre/sre_constants.h from Lib/re/_constants.py. +SCRIPT_NAME = 'Tools/build/generate_sre_constants.py' + def update_file(file, content): try: @@ -13,13 +15,13 @@ def update_file(file, content): fobj.write(content) return True -sre_constants_header = """\ +sre_constants_header = f"""\ /* * Secret Labs' Regular Expression Engine * * regular expression matching engine * - * Auto-generated by Tools/scripts/generate_sre_constants.py from + * Auto-generated by {SCRIPT_NAME} from * Lib/re/_constants.py. * * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. 
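The generator scripts above, and the ones that follow, all apply the same refactoring: each now defines a SCRIPT_NAME constant with its new Tools/build/ location and interpolates it into the header of the file it regenerates, instead of hard-coding the old Tools/scripts/ path. A minimal sketch of that pattern, assuming the same update_file helper these generators already use (the generate_example.py path, header_template name, and output filename below are illustrative, not taken from the change):

# Hypothetical script path; the real generators point SCRIPT_NAME at their
# own location under Tools/build/.
SCRIPT_NAME = 'Tools/build/generate_example.py'

# Interpolate the script path once, so the "Auto-generated by ..." banner in
# the regenerated file stays correct if the script is ever moved again.
header_template = f"""\
# Auto-generated by {SCRIPT_NAME}.
# Do not edit this file directly.
"""

def update_file(file, content):
    # Same idempotent helper the generators use: rewrite the target only when
    # the regenerated content actually differs.
    try:
        with open(file, 'r', encoding='utf-8') as fobj:
            if fobj.read() == content:
                return False
    except (IOError, FileNotFoundError):
        pass
    with open(file, 'w', encoding='utf-8') as fobj:
        fobj.write(content)
    return True

if __name__ == '__main__':
    changed = update_file('example_generated.py',
                          header_template + '# ... generated body ...\n')
    print('regenerated' if changed else 'unchanged')
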
diff --git a/Tools/scripts/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py similarity index 95% rename from Tools/scripts/generate_stdlib_module_names.py rename to Tools/build/generate_stdlib_module_names.py index 92100bd06509a5..c8a23f41fdd475 100644 --- a/Tools/scripts/generate_stdlib_module_names.py +++ b/Tools/build/generate_stdlib_module_names.py @@ -10,6 +10,8 @@ from check_extension_modules import ModuleChecker +SCRIPT_NAME = 'Tools/build/generate_stdlib_module_names.py' + SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) STDLIB_PATH = os.path.join(SRC_DIR, 'Lib') @@ -27,13 +29,14 @@ '_ctypes_test', '_testbuffer', '_testcapi', + '_testclinic', '_testconsole', '_testimportmultiple', '_testinternalcapi', '_testmultiphase', + '_testsinglephase', '_xxsubinterpreters', '_xxtestfuzz', - 'distutils.tests', 'idlelib.idle_test', 'test', 'xxlimited', @@ -112,7 +115,7 @@ def list_modules(): def write_modules(fp, names): - print("// Auto-generated by Tools/scripts/generate_stdlib_module_names.py.", + print(f"// Auto-generated by {SCRIPT_NAME}.", file=fp) print("// List used to create sys.stdlib_module_names.", file=fp) print(file=fp) diff --git a/Tools/scripts/generate_token.py b/Tools/build/generate_token.py similarity index 94% rename from Tools/scripts/generate_token.py rename to Tools/build/generate_token.py index d8be8b93de1416..fc12835b7762ad 100755 --- a/Tools/scripts/generate_token.py +++ b/Tools/build/generate_token.py @@ -7,6 +7,8 @@ # Lib/token.py +SCRIPT_NAME = 'Tools/build/generate_token.py' +AUTO_GENERATED_BY_SCRIPT = f'Auto-generated by {SCRIPT_NAME}' NT_OFFSET = 256 def load_tokens(path): @@ -47,8 +49,10 @@ def update_file(file, content): return True -token_h_template = """\ -/* Auto-generated by Tools/scripts/generate_token.py */ +token_h_template = f"""\ +/* {AUTO_GENERATED_BY_SCRIPT} */ +""" +token_h_template += """\ /* Token types */ #ifndef Py_INTERNAL_TOKEN_H @@ -105,8 +109,10 @@ def make_h(infile, outfile='Include/internal/pycore_token.h'): print("%s regenerated from %s" % (outfile, infile)) -token_c_template = """\ -/* Auto-generated by Tools/scripts/generate_token.py */ +token_c_template = f"""\ +/* {AUTO_GENERATED_BY_SCRIPT} */ +""" +token_c_template += """\ #include "Python.h" #include "pycore_token.h" @@ -189,8 +195,8 @@ def make_c(infile, outfile='Parser/token.c'): print("%s regenerated from %s" % (outfile, infile)) -token_inc_template = """\ -.. Auto-generated by Tools/scripts/generate_token.py +token_inc_template = f"""\ +.. {AUTO_GENERATED_BY_SCRIPT} %s .. 
data:: N_TOKENS @@ -213,10 +219,11 @@ def make_rst(infile, outfile='Doc/library/token-list.inc'): print("%s regenerated from %s" % (outfile, infile)) -token_py_template = '''\ +token_py_template = f'''\ """Token constants.""" -# Auto-generated by Tools/scripts/generate_token.py - +# {AUTO_GENERATED_BY_SCRIPT} +''' +token_py_template += ''' __all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF'] %s diff --git a/Tools/scripts/parse_html5_entities.py b/Tools/build/parse_html5_entities.py similarity index 97% rename from Tools/scripts/parse_html5_entities.py rename to Tools/build/parse_html5_entities.py index 1e5bdad2165548..d2bf29091030a5 100755 --- a/Tools/scripts/parse_html5_entities.py +++ b/Tools/build/parse_html5_entities.py @@ -18,6 +18,7 @@ from urllib.request import urlopen from html.entities import html5 +SCRIPT_NAME = 'Tools/build/parse_html5_entities.py' PAGE_URL = 'https://html.spec.whatwg.org/multipage/named-characters.html' ENTITIES_URL = 'https://html.spec.whatwg.org/entities.json' HTML5_SECTION_START = '# HTML5 named character references' @@ -69,7 +70,7 @@ def write_items(entities, file=sys.stdout): keys = sorted(entities.keys()) keys = sorted(keys, key=str.lower) print(HTML5_SECTION_START, file=file) - print(f'# Generated by {sys.argv[0]!r}\n' + print(f'# Generated by {SCRIPT_NAME}\n' f'# from {ENTITIES_URL} and\n' f'# {PAGE_URL}.\n' f'# Map HTML5 named character references to the ' diff --git a/Tools/scripts/smelly.py b/Tools/build/smelly.py similarity index 100% rename from Tools/scripts/smelly.py rename to Tools/build/smelly.py diff --git a/Tools/scripts/stable_abi.py b/Tools/build/stable_abi.py old mode 100755 new mode 100644 similarity index 99% rename from Tools/scripts/stable_abi.py rename to Tools/build/stable_abi.py index d557e102ea8e69..88db93e935e9be --- a/Tools/scripts/stable_abi.py +++ b/Tools/build/stable_abi.py @@ -24,6 +24,7 @@ import re import csv +SCRIPT_NAME = 'Tools/build/stable_abi.py' MISSING = object() EXCLUDED_HEADERS = { @@ -182,11 +183,12 @@ def _decorator(func): def gen_python3dll(manifest, args, outfile): """Generate/check the source for the Windows stable ABI library""" write = partial(print, file=outfile) - write(textwrap.dedent(r""" + content = f""" /* Re-export stable Python ABI */ - /* Generated by Tools/scripts/stable_abi.py */ - + /* Generated by {SCRIPT_NAME} */ + """ + content += r""" #ifdef _M_IX86 #define DECORATE "_" #else @@ -197,7 +199,8 @@ def gen_python3dll(manifest, args, outfile): __pragma(comment(linker, "/EXPORT:" DECORATE #name "=" PYTHON_DLL_NAME "." #name)) #define EXPORT_DATA(name) \ __pragma(comment(linker, "/EXPORT:" DECORATE #name "=" PYTHON_DLL_NAME "." 
#name ",DATA")) - """)) + """ + write(textwrap.dedent(content)) def sort_key(item): return item.name.lower() diff --git a/Tools/scripts/umarshal.py b/Tools/build/umarshal.py similarity index 100% rename from Tools/scripts/umarshal.py rename to Tools/build/umarshal.py diff --git a/Tools/scripts/update_file.py b/Tools/build/update_file.py similarity index 100% rename from Tools/scripts/update_file.py rename to Tools/build/update_file.py diff --git a/Tools/scripts/verify_ensurepip_wheels.py b/Tools/build/verify_ensurepip_wheels.py similarity index 100% rename from Tools/scripts/verify_ensurepip_wheels.py rename to Tools/build/verify_ensurepip_wheels.py diff --git a/Tools/c-analyzer/c_parser/_state_machine.py b/Tools/c-analyzer/c_parser/_state_machine.py index 53cbb13e7c4edb..875323188aadfd 100644 --- a/Tools/c-analyzer/c_parser/_state_machine.py +++ b/Tools/c-analyzer/c_parser/_state_machine.py @@ -96,7 +96,7 @@ def parse(srclines): # # end matched parens # ''') -''' +r''' # for loop (?: \s* \b for diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 78241f0ea08ac8..769e27432f2837 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -82,6 +82,10 @@ def clean_lines(text): # generated Python/deepfreeze/*.c Python/frozen_modules/*.h +Python/generated_cases.c.h + +# not actually source +Python/bytecodes.c # @end=conf@ ''') @@ -285,6 +289,7 @@ def clean_lines(text): SAME = { _abs('Include/*.h'): [_abs('Include/cpython/')], + _abs('Python/ceval.c'): ['Python/generated_cases.c.h'], } MAX_SIZES = { @@ -311,6 +316,7 @@ def clean_lines(text): _abs('Python/frozen_modules/*.h'): (20_000, 500), _abs('Python/opcode_targets.h'): (10_000, 500), _abs('Python/stdlib_module_names.h'): (5_000, 500), + _abs('Python/importlib.h'): (200_000, 5000), # These large files are currently ignored (see above). 
_abs('Modules/_ssl_data.h'): (80_000, 10_000), diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 196d62d361b679..cc465134a9e065 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -298,186 +298,34 @@ Objects/setobject.c - _dummy_struct - Objects/setobject.c - _PySet_Dummy - Objects/sliceobject.c - _Py_EllipsisObject - -#----------------------- -# cached - initialized once - -# manually cached PyUnicodeObject -Python/ast_unparse.c - _str_replace_inf - - -# holds statically-initialized strings -Objects/typeobject.c - slotdefs - - -# other -Objects/typeobject.c object___reduce_ex___impl objreduce - -Objects/unicodeobject.c - _string_module - -Objects/unicodeobject.c - interned - - -#----------------------- -# other - -# initialized once -Python/context.c - _token_missing - -Python/fileutils.c - _Py_open_cloexec_works - -Python/hamt.c - _empty_bitmap_node - -Python/hamt.c - _empty_hamt - - -# state -Objects/typeobject.c resolve_slotdups pname - -Python/import.c - extensions - - ################################## # global non-objects to fix in core code #----------------------- -# initialized/set once +# effectively-const but initialized lazily -# pre-allocated buffer -Modules/getbuildinfo.c Py_GetBuildInfo buildinfo - - -# during init -Objects/unicodeobject.c - bloom_linebreak - -Python/bootstrap_hash.c - _Py_HashSecret_Initialized - -Python/bootstrap_hash.c py_getrandom getrandom_works - -Python/initconfig.c - _Py_global_config_int_max_str_digits - -Python/initconfig.c - Py_DebugFlag - -Python/initconfig.c - Py_UTF8Mode - -Python/initconfig.c - Py_DebugFlag - -Python/initconfig.c - Py_VerboseFlag - -Python/initconfig.c - Py_QuietFlag - -Python/initconfig.c - Py_InteractiveFlag - -Python/initconfig.c - Py_InspectFlag - -Python/initconfig.c - Py_OptimizeFlag - -Python/initconfig.c - Py_NoSiteFlag - -Python/initconfig.c - Py_BytesWarningFlag - -Python/initconfig.c - Py_FrozenFlag - -Python/initconfig.c - Py_IgnoreEnvironmentFlag - -Python/initconfig.c - Py_DontWriteBytecodeFlag - -Python/initconfig.c - Py_NoUserSiteDirectory - -Python/initconfig.c - Py_UnbufferedStdioFlag - -Python/initconfig.c - Py_HashRandomizationFlag - -Python/initconfig.c - Py_IsolatedFlag - -Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag - -Python/initconfig.c - Py_LegacyWindowsStdioFlag - -Python/initconfig.c - orig_argv - -Python/pyhash.c - _Py_HashSecret - -Python/pylifecycle.c - runtime_initialized - -Python/sysmodule.c - _PySys_ImplCacheTag - -Python/sysmodule.c - _PySys_ImplName - -Python/sysmodule.c - _preinit_warnoptions - -Python/sysmodule.c - _preinit_xoptions - -Python/thread.c - initialized - -Python/thread_pthread.h - condattr_monotonic - -Python/thread_pthread.h init_condattr ca - - -# set by embedders during init -Python/initconfig.c - _Py_StandardStreamEncoding - -Python/initconfig.c - _Py_StandardStreamErrors - - -# lazy -Objects/floatobject.c - double_format - -Objects/floatobject.c - float_format - -Objects/longobject.c long_from_non_binary_base log_base_BASE - -Objects/longobject.c long_from_non_binary_base convwidth_base - -Objects/longobject.c long_from_non_binary_base convmultmax_base - -Objects/perf_trampoline.c - perf_map_file - -Objects/unicodeobject.c - ucnhash_capi - -Parser/action_helpers.c _PyPegen_dummy_name cache - +# idempotent Python/dtoa.c - p5s - -Python/fileutils.c - force_ascii - -Python/fileutils.c set_inheritable ioctl_works - -Python/import.c - import_lock - 
-Python/import.c import_find_and_load header - +Objects/obmalloc.c new_arena debug_stats - -#----------------------- -# unlikely to change after init (or main thread) - -# through C-API -Python/frozen.c - PyImport_FrozenModules - -Python/frozen.c - _PyImport_FrozenAliases - -Python/frozen.c - _PyImport_FrozenBootstrap - -Python/frozen.c - _PyImport_FrozenStdlib - -Python/frozen.c - _PyImport_FrozenTest - -Python/import.c - inittab_copy - -Python/import.c - PyImport_Inittab - -Python/preconfig.c - Py_FileSystemDefaultEncoding - -Python/preconfig.c - Py_HasFileSystemDefaultEncoding - -Python/preconfig.c - Py_FileSystemDefaultEncodeErrors - -Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors - - -# REPL -Parser/myreadline.c - _PyOS_ReadlineLock - -Parser/myreadline.c - _PyOS_ReadlineTState - -Parser/myreadline.c - PyOS_InputHook - -Parser/myreadline.c - PyOS_ReadlineFunctionPointer - - -# handling C argv -Python/getopt.c - _PyOS_optarg - -Python/getopt.c - _PyOS_opterr - -Python/getopt.c - _PyOS_optind - -Python/getopt.c - opt_ptr - -Python/pathconfig.c - _Py_path_config - +# others +Python/perf_trampoline.c - perf_map_file - +Objects/unicodeobject.c - ucnhash_capi - #----------------------- # state -# allocator -Objects/obmalloc.c - _PyObject_Arena - -Objects/obmalloc.c - _Py_tracemalloc_config - -Objects/obmalloc.c - arena_map_bot_count - -Objects/obmalloc.c - arena_map_mid_count - -Objects/obmalloc.c - arena_map_root - -Objects/obmalloc.c - arenas - -Objects/obmalloc.c - maxarenas - -Objects/obmalloc.c - narenas_currently_allocated - -Objects/obmalloc.c - narenas_highwater - -Objects/obmalloc.c - nfp2lasta - -Objects/obmalloc.c - ntimes_arena_allocated - -Objects/obmalloc.c - raw_allocated_blocks - -Objects/obmalloc.c - unused_arena_objects - -Objects/obmalloc.c - usable_arenas - -Objects/obmalloc.c new_arena debug_stats - - -# pre-allocated memory -Python/dtoa.c - freelist - -Python/dtoa.c - private_mem - - # local buffer -Python/getversion.c Py_GetVersion version - Python/suggestions.c levenshtein_distance buffer - -# linked list -Python/dtoa.c - pmem_next - -Python/getargs.c - static_arg_parsers - - # other -Objects/dictobject.c - _pydict_global_version - -Objects/dictobject.c - next_dict_keys_version - -Objects/funcobject.c - next_func_version - -Objects/moduleobject.c - max_module_number - Objects/object.c - _Py_RefTotal - -Objects/perf_trampoline.c - perf_status - -Objects/perf_trampoline.c - extra_code_index - -Objects/perf_trampoline.c - code_arena - -Objects/perf_trampoline.c - trampoline_api - -Objects/typeobject.c - next_version_tag - -Objects/typeobject.c resolve_slotdups ptrs - -Parser/pegen.c - memo_statistics - -Python/bootstrap_hash.c - urandom_cache - -Python/ceval_gil.c make_pending_calls busy - -Python/ceval.c _PyEval_SetProfile reentrant - -Python/ceval.c _PyEval_SetTrace reentrant - -Python/import.c - import_lock_level - -Python/import.c - import_lock_thread - -Python/import.c import_find_and_load accumulated - -Python/import.c import_find_and_load import_level - -Python/modsupport.c - _Py_PackageContext - +Python/perf_trampoline.c - perf_status - +Python/perf_trampoline.c - extra_code_index - +Python/perf_trampoline.c - code_arena - +Python/perf_trampoline.c - trampoline_api - Python/thread_pthread_stubs.h - py_tls_entries - -Python/pyfpe.c - PyFPE_counter - -Python/pylifecycle.c _Py_FatalErrorFormat reentrant - -Python/pylifecycle.c - _Py_UnhandledKeyboardInterrupt - -Python/pylifecycle.c fatal_error reentrant - -Python/specialize.c - _Py_QuickenedCount - 
################################## @@ -512,6 +360,7 @@ Modules/_testcapi/vectorcall.c - MethodDescriptorNopGet_Type - Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type - Modules/itertoolsmodule.c - _grouper_type - Modules/itertoolsmodule.c - accumulate_type - +Modules/itertoolsmodule.c - batched_type - Modules/itertoolsmodule.c - chain_type - Modules/itertoolsmodule.c - combinations_type - Modules/itertoolsmodule.c - compress_type - @@ -535,27 +384,12 @@ Modules/itertoolsmodule.c - ziplongest_type - #----------------------- # other -# statically initializd pointer to static type -# XXX should be const? -Modules/_io/winconsoleio.c - _PyWindowsConsoleIO_Type - - -# initialized once -Modules/_functoolsmodule.c - kwd_mark - -Modules/_io/_iomodule.c - _PyIO_empty_bytes - -Modules/_testcapi/heaptype.c - _testcapimodule - -Modules/_testcapi/unicode.c - _testcapimodule - -Modules/_tracemalloc.c - tracemalloc_empty_traceback - -Modules/signalmodule.c - DefaultHandler - -Modules/signalmodule.c - IgnoreHandler - -Modules/signalmodule.c - IntHandler - - # state Modules/faulthandler.c - fatal_error - Modules/faulthandler.c - thread - Modules/faulthandler.c - user_signals - Modules/faulthandler.c - stack - Modules/faulthandler.c - old_stack - -Modules/signalmodule.c - Handlers - ################################## @@ -576,6 +410,7 @@ Modules/timemodule.c _PyTime_GetProcessTimeWithInfo ticks_per_second - Modules/_tracemalloc.c - allocators - Modules/_tracemalloc.c - tables_lock - +Modules/_tracemalloc.c - tracemalloc_empty_traceback - Modules/_tracemalloc.c - tracemalloc_traced_memory - Modules/_tracemalloc.c - tracemalloc_peak_traced_memory - Modules/_tracemalloc.c - tracemalloc_filenames - @@ -589,6 +424,7 @@ Modules/posixmodule.c - environ - Modules/signalmodule.c - is_tripped - Modules/signalmodule.c - signal_global_state - Modules/signalmodule.c - wakeup - +Modules/signalmodule.c - Handlers - ################################## @@ -702,89 +538,6 @@ Modules/xxmodule.c - ErrorObject - #----------------------- # cached - initialized once -# _Py_IDENTIFIER (global) -Modules/_asynciomodule.c - PyId___asyncio_running_event_loop__ - -Modules/_asynciomodule.c - PyId__asyncio_future_blocking - -Modules/_asynciomodule.c - PyId_add_done_callback - -Modules/_asynciomodule.c - PyId_call_soon - -Modules/_asynciomodule.c - PyId_cancel - -Modules/_asynciomodule.c - PyId_get_event_loop - -Modules/_asynciomodule.c - PyId_throw - -Modules/_datetimemodule.c - PyId_as_integer_ratio - -Modules/_datetimemodule.c - PyId_fromutc - -Modules/_datetimemodule.c - PyId_isoformat - -Modules/_datetimemodule.c - PyId_strftime - - -# _Py_IDENTIFIER (local) -Modules/_asynciomodule.c FutureObj_finalize PyId_call_exception_handler - -Modules/_asynciomodule.c FutureObj_finalize PyId_exception - -Modules/_asynciomodule.c FutureObj_finalize PyId_future - -Modules/_asynciomodule.c FutureObj_finalize PyId_message - -Modules/_asynciomodule.c FutureObj_finalize PyId_source_traceback - -Modules/_asynciomodule.c FutureObj_get_state PyId_CANCELLED - -Modules/_asynciomodule.c FutureObj_get_state PyId_FINISHED - -Modules/_asynciomodule.c FutureObj_get_state PyId_PENDING - -Modules/_asynciomodule.c TaskObj_finalize PyId_call_exception_handler - -Modules/_asynciomodule.c TaskObj_finalize PyId_message - -Modules/_asynciomodule.c TaskObj_finalize PyId_source_traceback - -Modules/_asynciomodule.c TaskObj_finalize PyId_task - -Modules/_asynciomodule.c future_init PyId_get_debug - -Modules/_asynciomodule.c get_future_loop PyId__loop - 
-Modules/_asynciomodule.c get_future_loop PyId_get_loop - -Modules/_asynciomodule.c register_task PyId_add - -Modules/_asynciomodule.c unregister_task PyId_discard - -Modules/_ctypes/_ctypes.c CDataType_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c PyCArrayType_new PyId__length_ - -Modules/_ctypes/_ctypes.c PyCArrayType_new PyId__type_ - -Modules/_ctypes/_ctypes.c PyCFuncPtr_set_restype PyId__check_retval_ - -Modules/_ctypes/_ctypes.c PyCPointerType_new PyId__type_ - -Modules/_ctypes/_ctypes.c PyCPointerType_set_type PyId__type_ - -Modules/_ctypes/_ctypes.c PyCSimpleType_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c PyCSimpleType_new PyId__type_ - -Modules/_ctypes/_ctypes.c StructUnionType_new PyId__abstract_ - -Modules/_ctypes/_ctypes.c StructUnionType_new PyId__fields_ - -Modules/_ctypes/_ctypes.c _build_result PyId___ctypes_from_outparam__ - -Modules/_ctypes/_ctypes.c _init_pos_args PyId__fields_ - -Modules/_ctypes/_ctypes.c c_char_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c c_void_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c c_wchar_p_from_param PyId__as_parameter_ - -Modules/_ctypes/_ctypes.c converters_from_argtypes PyId_from_param - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__argtypes_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__check_retval_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__flags_ - -Modules/_ctypes/_ctypes.c make_funcptrtype_dict PyId__restype_ - -Modules/_ctypes/callproc.c ConvParam PyId__as_parameter_ - -Modules/_ctypes/callproc.c unpickle PyId___new__ - -Modules/_ctypes/callproc.c unpickle PyId___setstate__ - -Modules/_ctypes/stgdict.c MakeAnonFields PyId__anonymous_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__pack_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__swappedbytes_ - -Modules/_ctypes/stgdict.c PyCStructUnionType_update_stgdict PyId__use_broken_old_ctypes_structure_semantics_ - -Modules/_cursesmodule.c _curses_getwin PyId_read - -Modules/_cursesmodule.c _curses_window_putwin PyId_write - -Modules/_cursesmodule.c update_lines_cols PyId_COLS - -Modules/_cursesmodule.c update_lines_cols PyId_LINES - -Modules/_datetimemodule.c call_tzname PyId_tzname - -Modules/_datetimemodule.c date_strftime PyId_timetuple - -Modules/_datetimemodule.c date_today PyId_fromtimestamp - -Modules/_datetimemodule.c datetime_strptime PyId__strptime_datetime - -Modules/_datetimemodule.c make_Zreplacement PyId_replace - -Modules/_datetimemodule.c tzinfo_reduce PyId___getinitargs__ - -Modules/_elementtree.c _elementtree_Element_find_impl PyId_find - -Modules/_elementtree.c _elementtree_Element_findall_impl PyId_findall - -Modules/_elementtree.c _elementtree_Element_findtext_impl PyId_findtext - -Modules/_elementtree.c _elementtree_Element_iterfind_impl PyId_iterfind - -Modules/_elementtree.c expat_start_doctype_handler PyId_doctype - -Modules/_elementtree.c treebuilder_add_subelement PyId_append - -Modules/_elementtree.c treebuilder_flush_data PyId_tail - -Modules/_elementtree.c treebuilder_flush_data PyId_text - -Modules/_json.c _encoded_const PyId_false - -Modules/_json.c _encoded_const PyId_null - -Modules/_json.c _encoded_const PyId_true - -Modules/_json.c raise_errmsg PyId_JSONDecodeError - -Modules/_json.c raise_errmsg PyId_decoder - -Modules/ossaudiodev.c oss_exit PyId_close - - # manually cached PyUnicodeOjbect Modules/_asynciomodule.c - context_kwname - Modules/_ctypes/callproc.c _ctypes_get_errobj error_object_name - diff --git 
a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 28c2325c263d29..242deace8c945d 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -1,23 +1,152 @@ filename funcname name reason #??? - somevar ??? +# All globals here are technically mutable but known to be safe. + + +################################## +# process-global resources + +# Initialization for these should be idempotent. + +#----------------------- +# effectively const, set once before/during first init + +Modules/getbuildinfo.c - buildinfo - +Modules/getbuildinfo.c - initialized - +Python/getversion.c - initialized - +Python/getversion.c - version - + +#----------------------- +# effectively const, set once during first init + +Python/bootstrap_hash.c - _Py_HashSecret_Initialized - +Python/pyhash.c - _Py_HashSecret - +Python/thread.c - initialized - +Python/thread_pthread.h - condattr_monotonic - + +# safe static buffer used during one-time initialization +Python/thread_pthread.h init_condattr ca - + +# indicators for process-global resource availability/capability +Python/bootstrap_hash.c py_getrandom getrandom_works - +Python/fileutils.c - _Py_open_cloexec_works - +Python/fileutils.c set_inheritable ioctl_works - + +#----------------------- +# effectively const but set once lazily (*after* first init) + +Objects/longobject.c long_from_non_binary_base log_base_BASE - +Objects/longobject.c long_from_non_binary_base convwidth_base - +Objects/longobject.c long_from_non_binary_base convmultmax_base - +Objects/unicodeobject.c - bloom_linebreak - +Objects/unicodeobject.c _init_global_state initialized - + +# XXX Move to _PyRuntimeState? +Parser/action_helpers.c _PyPegen_dummy_name cache - + + +################################## +# state tied to C main() (only in main thread) + +#----------------------- +# handling C argv + +Python/getopt.c - _PyOS_optarg - +Python/getopt.c - _PyOS_opterr - +Python/getopt.c - _PyOS_optind - +Python/getopt.c - opt_ptr - +Python/pathconfig.c - _Py_path_config - + +#----------------------- +# REPL + +Parser/myreadline.c - _PyOS_ReadlineLock - +Parser/myreadline.c - _PyOS_ReadlineTState - +Parser/myreadline.c - PyOS_InputHook - +Parser/myreadline.c - PyOS_ReadlineFunctionPointer - + + ################################## -# mutable but known to be safe +# state tied to each runtime init/fini cycle Python/pylifecycle.c - _PyRuntime - +Python/pylifecycle.c - runtime_initialized - # All uses of _PyArg_Parser are handled in c-analyzr/cpython/_analyzer.py. 
#----------------------- -# others +# effectively const once init finishes + +# set by embedders before init (whether directly or through a call) +Python/initconfig.c - _Py_StandardStreamEncoding - +Python/initconfig.c - _Py_StandardStreamErrors - +Python/initconfig.c - orig_argv - + +# deprecated +Python/preconfig.c - Py_FileSystemDefaultEncoding - +Python/preconfig.c - Py_HasFileSystemDefaultEncoding - +Python/preconfig.c - Py_FileSystemDefaultEncodeErrors - +Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors - + +# legacy config flags +Python/initconfig.c - Py_UTF8Mode - +Python/initconfig.c - Py_DebugFlag - +Python/initconfig.c - Py_VerboseFlag - +Python/initconfig.c - Py_QuietFlag - +Python/initconfig.c - Py_InteractiveFlag - +Python/initconfig.c - Py_InspectFlag - +Python/initconfig.c - Py_OptimizeFlag - +Python/initconfig.c - Py_NoSiteFlag - +Python/initconfig.c - Py_BytesWarningFlag - +Python/initconfig.c - Py_FrozenFlag - +Python/initconfig.c - Py_IgnoreEnvironmentFlag - +Python/initconfig.c - Py_DontWriteBytecodeFlag - +Python/initconfig.c - Py_NoUserSiteDirectory - +Python/initconfig.c - Py_UnbufferedStdioFlag - +Python/initconfig.c - Py_HashRandomizationFlag - +Python/initconfig.c - Py_IsolatedFlag - +Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag - +Python/initconfig.c - Py_LegacyWindowsStdioFlag - + +# initialized statically, customized by embedders +Python/frozen.c - PyImport_FrozenModules - +Python/import.c - inittab_copy - +Python/import.c - PyImport_Inittab - + +# used temporarily during init +Python/sysmodule.c - _preinit_warnoptions - +Python/sysmodule.c - _preinit_xoptions - + + +################################## +# special-use diagnistic state + +Parser/pegen.c - memo_statistics - + + +################################## +# one-off temporary state + +# This is safe enough. +Python/pylifecycle.c _Py_FatalErrorFormat reentrant - +Python/pylifecycle.c fatal_error reentrant - + + +################################## +# not used (kept for compatibility) + +Python/pyfpe.c - PyFPE_counter - + + +################################## +# The analyzer should have ignored these. +# XXX Fix the analyzer. -# XXX The analyzer should have ignored these. Modules/_io/_iomodule.c - _PyIO_Module - Modules/_sqlite/module.c - _sqlite3module - -################################## # forward/extern references -# XXX The analyzer should have ignored these. 
Include/py_curses.h - PyCurses_API - Include/pydecimal.h - _decimal_api - @@ -77,8 +206,8 @@ Objects/object.c - _Py_GenericAliasIterType - Objects/object.c - _PyMemoryIter_Type - Objects/object.c - _PyLineIterator - Objects/object.c - _PyPositionsIterator - -Objects/perf_trampoline.c - _Py_trampoline_func_start - -Objects/perf_trampoline.c - _Py_trampoline_func_end - +Python/perf_trampoline.c - _Py_trampoline_func_start - +Python/perf_trampoline.c - _Py_trampoline_func_end - Python/importdl.h - _PyImport_DynLoadFiletab - Modules/expat/xmlrole.c - prolog0 - @@ -179,6 +308,8 @@ Modules/_testbuffer.c ndarray_memoryview_from_buffer strides - Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets - Modules/_testbuffer.c ndarray_push kwlist - Modules/_testbuffer.c staticarray_init kwlist - +Modules/_testcapi/heaptype.c - _testcapimodule - +Modules/_testcapi/unicode.c - _testcapimodule - Modules/_testcapimodule.c - ContainerNoGC_members - Modules/_testcapimodule.c - ContainerNoGC_type - Modules/_testcapimodule.c - FmData - @@ -261,6 +392,10 @@ Modules/_testcapimodule.c test_capsule buffer - Modules/_testcapimodule.c test_empty_argparse kwlist - Modules/_testcapimodule.c test_structmembers_new keywords - Modules/_testcapimodule.c getargs_s_hash_int keywords - +Modules/_testcapimodule.c - g_dict_watch_events - +Modules/_testcapimodule.c - g_dict_watchers_installed - +Modules/_testcapimodule.c - g_type_modified_events - +Modules/_testcapimodule.c - g_type_watchers_installed - Modules/_testimportmultiple.c - _barmodule - Modules/_testimportmultiple.c - _foomodule - Modules/_testimportmultiple.c - _testimportmultiple - @@ -373,6 +508,7 @@ Modules/_decimal/_decimal.c - ssize_constants - Modules/_elementtree.c - ExpatMemoryHandler - Modules/_io/_iomodule.c - static_types - Modules/_io/textio.c - encodefuncs - +Modules/_io/winconsoleio.c - _PyWindowsConsoleIO_Type - Modules/_localemodule.c - langinfo_constants - Modules/_pickle.c - READ_WHOLE_LINE - Modules/_sqlite/module.c - error_codes - @@ -465,8 +601,8 @@ Objects/obmalloc.c - _PyMem_Debug - Objects/obmalloc.c - _PyMem_Raw - Objects/obmalloc.c - _PyObject - Objects/obmalloc.c - usedpools - -Objects/perf_trampoline.c - _Py_perfmap_callbacks - Objects/typeobject.c - name_op - +Objects/typeobject.c - slotdefs - Objects/unicodeobject.c - stripfuncnames - Objects/unicodeobject.c - utf7_category - Objects/unicodeobject.c unicode_decode_call_errorhandler_wchar argparse - @@ -489,10 +625,15 @@ Python/frozen.c - aliases - Python/frozen.c - bootstrap_modules - Python/frozen.c - stdlib_modules - Python/frozen.c - test_modules - +Python/frozen.c - _PyImport_FrozenAliases - +Python/frozen.c - _PyImport_FrozenBootstrap - +Python/frozen.c - _PyImport_FrozenStdlib - +Python/frozen.c - _PyImport_FrozenTest - Python/getopt.c - longopts - Python/import.c - _PyImport_Inittab - Python/import.c - _PySys_ImplCacheTag - Python/opcode_targets.h - opcode_targets - +Python/perf_trampoline.c - _Py_perfmap_callbacks - Python/pyhash.c - PyHash_Func - Python/pylifecycle.c - _C_LOCALE_WARNING - Python/pylifecycle.c - _PyOS_mystrnicmp_hack - @@ -501,6 +642,7 @@ Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - Python/specialize.c - compare_masks - -Python/specialize.c - _PyOpcode_Adaptive - Python/stdlib_module_names.h - _Py_stdlib_module_names - +Python/sysmodule.c - _PySys_ImplCacheTag - +Python/sysmodule.c - _PySys_ImplName - Python/sysmodule.c - whatstrings - diff --git a/Tools/cases_generator/README.md 
b/Tools/cases_generator/README.md new file mode 100644 index 00000000000000..dc055ead1941cd --- /dev/null +++ b/Tools/cases_generator/README.md @@ -0,0 +1,33 @@ +# Tooling to generate interpreters + +What's currently here: + +- `lexer.py`: lexer for C, originally written by Mark Shannon +- `plexer.py`: OO interface on top of lexer.py; main class: `PLexer` +- `parser.py`: Parser for instruction definition DSL; main class `Parser` +- `generate_cases.py`: driver script to read `Python/bytecodes.c` and + write `Python/generated_cases.c.h` + +The DSL for the instruction definitions in `Python/bytecodes.c` is described +[here](https://github.com/faster-cpython/ideas/blob/main/3.12/interpreter_definition.md). +Note that there is some dummy C code at the top and bottom of the file +to fool text editors like VS Code into believing this is valid C code. + +## A bit about the parser + +The parser class uses a pretty standard recursive descent scheme, +but with unlimited backtracking. +The `PLexer` class tokenizes the entire input before parsing starts. +We do not run the C preprocessor. +Each parsing method returns either an AST node (a `Node` instance) +or `None`, or raises `SyntaxError` (showing the error in the C source). + +Most parsing methods are decorated with `@contextual`, which automatically +resets the tokenizer input position when `None` is returned. +Parsing methods may also raise `SyntaxError`, which is irrecoverable. +When a parsing method returns `None`, it is possible that after backtracking +a different parsing method returns a valid AST. + +Neither the lexer nor the parsers are complete or fully correct. +Most known issues are tersely indicated by `# TODO:` comments. +We plan to fix issues as they become relevant. diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py new file mode 100644 index 00000000000000..424b15ede2aadf --- /dev/null +++ b/Tools/cases_generator/generate_cases.py @@ -0,0 +1,519 @@ +"""Generate the main interpreter switch. + +Reads the instruction definitions from bytecodes.c. +Writes the cases to generated_cases.c.h, which is #included in ceval.c. 
+""" + +import argparse +import contextlib +import dataclasses +import os +import re +import sys +import typing + +import parser + +DEFAULT_INPUT = "Python/bytecodes.c" +DEFAULT_OUTPUT = "Python/generated_cases.c.h" +BEGIN_MARKER = "// BEGIN BYTECODES //" +END_MARKER = "// END BYTECODES //" +RE_PREDICTED = r"(?s)(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);" +UNUSED = "unused" +BITS_PER_CODE_UNIT = 16 + +arg_parser = argparse.ArgumentParser() +arg_parser.add_argument("-i", "--input", type=str, default=DEFAULT_INPUT) +arg_parser.add_argument("-o", "--output", type=str, default=DEFAULT_OUTPUT) + + +# This is not a data class +class Instruction(parser.InstDef): + """An instruction with additional data and code.""" + + # Computed by constructor + always_exits: bool + cache_offset: int + cache_effects: list[parser.CacheEffect] + input_effects: list[parser.StackEffect] + output_effects: list[parser.StackEffect] + + # Set later + family: parser.Family | None = None + predicted: bool = False + + def __init__(self, inst: parser.InstDef): + super().__init__(inst.header, inst.block) + self.context = inst.context + self.always_exits = always_exits(self.block) + self.cache_effects = [ + effect for effect in self.inputs if isinstance(effect, parser.CacheEffect) + ] + self.cache_offset = sum(c.size for c in self.cache_effects) + self.input_effects = [ + effect for effect in self.inputs if isinstance(effect, parser.StackEffect) + ] + self.output_effects = self.outputs # For consistency/completeness + + def write(self, f: typing.TextIO, indent: str, dedent: int = 0) -> None: + """Write one instruction, sans prologue and epilogue.""" + if dedent < 0: + indent += " " * -dedent # DO WE NEED THIS? + + # Get cache offset and maybe assert that it is correct + if family := self.family: + if self.name == family.members[0]: + if cache_size := family.size: + f.write( + f"{indent} static_assert({cache_size} == " + f'{self.cache_offset}, "incorrect cache size");\n' + ) + + # Write cache effect variable declarations + cache_offset = 0 + for ceffect in self.cache_effects: + if ceffect.name != UNUSED: + bits = ceffect.size * BITS_PER_CODE_UNIT + if bits == 64: + # NOTE: We assume that 64-bit data in the cache + # is always an object pointer. + # If this becomes false, we need a way to specify + # syntactically what type the cache data is. 
+ f.write( + f"{indent} PyObject *{ceffect.name} = " + f"read_obj(next_instr + {cache_offset});\n" + ) + else: + f.write(f"{indent} uint{bits}_t {ceffect.name} = " + f"read_u{bits}(next_instr + {cache_offset});\n") + cache_offset += ceffect.size + assert cache_offset == self.cache_offset + + # Write input stack effect variable declarations and initializations + for i, seffect in enumerate(reversed(self.input_effects), 1): + if seffect.name != UNUSED: + f.write(f"{indent} PyObject *{seffect.name} = PEEK({i});\n") + + # Write output stack effect variable declarations + input_names = {seffect.name for seffect in self.input_effects} + input_names.add(UNUSED) + for seffect in self.output_effects: + if seffect.name not in input_names: + f.write(f"{indent} PyObject *{seffect.name};\n") + + self.write_body(f, indent, dedent) + + # Skip the rest if the block always exits + if always_exits(self.block): + return + + # Write net stack growth/shrinkage + diff = len(self.output_effects) - len(self.input_effects) + if diff > 0: + f.write(f"{indent} STACK_GROW({diff});\n") + elif diff < 0: + f.write(f"{indent} STACK_SHRINK({-diff});\n") + + # Write output stack effect assignments + unmoved_names = {UNUSED} + for ieffect, oeffect in zip(self.input_effects, self.output_effects): + if ieffect.name == oeffect.name: + unmoved_names.add(ieffect.name) + for i, seffect in enumerate(reversed(self.output_effects)): + if seffect.name not in unmoved_names: + f.write(f"{indent} POKE({i+1}, {seffect.name});\n") + + # Write cache effect + if self.cache_offset: + f.write(f"{indent} next_instr += {self.cache_offset};\n") + + def write_body(self, f: typing.TextIO, ndent: str, dedent: int) -> None: + """Write the instruction body.""" + + # Get lines of text with proper dedent + blocklines = self.block.to_text(dedent=dedent).splitlines(True) + + # Remove blank lines from both ends + while blocklines and not blocklines[0].strip(): + blocklines.pop(0) + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Remove leading and trailing braces + assert blocklines and blocklines[0].strip() == "{" + assert blocklines and blocklines[-1].strip() == "}" + blocklines.pop() + blocklines.pop(0) + + # Remove trailing blank lines + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Write the body, substituting a goto for ERROR_IF() + for line in blocklines: + if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*$", line): + space, cond, label = m.groups() + # ERROR_IF() must pop the inputs from the stack. + # The code block is responsible for DECREF()ing them. + # NOTE: If the label doesn't exist, just add it to ceval.c. + ninputs = len(self.input_effects) + # Don't pop common input/output effects at the bottom! + # These aren't DECREF'ed so they can stay. 
+ for ieff, oeff in zip(self.input_effects, self.output_effects): + if ieff.name == oeff.name: + ninputs -= 1 + else: + break + if ninputs: + f.write(f"{space}if ({cond}) goto pop_{ninputs}_{label};\n") + else: + f.write(f"{space}if ({cond}) goto {label};\n") + else: + f.write(line) + + +@dataclasses.dataclass +class SuperComponent: + instr: Instruction + input_mapping: dict[str, parser.StackEffect] + output_mapping: dict[str, parser.StackEffect] + + +class SuperInstruction(parser.Super): + + stack: list[str] + initial_sp: int + final_sp: int + parts: list[SuperComponent] + + def __init__(self, sup: parser.Super): + super().__init__(sup.kind, sup.name, sup.ops) + self.context = sup.context + + def analyze(self, a: "Analyzer") -> None: + components = self.check_components(a) + self.stack, self.initial_sp = self.super_macro_analysis(a, components) + sp = self.initial_sp + self.parts = [] + for instr in components: + input_mapping = {} + for ieffect in reversed(instr.input_effects): + sp -= 1 + if ieffect.name != UNUSED: + input_mapping[self.stack[sp]] = ieffect + output_mapping = {} + for oeffect in instr.output_effects: + if oeffect.name != UNUSED: + output_mapping[self.stack[sp]] = oeffect + sp += 1 + self.parts.append(SuperComponent(instr, input_mapping, output_mapping)) + self.final_sp = sp + + def check_components(self, a: "Analyzer") -> list[Instruction]: + components: list[Instruction] = [] + if not self.ops: + a.error(f"{self.kind.capitalize()}-instruction has no operands", self) + for name in self.ops: + if name not in a.instrs: + a.error(f"Unknown instruction {name!r}", self) + else: + instr = a.instrs[name] + if self.kind == "super" and instr.kind != "inst": + a.error(f"Super-instruction operand {instr.name} must be inst, not op", instr) + components.append(instr) + return components + + def super_macro_analysis( + self, a: "Analyzer", components: list[Instruction] + ) -> tuple[list[str], int]: + """Analyze a super-instruction or macro. + + Print an error if there's a cache effect (which we don't support yet). + + Return the list of variable names and the initial stack pointer. + """ + lowest = current = highest = 0 + for instr in components: + if instr.cache_effects: + a.error( + f"Super-instruction {self.name!r} has cache effects in {instr.name!r}", + instr, + ) + current -= len(instr.input_effects) + lowest = min(lowest, current) + current += len(instr.output_effects) + highest = max(highest, current) + # At this point, 'current' is the net stack effect, + # and 'lowest' and 'highest' are the extremes. + # Note that 'lowest' may be negative. 
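A worked example with hypothetical components (not taken from bytecodes.c): for a super-instruction made of a 2-input/1-output component followed by a 1-input/1-output component, `current` runs through 0, -2, -1, -2, -1, so `lowest` ends up at -2 while `highest` stays 0; the lines below then allocate `highest - lowest = 2` temporaries (`_tmp_1`, `_tmp_2`) and report an initial stack pointer of `-lowest = 2`.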
+ stack = [f"_tmp_{i+1}" for i in range(highest - lowest)] + return stack, -lowest + + +class Analyzer: + """Parse input, analyze it, and write to output.""" + + filename: str + src: str + errors: int = 0 + + def error(self, msg: str, node: parser.Node) -> None: + lineno = 0 + if context := node.context: + # Use line number of first non-comment in the node + for token in context.owner.tokens[context.begin : context.end]: + lineno = token.line + if token.kind != "COMMENT": + break + print(f"{self.filename}:{lineno}: {msg}", file=sys.stderr) + self.errors += 1 + + def __init__(self, filename: str): + """Read the input file.""" + self.filename = filename + with open(filename) as f: + self.src = f.read() + + instrs: dict[str, Instruction] # Includes ops + supers: dict[str, parser.Super] # Includes macros + super_instrs: dict[str, SuperInstruction] + families: dict[str, parser.Family] + + def parse(self) -> None: + """Parse the source text.""" + psr = parser.Parser(self.src, filename=self.filename) + + # Skip until begin marker + while tkn := psr.next(raw=True): + if tkn.text == BEGIN_MARKER: + break + else: + raise psr.make_syntax_error( + f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}" + ) + + # Parse until end marker + self.instrs = {} + self.supers = {} + self.families = {} + while (tkn := psr.peek(raw=True)) and tkn.text != END_MARKER: + if inst := psr.inst_def(): + self.instrs[inst.name] = instr = Instruction(inst) + elif super := psr.super_def(): + self.supers[super.name] = super + elif family := psr.family_def(): + self.families[family.name] = family + else: + raise psr.make_syntax_error(f"Unexpected token") + + print( + f"Read {len(self.instrs)} instructions, " + f"{len(self.supers)} supers/macros, " + f"and {len(self.families)} families from {self.filename}", + file=sys.stderr, + ) + + def analyze(self) -> None: + """Analyze the inputs. + + Raises SystemExit if there is an error. 
+ """ + self.find_predictions() + self.map_families() + self.check_families() + self.analyze_supers() + + def find_predictions(self) -> None: + """Find the instructions that need PREDICTED() labels.""" + for instr in self.instrs.values(): + for target in re.findall(RE_PREDICTED, instr.block.text): + if target_instr := self.instrs.get(target): + target_instr.predicted = True + else: + self.error( + f"Unknown instruction {target!r} predicted in {instr.name!r}", + instr, # TODO: Use better location + ) + + def map_families(self) -> None: + """Make instruction names back to their family, if they have one.""" + for family in self.families.values(): + for member in family.members: + if member_instr := self.instrs.get(member): + member_instr.family = family + else: + self.error( + f"Unknown instruction {member!r} referenced in family {family.name!r}", + family, + ) + + def check_families(self) -> None: + """Check each family: + + - Must have at least 2 members + - All members must be known instructions + - All members must have the same cache, input and output effects + """ + for family in self.families.values(): + if len(family.members) < 2: + self.error(f"Family {family.name!r} has insufficient members", family) + members = [member for member in family.members if member in self.instrs] + if members != family.members: + unknown = set(family.members) - set(members) + self.error(f"Family {family.name!r} has unknown members: {unknown}", family) + if len(members) < 2: + continue + head = self.instrs[members[0]] + cache = head.cache_offset + input = len(head.input_effects) + output = len(head.output_effects) + for member in members[1:]: + instr = self.instrs[member] + c = instr.cache_offset + i = len(instr.input_effects) + o = len(instr.output_effects) + if (c, i, o) != (cache, input, output): + self.error( + f"Family {family.name!r} has inconsistent " + f"(cache, inputs, outputs) effects:\n" + f" {family.members[0]} = {(cache, input, output)}; " + f"{member} = {(c, i, o)}", + family, + ) + + def analyze_supers(self) -> None: + """Analyze each super instruction.""" + self.super_instrs = {} + for name, sup in self.supers.items(): + dup = SuperInstruction(sup) + dup.analyze(self) + self.super_instrs[name] = dup + + def write_instructions(self, filename: str) -> None: + """Write instructions to output file.""" + indent = " " * 8 + with open(filename, "w") as f: + # Write provenance header + f.write(f"// This file is generated by {os.path.relpath(__file__)}\n") + f.write(f"// from {os.path.relpath(self.filename)}\n") + f.write(f"// Do not edit!\n") + + # Write regular instructions + n_instrs = 0 + for name, instr in self.instrs.items(): + if instr.kind != "inst": + continue # ops are not real instructions + n_instrs += 1 + f.write(f"\n{indent}TARGET({name}) {{\n") + if instr.predicted: + f.write(f"{indent} PREDICTED({name});\n") + instr.write(f, indent) + if not always_exits(instr.block): + f.write(f"{indent} DISPATCH();\n") + f.write(f"{indent}}}\n") + + # Write super-instructions and macros + n_supers = 0 + n_macros = 0 + for sup in self.super_instrs.values(): + if sup.kind == "super": + n_supers += 1 + elif sup.kind == "macro": + n_macros += 1 + self.write_super_macro(f, sup, indent) + + print( + f"Wrote {n_instrs} instructions, {n_supers} supers, " + f"and {n_macros} macros to {filename}", + file=sys.stderr, + ) + + def write_super_macro( + self, f: typing.TextIO, sup: SuperInstruction, indent: str = "" + ) -> None: + + # TODO: Make write() and block() methods of some Formatter class + def write(arg: 
str) -> None: + if arg: + f.write(f"{indent}{arg}\n") + else: + f.write("\n") + + @contextlib.contextmanager + def block(head: str): + if head: + write(head + " {") + else: + write("{") + nonlocal indent + indent += " " + yield + indent = indent[:-4] + write("}") + + write("") + with block(f"TARGET({sup.name})"): + for i, var in enumerate(sup.stack): + if i < sup.initial_sp: + write(f"PyObject *{var} = PEEK({sup.initial_sp - i});") + else: + write(f"PyObject *{var};") + + for i, comp in enumerate(sup.parts): + if i > 0 and sup.kind == "super": + write("NEXTOPARG();") + write("next_instr++;") + + with block(""): + for var, ieffect in comp.input_mapping.items(): + write(f"PyObject *{ieffect.name} = {var};") + for oeffect in comp.output_mapping.values(): + write(f"PyObject *{oeffect.name};") + comp.instr.write_body(f, indent, dedent=-4) + for var, oeffect in comp.output_mapping.items(): + write(f"{var} = {oeffect.name};") + + if sup.final_sp > sup.initial_sp: + write(f"STACK_GROW({sup.final_sp - sup.initial_sp});") + elif sup.final_sp < sup.initial_sp: + write(f"STACK_SHRINK({sup.initial_sp - sup.final_sp});") + for i, var in enumerate(reversed(sup.stack[:sup.final_sp]), 1): + write(f"POKE({i}, {var});") + write("DISPATCH();") + + +def always_exits(block: parser.Block) -> bool: + """Determine whether a block always ends in a return/goto/etc.""" + text = block.text + lines = text.splitlines() + while lines and not lines[-1].strip(): + lines.pop() + if not lines or lines[-1].strip() != "}": + return False + lines.pop() + if not lines: + return False + line = lines.pop().rstrip() + # Indent must match exactly (TODO: Do something better) + if line[:12] != " " * 12: + return False + line = line[12:] + return line.startswith( + ("goto ", "return ", "DISPATCH", "GO_TO_", "Py_UNREACHABLE()") + ) + + +def main(): + """Parse command line, parse input, analyze, write output.""" + args = arg_parser.parse_args() # Prints message and sys.exit(2) on error + a = Analyzer(args.input) # Raises OSError if file not found + a.parse() # Raises SyntaxError on failure + a.analyze() # Prints messages and raises SystemExit on failure + if a.errors: + sys.exit(f"Found {a.errors} errors") + + a.write_instructions(args.output) # Raises OSError if file can't be written + + +if __name__ == "__main__": + main() diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py new file mode 100644 index 00000000000000..980c920bf357f4 --- /dev/null +++ b/Tools/cases_generator/lexer.py @@ -0,0 +1,257 @@ +# Parser for C code +# Originally by Mark Shannon (mark@hotpy.org) +# https://gist.github.com/markshannon/db7ab649440b5af765451bb77c7dba34 + +import re +import sys +import collections +from dataclasses import dataclass + +def choice(*opts): + return "|".join("(%s)" % opt for opt in opts) + +# Regexes + +# Longer operators must go before shorter ones. + +PLUSPLUS = r'\+\+' +MINUSMINUS = r'--' + +# -> +ARROW = r'->' +ELLIPSIS = r'\.\.\.' + +# Assignment operators +TIMESEQUAL = r'\*=' +DIVEQUAL = r'/=' +MODEQUAL = r'%=' +PLUSEQUAL = r'\+=' +MINUSEQUAL = r'-=' +LSHIFTEQUAL = r'<<=' +RSHIFTEQUAL = r'>>=' +ANDEQUAL = r'&=' +OREQUAL = r'\|=' +XOREQUAL = r'\^=' + +# Operators +PLUS = r'\+' +MINUS = r'-' +TIMES = r'\*' +DIVIDE = r'/' +MOD = r'%' +NOT = r'~' +XOR = r'\^' +LOR = r'\|\|' +LAND = r'&&' +LSHIFT = r'<<' +RSHIFT = r'>>' +LE = r'<=' +GE = r'>=' +EQ = r'==' +NE = r'!=' +LT = r'<' +GT = r'>' +LNOT = r'!' +OR = r'\|' +AND = r'&' +EQUALS = r'=' + +# ? +CONDOP = r'\?' 
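A minimal, self-contained sketch (an assumed example, not part of the patch) of why the comment above requires longer operators to precede shorter ones: `re` alternation is ordered, so the first alternative that matches at a position wins, which is what `choice()` relies on.

    import re

    # Longest-first ordering tokenizes "<<=" as a single operator.
    longest_first = re.compile("|".join(["<<=", "<<", "<"]))
    assert longest_first.match("<<= 1").group() == "<<="

    # Shortest-first ordering would stop after the bare "<".
    shortest_first = re.compile("|".join(["<", "<<", "<<="]))
    assert shortest_first.match("<<= 1").group() == "<"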
+ +# Delimiters +LPAREN = r'\(' +RPAREN = r'\)' +LBRACKET = r'\[' +RBRACKET = r'\]' +LBRACE = r'\{' +RBRACE = r'\}' +COMMA = r',' +PERIOD = r'\.' +SEMI = r';' +COLON = r':' +BACKSLASH = r'\\' + +operators = { op: pattern for op, pattern in globals().items() if op == op.upper() } +for op in operators: + globals()[op] = op +opmap = { pattern.replace("\\", "") or '\\' : op for op, pattern in operators.items() } + +# Macros +macro = r'# *(ifdef|ifndef|undef|define|error|endif|if|else|include|#)' +MACRO = 'MACRO' + +id_re = r'[a-zA-Z_][0-9a-zA-Z_]*' +IDENTIFIER = 'IDENTIFIER' + +suffix = r'([uU]?[lL]?[lL]?)' +octal = r'0[0-7]+' + suffix +hex = r'0[xX][0-9a-fA-F]+' +decimal_digits = r'(0|[1-9][0-9]*)' +decimal = decimal_digits + suffix + + +exponent = r"""([eE][-+]?[0-9]+)""" +fraction = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" +float = '(((('+fraction+')'+exponent+'?)|([0-9]+'+exponent+'))[FfLl]?)' + +number_re = choice(octal, hex, float, decimal) +NUMBER = 'NUMBER' + +simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" +decimal_escape = r"""(\d+)""" +hex_escape = r"""(x[0-9a-fA-F]+)""" +escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' +string_char = r"""([^"\\\n]|"""+escape_sequence+')' +str_re = '"'+string_char+'*"' +STRING = 'STRING' +char = r'\'.\'' # TODO: escape sequence +CHARACTER = 'CHARACTER' + +comment_re = r'//.*|/\*([^*]|\*[^/])*\*/' +COMMENT = 'COMMENT' + +newline = r"\n" +invalid = r"\S" # A single non-space character that's not caught by any of the other patterns +matcher = re.compile(choice(id_re, number_re, str_re, char, newline, macro, comment_re, *operators.values(), invalid)) +letter = re.compile(r'[a-zA-Z_]') + +kwds = ( + 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE' +) +for name in kwds: + globals()[name] = name +keywords = { name.lower() : name for name in kwds } + + +def make_syntax_error( + message: str, filename: str, line: int, column: int, line_text: str, +) -> SyntaxError: + return SyntaxError(message, (filename, line, column, line_text)) + + +@dataclass(slots=True) +class Token: + kind: str + text: str + begin: tuple[int, int] + end: tuple[int, int] + + @property + def line(self): + return self.begin[0] + + @property + def column(self): + return self.begin[1] + + @property + def end_line(self): + return self.end[0] + + @property + def end_column(self): + return self.end[1] + + @property + def width(self): + return self.end[1] - self.begin[1] + + def replaceText(self, txt): + assert isinstance(txt, str) + return Token(self.kind, txt, self.begin, self.end) + + def __repr__(self): + b0, b1 = self.begin + e0, e1 = self.end + if b0 == e0: + return f"{self.kind}({self.text!r}, {b0}:{b1}:{e1})" + else: + return f"{self.kind}({self.text!r}, {b0}:{b1}, {e0}:{e1})" + + +def tokenize(src, line=1, filename=None): + linestart = -1 + for m in matcher.finditer(src): + start, end = m.span() + text = m.group(0) + if text in keywords: + kind = keywords[text] + elif letter.match(text): + kind = IDENTIFIER + elif text == '...': + kind = ELLIPSIS + elif text == '.': + kind = PERIOD + elif text[0] in '0123456789.': + kind = NUMBER + elif text[0] == '"': + kind = STRING + elif text in opmap: + kind = opmap[text] + elif text == '\n': + linestart = start + line += 1 + kind = 
'\n' + elif text[0] == "'": + kind = CHARACTER + elif text[0] == '#': + kind = MACRO + elif text[0] == '/' and text[1] in '/*': + kind = COMMENT + else: + lineend = src.find("\n", start) + if lineend == -1: + lineend = len(src) + raise make_syntax_error(f"Bad token: {text}", + filename, line, start-linestart+1, src[linestart:lineend]) + if kind == COMMENT: + begin = line, start-linestart + newlines = text.count('\n') + if newlines: + linestart = start + text.rfind('\n') + line += newlines + else: + begin = line, start-linestart + if kind != "\n": + yield Token(kind, text, begin, (line, start-linestart+len(text))) + + +__all__ = [] +__all__.extend([kind for kind in globals() if kind.upper() == kind]) + + +def to_text(tkns: list[Token], dedent: int = 0) -> str: + res: list[str] = [] + line, col = -1, 1+dedent + for tkn in tkns: + if line == -1: + line, _ = tkn.begin + l, c = tkn.begin + #assert(l >= line), (line, txt, start, end) + while l > line: + line += 1 + res.append('\n') + col = 1+dedent + res.append(' '*(c-col)) + res.append(tkn.text) + line, col = tkn.end + return ''.join(res) + + +if __name__ == "__main__": + import sys + filename = sys.argv[1] + if filename == "-c": + src = sys.argv[2] + else: + src = open(filename).read() + # print(to_text(tokenize(src))) + for tkn in tokenize(src, filename=filename): + print(tkn) diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py new file mode 100644 index 00000000000000..ae5ef1e26ea1c2 --- /dev/null +++ b/Tools/cases_generator/parser.py @@ -0,0 +1,304 @@ +"""Parser for bytecodes.inst.""" + +from dataclasses import dataclass, field +from typing import NamedTuple, Callable, TypeVar, Literal + +import lexer as lx +from plexer import PLexer + + +P = TypeVar("P", bound="Parser") +N = TypeVar("N", bound="Node") +def contextual(func: Callable[[P], N|None]) -> Callable[[P], N|None]: + # Decorator to wrap grammar methods. + # Resets position if `func` returns None. 
+ def contextual_wrapper(self: P) -> N|None: + begin = self.getpos() + res = func(self) + if res is None: + self.setpos(begin) + return + end = self.getpos() + res.context = Context(begin, end, self) + return res + return contextual_wrapper + + +class Context(NamedTuple): + begin: int + end: int + owner: PLexer + + def __repr__(self): + return f"<{self.begin}-{self.end}>" + + +@dataclass +class Node: + context: Context|None = field(init=False, default=None) + + @property + def text(self) -> str: + return self.to_text() + + def to_text(self, dedent: int = 0) -> str: + context = self.context + if not context: + return "" + tokens = context.owner.tokens + begin = context.begin + end = context.end + return lx.to_text(tokens[begin:end], dedent) + + +@dataclass +class Block(Node): + tokens: list[lx.Token] + + +@dataclass +class StackEffect(Node): + name: str + # TODO: type, condition + + +@dataclass +class CacheEffect(Node): + name: str + size: int + + +InputEffect = StackEffect | CacheEffect +OutputEffect = StackEffect + + +@dataclass +class InstHeader(Node): + kind: Literal["inst", "op"] + name: str + inputs: list[InputEffect] + outputs: list[OutputEffect] + + +@dataclass +class InstDef(Node): + # TODO: Merge InstHeader and InstDef + header: InstHeader + block: Block + + @property + def kind(self) -> str: + return self.header.kind + + @property + def name(self) -> str: + return self.header.name + + @property + def inputs(self) -> list[InputEffect]: + return self.header.inputs + + @property + def outputs(self) -> list[OutputEffect]: + return self.header.outputs + + +@dataclass +class Super(Node): + kind: Literal["macro", "super"] + name: str + ops: list[str] + + +@dataclass +class Family(Node): + name: str + size: str # Variable giving the cache size in code units + members: list[str] + + +class Parser(PLexer): + + @contextual + def inst_def(self) -> InstDef | None: + if header := self.inst_header(): + if block := self.block(): + return InstDef(header, block) + raise self.make_syntax_error("Expected block") + return None + + @contextual + def inst_header(self) -> InstHeader | None: + # inst(NAME) + # | inst(NAME, (inputs -- outputs)) + # | op(NAME, (inputs -- outputs)) + # TODO: Error out when there is something unexpected. + # TODO: Make INST a keyword in the lexer. + if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("inst", "op"): + if (self.expect(lx.LPAREN) + and (tkn := self.expect(lx.IDENTIFIER))): + name = tkn.text + if self.expect(lx.COMMA): + inp, outp = self.stack_effect() + if self.expect(lx.RPAREN): + if ((tkn := self.peek()) + and tkn.kind == lx.LBRACE): + return InstHeader(kind, name, inp, outp) + elif self.expect(lx.RPAREN) and kind == "inst": + # No legacy stack effect if kind is "op". 
+ return InstHeader(kind, name, [], []) + return None + + def stack_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]: + # '(' [inputs] '--' [outputs] ')' + if self.expect(lx.LPAREN): + inputs = self.inputs() or [] + if self.expect(lx.MINUSMINUS): + outputs = self.outputs() or [] + if self.expect(lx.RPAREN): + return inputs, outputs + raise self.make_syntax_error("Expected stack effect") + + def inputs(self) -> list[InputEffect] | None: + # input (',' input)* + here = self.getpos() + if inp := self.input(): + near = self.getpos() + if self.expect(lx.COMMA): + if rest := self.inputs(): + return [inp] + rest + self.setpos(near) + return [inp] + self.setpos(here) + return None + + @contextual + def input(self) -> InputEffect | None: + # IDENTIFIER '/' INTEGER (CacheEffect) + # IDENTIFIER (StackEffect) + if (tkn := self.expect(lx.IDENTIFIER)): + if self.expect(lx.DIVIDE): + if num := self.expect(lx.NUMBER): + try: + size = int(num.text) + except ValueError: + raise self.make_syntax_error( + f"Expected integer, got {num.text!r}") + else: + return CacheEffect(tkn.text, size) + raise self.make_syntax_error("Expected integer") + else: + return StackEffect(tkn.text) + + def outputs(self) -> list[OutputEffect] | None: + # output (, output)* + here = self.getpos() + if outp := self.output(): + near = self.getpos() + if self.expect(lx.COMMA): + if rest := self.outputs(): + return [outp] + rest + self.setpos(near) + return [outp] + self.setpos(here) + return None + + @contextual + def output(self) -> OutputEffect | None: + if (tkn := self.expect(lx.IDENTIFIER)): + return StackEffect(tkn.text) + + @contextual + def super_def(self) -> Super | None: + if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("super", "macro"): + if self.expect(lx.LPAREN): + if (tkn := self.expect(lx.IDENTIFIER)): + if self.expect(lx.RPAREN): + if self.expect(lx.EQUALS): + if ops := self.ops(): + res = Super(kind, tkn.text, ops) + return res + + def ops(self) -> list[str] | None: + if tkn := self.expect(lx.IDENTIFIER): + ops = [tkn.text] + while self.expect(lx.PLUS): + if tkn := self.require(lx.IDENTIFIER): + ops.append(tkn.text) + self.require(lx.SEMI) + return ops + + @contextual + def family_def(self) -> Family | None: + if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "family": + size = None + if self.expect(lx.LPAREN): + if (tkn := self.expect(lx.IDENTIFIER)): + if self.expect(lx.COMMA): + if not (size := self.expect(lx.IDENTIFIER)): + raise self.make_syntax_error( + "Expected identifier") + if self.expect(lx.RPAREN): + if self.expect(lx.EQUALS): + if not self.expect(lx.LBRACE): + raise self.make_syntax_error("Expected {") + if members := self.members(): + if self.expect(lx.RBRACE) and self.expect(lx.SEMI): + return Family(tkn.text, size.text if size else "", members) + return None + + def members(self) -> list[str] | None: + here = self.getpos() + if tkn := self.expect(lx.IDENTIFIER): + members = [tkn.text] + while self.expect(lx.COMMA): + if tkn := self.expect(lx.IDENTIFIER): + members.append(tkn.text) + else: + break + peek = self.peek() + if not peek or peek.kind != lx.RBRACE: + raise self.make_syntax_error("Expected comma or right paren") + return members + self.setpos(here) + return None + + @contextual + def block(self) -> Block: + tokens = self.c_blob() + return Block(tokens) + + def c_blob(self) -> list[lx.Token]: + tokens: list[lx.Token] = [] + level = 0 + while tkn := self.next(raw=True): + if tkn.kind in (lx.LBRACE, lx.LPAREN, lx.LBRACKET): + level += 1 + elif tkn.kind in 
(lx.RBRACE, lx.RPAREN, lx.RBRACKET): + level -= 1 + if level <= 0: + break + tokens.append(tkn) + return tokens + + +if __name__ == "__main__": + import sys + if sys.argv[1:]: + filename = sys.argv[1] + if filename == "-c" and sys.argv[2:]: + src = sys.argv[2] + filename = "" + else: + with open(filename) as f: + src = f.read() + srclines = src.splitlines() + begin = srclines.index("// BEGIN BYTECODES //") + end = srclines.index("// END BYTECODES //") + src = "\n".join(srclines[begin+1 : end]) + else: + filename = "" + src = "if (x) { x.foo; // comment\n}" + parser = Parser(src, filename) + x = parser.inst_def() or parser.super_def() or parser.family_def() + print(x) diff --git a/Tools/cases_generator/plexer.py b/Tools/cases_generator/plexer.py new file mode 100644 index 00000000000000..a73254ed5b1daa --- /dev/null +++ b/Tools/cases_generator/plexer.py @@ -0,0 +1,105 @@ +import lexer as lx +Token = lx.Token + + +class PLexer: + def __init__(self, src: str, filename: str): + self.src = src + self.filename = filename + self.tokens = list(lx.tokenize(self.src, filename=filename)) + self.pos = 0 + + def getpos(self) -> int: + # Current position + return self.pos + + def eof(self) -> bool: + # Are we at EOF? + return self.pos >= len(self.tokens) + + def setpos(self, pos: int) -> None: + # Reset position + assert 0 <= pos <= len(self.tokens), (pos, len(self.tokens)) + self.pos = pos + + def backup(self) -> None: + # Back up position by 1 + assert self.pos > 0 + self.pos -= 1 + + def next(self, raw: bool = False) -> Token | None: + # Return next token and advance position; None if at EOF + # TODO: Return synthetic EOF token instead of None? + while self.pos < len(self.tokens): + tok = self.tokens[self.pos] + self.pos += 1 + if raw or tok.kind != "COMMENT": + return tok + return None + + def peek(self, raw: bool = False) -> Token | None: + # Return next token without advancing position + tok = self.next(raw=raw) + self.backup() + return tok + + def maybe(self, kind: str, raw: bool = False) -> Token | None: + # Return next token without advancing position if kind matches + tok = self.peek(raw=raw) + if tok and tok.kind == kind: + return tok + return None + + def expect(self, kind: str) -> Token | None: + # Return next token and advance position if kind matches + tkn = self.next() + if tkn is not None: + if tkn.kind == kind: + return tkn + self.backup() + return None + + def require(self, kind: str) -> Token: + # Return next token and advance position, requiring kind to match + tkn = self.next() + if tkn is not None and tkn.kind == kind: + return tkn + raise self.make_syntax_error(f"Expected {kind!r} but got {tkn and tkn.text!r}", tkn) + + def extract_line(self, lineno: int) -> str: + # Return source line `lineno` (1-based) + lines = self.src.splitlines() + if lineno > len(lines): + return "" + return lines[lineno - 1] + + def make_syntax_error(self, message: str, tkn: Token|None = None) -> SyntaxError: + # Construct a SyntaxError instance from message and token + if tkn is None: + tkn = self.peek() + if tkn is None: + tkn = self.tokens[-1] + return lx.make_syntax_error(message, + self.filename, tkn.line, tkn.column, self.extract_line(tkn.line)) + + +if __name__ == "__main__": + import sys + if sys.argv[1:]: + filename = sys.argv[1] + if filename == "-c" and sys.argv[2:]: + src = sys.argv[2] + filename = "" + else: + with open(filename) as f: + src = f.read() + else: + filename = "" + src = "if (x) { x.foo; // comment\n}" + p = PLexer(src, filename) + while not p.eof(): + tok = p.next(raw=True) + 
assert tok + left = repr(tok) + right = lx.to_text([tok]).rstrip() + print(f"{left:40.40} {right}") diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 30a676328706f6..0ece814e8f1883 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -348,6 +348,12 @@ def __init__(self): # "goto exit" if there are any. self.return_conversion = [] + # The C statements required to do some operations + # after the end of parsing but before cleaning up. + # These operations may be, for example, memory deallocations which + # can only be done without any error happening during argument parsing. + self.post_parsing = [] + # The C statements required to clean up after the impl call. self.cleanup = [] @@ -495,7 +501,8 @@ def permute_optional_groups(left, required, right): result = [] if not required: - assert not left + if left: + raise ValueError("required is empty but left is not") accumulator = [] counts = set() @@ -712,7 +719,7 @@ def output_templates(self, f): vararg = NO_VARARG pos_only = min_pos = max_pos = min_kw_only = pseudo_args = 0 for i, p in enumerate(parameters, 1): - if p.is_keyword_only() or vararg != NO_VARARG: + if p.is_keyword_only(): assert not p.is_positional_only() if not p.is_optional(): min_kw_only = i - max_pos @@ -819,6 +826,7 @@ def parser_body(prototype, *fields, declarations=''): {modifications} {return_value} = {c_basename}_impl({impl_arguments}); {return_conversion} + {post_parsing} {exit_label} {cleanup} @@ -955,7 +963,7 @@ def parser_body(prototype, *fields, declarations=''): parser_code.append(normalize_snippet(""" %s = PyTuple_New(%s); for (Py_ssize_t i = 0; i < %s; ++i) {{ - PyTuple_SET_ITEM(%s, i, args[%d + i]); + PyTuple_SET_ITEM(%s, i, Py_NewRef(args[%d + i])); }} """ % ( p.converter.parser_name, @@ -1008,13 +1016,14 @@ def parser_body(prototype, *fields, declarations=''): parser_definition = parser_body(parser_prototype, *parser_code) else: - has_optional_kw = (max(pos_only, min_pos) + min_kw_only < len(converters)) + has_optional_kw = (max(pos_only, min_pos) + min_kw_only < len(converters) - int(vararg != NO_VARARG)) if vararg == NO_VARARG: args_declaration = "_PyArg_UnpackKeywords", "%s, %s, %s" % ( min_pos, max_pos, min_kw_only ) + nargs = "nargs" else: args_declaration = "_PyArg_UnpackKeywordsWithVararg", "%s, %s, %s, %s" % ( min_pos, @@ -1022,6 +1031,7 @@ def parser_body(prototype, *fields, declarations=''): min_kw_only, vararg ) + nargs = f"Py_MIN(nargs, {max_pos})" if max_pos else "0" if not new_or_init: flags = "METH_FASTCALL|METH_KEYWORDS" parser_prototype = parser_prototype_fastcall_keywords @@ -1029,8 +1039,7 @@ def parser_body(prototype, *fields, declarations=''): declarations = declare_parser(f) declarations += "\nPyObject *argsbuf[%s];" % len(converters) if has_optional_kw: - pre_buffer = "0" if vararg != NO_VARARG else "nargs" - declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (pre_buffer, min_pos + min_kw_only) + declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (nargs, min_pos + min_kw_only) parser_code = [normalize_snippet(""" args = %s(args, nargs, NULL, kwnames, &_parser, %s, argsbuf); if (!args) {{ @@ -1047,7 +1056,7 @@ def parser_body(prototype, *fields, declarations=''): declarations += "\nPyObject * const *fastargs;" declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);" if has_optional_kw: - declarations += "\nPy_ssize_t noptargs = nargs + (kwargs ? 
PyDict_GET_SIZE(kwargs) : 0) - %d;" % (min_pos + min_kw_only) + declarations += "\nPy_ssize_t noptargs = %s + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - %d;" % (nargs, min_pos + min_kw_only) parser_code = [normalize_snippet(""" fastargs = %s(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, %s, argsbuf); if (!fastargs) {{ @@ -1459,6 +1468,7 @@ def render_function(self, clinic, f): template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) + template_dict['post_parsing'] = format_escape("".join(data.post_parsing).rstrip()) template_dict['cleanup'] = format_escape("".join(data.cleanup)) template_dict['return_value'] = data.return_value @@ -1483,6 +1493,7 @@ def render_function(self, clinic, f): return_conversion=template_dict['return_conversion'], initializers=template_dict['initializers'], modifications=template_dict['modifications'], + post_parsing=template_dict['post_parsing'], cleanup=template_dict['cleanup'], ) @@ -2724,6 +2735,10 @@ def _render_non_self(self, parameter, data): # parse_arguments self.parse_argument(data.parse_arguments) + # post_parsing + if post_parsing := self.post_parsing(): + data.post_parsing.append('/* Post parse cleanup for ' + name + ' */\n' + post_parsing.rstrip() + '\n') + # cleanup cleanup = self.cleanup() if cleanup: @@ -2819,6 +2834,14 @@ def modify(self): """ return "" + def post_parsing(self): + """ + The C statements required to do some operations after the end of parsing but before cleaning up. + Return a string containing this code indented at column 0. + If no operation is necessary, return an empty string. + """ + return "" + def cleanup(self): """ The C statements required to clean up after this variable. 
@@ -3415,10 +3438,10 @@ def converter_init(self, *, accept={str}, encoding=None, zeroes=False): if NoneType in accept and self.c_default == "Py_None": self.c_default = "NULL" - def cleanup(self): + def post_parsing(self): if self.encoding: name = self.name - return "".join(["if (", name, ") {\n PyMem_FREE(", name, ");\n}\n"]) + return f"PyMem_FREE({name});\n" def parse_arg(self, argname, displayname): if self.format_unit == 's': diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index 303409cb0077d1..c003c1ab4a23bd 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -82,6 +82,8 @@ def _sizeof_void_p(): Py_TPFLAGS_BASE_EXC_SUBCLASS = (1 << 30) Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31) +#From pycore_frame.h +FRAME_OWNED_BY_CSTACK = 3 MAX_OUTPUT_LEN=1024 @@ -1077,8 +1079,8 @@ def _f_lasti(self): first_instr = self._f_code().field("co_code_adaptive").cast(codeunit_p) return int(prev_instr - first_instr) - def is_entry(self): - return self._f_special("is_entry", bool) + def is_shim(self): + return self._f_special("owner", int) == FRAME_OWNED_BY_CSTACK def previous(self): return self._f_special("previous", PyFramePtr) @@ -1821,14 +1823,14 @@ def print_summary(self): interp_frame = self.get_pyop() while True: if interp_frame: + if interp_frame.is_shim(): + break line = interp_frame.get_truncated_repr(MAX_OUTPUT_LEN) sys.stdout.write('#%i %s\n' % (self.get_index(), line)) if not interp_frame.is_optimized_out(): line = interp_frame.current_line() if line is not None: sys.stdout.write(' %s\n' % line.strip()) - if interp_frame.is_entry(): - break else: sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) break @@ -1845,13 +1847,13 @@ def print_traceback(self): interp_frame = self.get_pyop() while True: if interp_frame: + if interp_frame.is_shim(): + break interp_frame.print_traceback() if not interp_frame.is_optimized_out(): line = interp_frame.current_line() if line is not None: sys.stdout.write(' %s\n' % line.strip()) - if interp_frame.is_entry(): - break else: sys.stdout.write(' (unable to read python frame information)\n') break @@ -2106,6 +2108,8 @@ def invoke(self, args, from_tty): while True: if not pyop_frame: print(UNABLE_READ_INFO_PYTHON_FRAME) + if pyop_frame.is_shim(): + break sys.stdout.write('Locals for %s\n' % (pyop_frame.co_name.proxyval(set()))) @@ -2114,8 +2118,6 @@ def invoke(self, args, from_tty): % (pyop_name.proxyval(set()), pyop_value.get_truncated_repr(MAX_OUTPUT_LEN))) - if pyop_frame.is_entry(): - break pyop_frame = pyop_frame.previous() diff --git a/Tools/i18n/pygettext.py b/Tools/i18n/pygettext.py index 6f889adffe6c7a..7ada79105db1ca 100755 --- a/Tools/i18n/pygettext.py +++ b/Tools/i18n/pygettext.py @@ -335,9 +335,10 @@ def __waiting(self, ttype, tstring, lineno): if ttype == tokenize.STRING and is_literal_string(tstring): self.__addentry(safe_eval(tstring), lineno, isdocstring=1) self.__freshmodule = 0 - elif ttype not in (tokenize.COMMENT, tokenize.NL): - self.__freshmodule = 0 - return + return + if ttype in (tokenize.COMMENT, tokenize.NL, tokenize.ENCODING): + return + self.__freshmodule = 0 # class or func/method docstring? 
if ttype == tokenize.NAME and tstring in ('class', 'def'): self.__state = self.__suiteseen diff --git a/Tools/msi/test/test_files.wxs b/Tools/msi/test/test_files.wxs index 9127ce894819e5..b5f68faef30e02 100644 --- a/Tools/msi/test/test_files.wxs +++ b/Tools/msi/test/test_files.wxs @@ -1,6 +1,6 @@ - + diff --git a/Tools/scripts/patchcheck.py b/Tools/patchcheck/patchcheck.py similarity index 100% rename from Tools/scripts/patchcheck.py rename to Tools/patchcheck/patchcheck.py diff --git a/Tools/scripts/reindent.py b/Tools/patchcheck/reindent.py similarity index 100% rename from Tools/scripts/reindent.py rename to Tools/patchcheck/reindent.py diff --git a/Tools/scripts/untabify.py b/Tools/patchcheck/untabify.py similarity index 100% rename from Tools/scripts/untabify.py rename to Tools/patchcheck/untabify.py diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c index 3ebb7bdd9b38c9..7df134b5ade8bb 100644 --- a/Tools/peg_generator/peg_extension/peg_extension.c +++ b/Tools/peg_generator/peg_extension/peg_extension.c @@ -12,8 +12,7 @@ _build_return_object(mod_ty module, int mode, PyObject *filename_ob, PyArena *ar } else if (mode == 1) { result = PyAST_mod2obj(module); } else { - result = Py_None; - Py_INCREF(result); + result = Py_NewRef(Py_None); } return result; diff --git a/Tools/scripts/README b/Tools/scripts/README index b53b0f21d7c293..b9522681595901 100644 --- a/Tools/scripts/README +++ b/Tools/scripts/README @@ -2,23 +2,9 @@ This directory contains a collection of executable Python scripts that are useful while building, extending or managing Python. 2to3 Main script for running the 2to3 conversion tool -abitype.py Converts a C file to use the PEP 384 type definition API combinerefs.py A helper for analyzing PYTHONDUMPREFS output -diff.py Print file diffs in context, unified, or ndiff formats -eptags.py Create Emacs TAGS file for Python modules -gprof2html.py Transform gprof(1) output into useful HTML idle3 Main program to start IDLE -md5sum.py Print MD5 checksums of argument files -ndiff.py Intelligent diff between text files (Tim Peters) -nm2def.py Create a template for PC/python_nt.def (Marc Lemburg) -parseentities.py Utility for parsing HTML entity definitions parse_html5_entities.py Utility for parsing HTML5 entity definitions -patchcheck.py Perform common checks and cleanup before committing -pathfix.py Change #!/usr/local/bin/python into something else -ptags.py Create vi tags file for Python modules pydoc3 Python documentation browser -reindent.py Change .py files to use 4-space indents run_tests.py Run the test suite with more sensible default options stable_abi.py Stable ABI checks and file generators. 
-untabify.py Replace tabs with spaces in argument files -win_add2path.py Add Python to the search path on Windows diff --git a/Tools/scripts/abitype.py b/Tools/scripts/abitype.py deleted file mode 100755 index d6a74a1fe64117..00000000000000 --- a/Tools/scripts/abitype.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python3 -# This script converts a C file to use the PEP 384 type definition API -# Usage: abitype.py < old_code > new_code -import re, sys - -###### Replacement of PyTypeObject static instances ############## - -# classify each token, giving it a one-letter code: -# S: static -# T: PyTypeObject -# I: ident -# W: whitespace -# =, {, }, ; : themselves -def classify(): - res = [] - for t,v in tokens: - if t == 'other' and v in "={};": - res.append(v) - elif t == 'ident': - if v == 'PyTypeObject': - res.append('T') - elif v == 'static': - res.append('S') - else: - res.append('I') - elif t == 'ws': - res.append('W') - else: - res.append('.') - return ''.join(res) - -# Obtain a list of fields of a PyTypeObject, in declaration order, -# skipping ob_base -# All comments are dropped from the variable (which are typically -# just the slot names, anyway), and information is discarded whether -# the original type was static. -def get_fields(start, real_end): - pos = start - # static? - if tokens[pos][1] == 'static': - pos += 2 - # PyTypeObject - pos += 2 - # name - name = tokens[pos][1] - pos += 1 - while tokens[pos][1] != '{': - pos += 1 - pos += 1 - # PyVarObject_HEAD_INIT - while tokens[pos][0] in ('ws', 'comment'): - pos += 1 - if tokens[pos][1] != 'PyVarObject_HEAD_INIT': - raise Exception('%s has no PyVarObject_HEAD_INIT' % name) - while tokens[pos][1] != ')': - pos += 1 - pos += 1 - # field definitions: various tokens, comma-separated - fields = [] - while True: - while tokens[pos][0] in ('ws', 'comment'): - pos += 1 - end = pos - while tokens[end][1] not in ',}': - if tokens[end][1] == '(': - nesting = 1 - while nesting: - end += 1 - if tokens[end][1] == '(': nesting+=1 - if tokens[end][1] == ')': nesting-=1 - end += 1 - assert end < real_end - # join field, excluding separator and trailing ws - end1 = end-1 - while tokens[end1][0] in ('ws', 'comment'): - end1 -= 1 - fields.append(''.join(t[1] for t in tokens[pos:end1+1])) - if tokens[end][1] == '}': - break - pos = end+1 - return name, fields - -# List of type slots as of Python 3.2, omitting ob_base -typeslots = [ - 'tp_name', - 'tp_basicsize', - 'tp_itemsize', - 'tp_dealloc', - 'tp_print', - 'tp_getattr', - 'tp_setattr', - 'tp_reserved', - 'tp_repr', - 'tp_as_number', - 'tp_as_sequence', - 'tp_as_mapping', - 'tp_hash', - 'tp_call', - 'tp_str', - 'tp_getattro', - 'tp_setattro', - 'tp_as_buffer', - 'tp_flags', - 'tp_doc', - 'tp_traverse', - 'tp_clear', - 'tp_richcompare', - 'tp_weaklistoffset', - 'tp_iter', - 'iternextfunc', - 'tp_methods', - 'tp_members', - 'tp_getset', - 'tp_base', - 'tp_dict', - 'tp_descr_get', - 'tp_descr_set', - 'tp_dictoffset', - 'tp_init', - 'tp_alloc', - 'tp_new', - 'tp_free', - 'tp_is_gc', - 'tp_bases', - 'tp_mro', - 'tp_cache', - 'tp_subclasses', - 'tp_weaklist', - 'tp_del', - 'tp_version_tag', -] - -# Generate a PyType_Spec definition -def make_slots(name, fields): - res = [] - res.append('static PyType_Slot %s_slots[] = {' % name) - # defaults for spec - spec = { 'tp_itemsize':'0' } - for i, val in enumerate(fields): - if val.endswith('0'): - continue - if typeslots[i] in ('tp_name', 'tp_doc', 'tp_basicsize', - 'tp_itemsize', 'tp_flags'): - spec[typeslots[i]] = val - continue - res.append(' {Py_%s, 
%s},' % (typeslots[i], val)) - res.append('};') - res.append('static PyType_Spec %s_spec = {' % name) - res.append(' %s,' % spec['tp_name']) - res.append(' %s,' % spec['tp_basicsize']) - res.append(' %s,' % spec['tp_itemsize']) - res.append(' %s,' % spec['tp_flags']) - res.append(' %s_slots,' % name) - res.append('};\n') - return '\n'.join(res) - - -if __name__ == '__main__': - - ############ Simplistic C scanner ################################## - tokenizer = re.compile( - r"(?P#.*\n)" - r"|(?P/\*.*?\*/)" - r"|(?P[a-zA-Z_][a-zA-Z0-9_]*)" - r"|(?P[ \t\n]+)" - r"|(?P.)", - re.MULTILINE) - - tokens = [] - source = sys.stdin.read() - pos = 0 - while pos != len(source): - m = tokenizer.match(source, pos) - tokens.append([m.lastgroup, m.group()]) - pos += len(tokens[-1][1]) - if tokens[-1][0] == 'preproc': - # continuation lines are considered - # only in preprocess statements - while tokens[-1][1].endswith('\\\n'): - nl = source.find('\n', pos) - if nl == -1: - line = source[pos:] - else: - line = source[pos:nl+1] - tokens[-1][1] += line - pos += len(line) - - # Main loop: replace all static PyTypeObjects until - # there are none left. - while 1: - c = classify() - m = re.search('(SW)?TWIW?=W?{.*?};', c) - if not m: - break - start = m.start() - end = m.end() - name, fields = get_fields(start, end) - tokens[start:end] = [('',make_slots(name, fields))] - - # Output result to stdout - for t, v in tokens: - sys.stdout.write(v) diff --git a/Tools/scripts/divmod_threshold.py b/Tools/scripts/divmod_threshold.py new file mode 100644 index 00000000000000..69819fe3098120 --- /dev/null +++ b/Tools/scripts/divmod_threshold.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# +# Determine threshold for switching from longobject.c divmod to +# _pylong.int_divmod(). + +from random import randrange +from time import perf_counter as now +from _pylong import int_divmod as divmod_fast + +BITS_PER_DIGIT = 30 + + +def rand_digits(n): + top = 1 << (n * BITS_PER_DIGIT) + return randrange(top >> 1, top) + + +def probe_den(nd): + den = rand_digits(nd) + count = 0 + for nn in range(nd, nd + 3000): + num = rand_digits(nn) + t0 = now() + e1, e2 = divmod(num, den) + t1 = now() + f1, f2 = divmod_fast(num, den) + t2 = now() + s1 = t1 - t0 + s2 = t2 - t1 + assert e1 == f1 + assert e2 == f2 + if s2 < s1: + count += 1 + if count >= 3: + print( + "for", + nd, + "denom digits,", + nn - nd, + "extra num digits is enough", + ) + break + else: + count = 0 + else: + print("for", nd, "denom digits, no num seems big enough") + + +def main(): + for nd in range(30): + nd = (nd + 1) * 100 + probe_den(nd) + + +if __name__ == '__main__': + main() diff --git a/Tools/scripts/eptags.py b/Tools/scripts/eptags.py deleted file mode 100755 index 7f8059ba71adf3..00000000000000 --- a/Tools/scripts/eptags.py +++ /dev/null @@ -1,57 +0,0 @@ -#! /usr/bin/env python3 -"""Create a TAGS file for Python programs, usable with GNU Emacs. - -usage: eptags pyfiles... - -The output TAGS file is usable with Emacs version 18, 19, 20. -Tagged are: - - functions (even inside other defs or classes) - - classes - -eptags warns about files it cannot open. -eptags will not give warnings about duplicate tags. - -BUGS: - Because of tag duplication (methods with the same name in different - classes), TAGS files are not very useful for most object-oriented - python projects. 
-""" -import sys,re - -expr = r'^[ \t]*(def|class)[ \t]+([a-zA-Z_][a-zA-Z0-9_]*)[ \t]*[:\(]' -matcher = re.compile(expr) - -def treat_file(filename, outfp): - """Append tags found in file named 'filename' to the open file 'outfp'""" - try: - fp = open(filename, 'r') - except OSError: - sys.stderr.write('Cannot open %s\n'%filename) - return - with fp: - charno = 0 - lineno = 0 - tags = [] - size = 0 - while 1: - line = fp.readline() - if not line: - break - lineno = lineno + 1 - m = matcher.search(line) - if m: - tag = m.group(0) + '\177%d,%d\n' % (lineno, charno) - tags.append(tag) - size = size + len(tag) - charno = charno + len(line) - outfp.write('\f\n%s,%d\n' % (filename,size)) - for tag in tags: - outfp.write(tag) - -def main(): - with open('TAGS', 'w') as outfp: - for filename in sys.argv[1:]: - treat_file(filename, outfp) - -if __name__=="__main__": - main() diff --git a/Tools/scripts/gprof2html.py b/Tools/scripts/gprof2html.py deleted file mode 100755 index bf0530ef3e4337..00000000000000 --- a/Tools/scripts/gprof2html.py +++ /dev/null @@ -1,87 +0,0 @@ -#! /usr/bin/env python3 - -"""Transform gprof(1) output into useful HTML.""" - -import html -import os -import re -import sys -import webbrowser - -header = """\ - - - gprof output (%s) - - -
-"""
-
-trailer = """\
-
- - -""" - -def add_escapes(filename): - with open(filename, encoding="utf-8") as fp: - for line in fp: - yield html.escape(line) - -def gprof2html(input, output, filename): - output.write(header % filename) - for line in input: - output.write(line) - if line.startswith(" time"): - break - labels = {} - for line in input: - m = re.match(r"(.* )(\w+)\n", line) - if not m: - output.write(line) - break - stuff, fname = m.group(1, 2) - labels[fname] = fname - output.write('%s%s\n' % - (stuff, fname, fname, fname)) - for line in input: - output.write(line) - if line.startswith("index % time"): - break - for line in input: - m = re.match(r"(.* )(\w+)(( <cycle.*>)? \[\d+\])\n", line) - if not m: - output.write(line) - if line.startswith("Index by function name"): - break - continue - prefix, fname, suffix = m.group(1, 2, 3) - if fname not in labels: - output.write(line) - continue - if line.startswith("["): - output.write('%s%s%s\n' % - (prefix, fname, fname, fname, suffix)) - else: - output.write('%s%s%s\n' % - (prefix, fname, fname, suffix)) - for line in input: - for part in re.findall(r"(\w+(?:\.c)?|\W+)", line): - if part in labels: - part = '%s' % (part, part) - output.write(part) - output.write(trailer) - - -def main(): - filename = "gprof.out" - if sys.argv[1:]: - filename = sys.argv[1] - outputfilename = filename + ".html" - input = add_escapes(filename) - with open(outputfilename, "w", encoding="utf-8") as output: - gprof2html(input, output, filename) - webbrowser.open("file:" + os.path.abspath(outputfilename)) - -if __name__ == '__main__': - main() diff --git a/Tools/scripts/md5sum.py b/Tools/scripts/md5sum.py deleted file mode 100755 index f910576377aa3d..00000000000000 --- a/Tools/scripts/md5sum.py +++ /dev/null @@ -1,93 +0,0 @@ -#! /usr/bin/env python3 - -"""Python utility to print MD5 checksums of argument files. -""" - - -bufsize = 8096 -fnfilter = None -rmode = 'rb' - -usage = """ -usage: md5sum.py [-b] [-t] [-l] [-s bufsize] [file ...] --b : read files in binary mode (default) --t : read files in text mode (you almost certainly don't want this!) --l : print last pathname component only --s bufsize: read buffer size (default %d) -file ... 
: files to sum; '-' or no files means stdin -""" % bufsize - -import io -import sys -import os -import getopt -from hashlib import md5 - -def sum(*files): - sts = 0 - if files and isinstance(files[-1], io.IOBase): - out, files = files[-1], files[:-1] - else: - out = sys.stdout - if len(files) == 1 and not isinstance(files[0], str): - files = files[0] - for f in files: - if isinstance(f, str): - if f == '-': - sts = printsumfp(sys.stdin, '', out) or sts - else: - sts = printsum(f, out) or sts - else: - sts = sum(f, out) or sts - return sts - -def printsum(filename, out=sys.stdout): - try: - fp = open(filename, rmode) - except IOError as msg: - sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg)) - return 1 - with fp: - if fnfilter: - filename = fnfilter(filename) - sts = printsumfp(fp, filename, out) - return sts - -def printsumfp(fp, filename, out=sys.stdout): - m = md5() - try: - while 1: - data = fp.read(bufsize) - if not data: - break - if isinstance(data, str): - data = data.encode(fp.encoding) - m.update(data) - except IOError as msg: - sys.stderr.write('%s: I/O error: %s\n' % (filename, msg)) - return 1 - out.write('%s %s\n' % (m.hexdigest(), filename)) - return 0 - -def main(args = sys.argv[1:], out=sys.stdout): - global fnfilter, rmode, bufsize - try: - opts, args = getopt.getopt(args, 'blts:') - except getopt.error as msg: - sys.stderr.write('%s: %s\n%s' % (sys.argv[0], msg, usage)) - return 2 - for o, a in opts: - if o == '-l': - fnfilter = os.path.basename - elif o == '-b': - rmode = 'rb' - elif o == '-t': - rmode = 'r' - elif o == '-s': - bufsize = int(a) - if not args: - args = ['-'] - return sum(args, out) - -if __name__ == '__main__' or __name__ == sys.argv[0]: - sys.exit(main(sys.argv[1:], sys.stdout)) diff --git a/Tools/scripts/nm2def.py b/Tools/scripts/nm2def.py deleted file mode 100755 index a885ebd6fecc2f..00000000000000 --- a/Tools/scripts/nm2def.py +++ /dev/null @@ -1,104 +0,0 @@ -#! /usr/bin/env python3 -"""nm2def.py - -Helpers to extract symbols from Unix libs and auto-generate -Windows definition files from them. Depends on nm(1). Tested -on Linux and Solaris only (-p option to nm is for Solaris only). - -By Marc-Andre Lemburg, Aug 1998. - -Additional notes: the output of nm is supposed to look like this: - -acceler.o: -000001fd T PyGrammar_AddAccelerators - U PyGrammar_FindDFA -00000237 T PyGrammar_RemoveAccelerators - U _IO_stderr_ - U exit - U fprintf - U free - U malloc - U printf - -grammar1.o: -00000000 T PyGrammar_FindDFA -00000034 T PyGrammar_LabelRepr - U _PyParser_TokenNames - U abort - U printf - U sprintf - -... - -Even if this isn't the default output of your nm, there is generally an -option to produce this format (since it is the original v7 Unix format). 
- -""" -import os, sys - -PYTHONLIB = 'libpython%d.%d.a' % sys.version_info[:2] -PC_PYTHONLIB = 'Python%d%d.dll' % sys.version_info[:2] -NM = 'nm -p -g %s' # For Linux, use "nm -g %s" - -def symbols(lib=PYTHONLIB,types=('T','C','D')): - - with os.popen(NM % lib) as pipe: - lines = pipe.readlines() - lines = [s.strip() for s in lines] - symbols = {} - for line in lines: - if len(line) == 0 or ':' in line: - continue - items = line.split() - if len(items) != 3: - continue - address, type, name = items - if type not in types: - continue - symbols[name] = address,type - return symbols - -def export_list(symbols): - - data = [] - code = [] - for name,(addr,type) in symbols.items(): - if type in ('C','D'): - data.append('\t'+name) - else: - code.append('\t'+name) - data.sort() - data.append('') - code.sort() - return ' DATA\n'.join(data)+'\n'+'\n'.join(code) - -# Definition file template -DEF_TEMPLATE = """\ -EXPORTS -%s -""" - -# Special symbols that have to be included even though they don't -# pass the filter -SPECIALS = ( - ) - -def filter_Python(symbols,specials=SPECIALS): - - for name in list(symbols.keys()): - if name[:2] == 'Py' or name[:3] == '_Py': - pass - elif name not in specials: - del symbols[name] - -def main(): - - s = symbols(PYTHONLIB) - filter_Python(s) - exports = export_list(s) - f = sys.stdout # open('PC/python_nt.def','w') - f.write(DEF_TEMPLATE % (exports)) - # f.close() - -if __name__ == '__main__': - main() diff --git a/Tools/scripts/pathfix.py b/Tools/scripts/pathfix.py deleted file mode 100755 index f957b11547179d..00000000000000 --- a/Tools/scripts/pathfix.py +++ /dev/null @@ -1,225 +0,0 @@ -#!/usr/bin/env python3 - -# Change the #! line (shebang) occurring in Python scripts. The new interpreter -# pathname must be given with a -i option. -# -# Command line arguments are files or directories to be processed. -# Directories are searched recursively for files whose name looks -# like a python module. -# Symbolic links are always ignored (except as explicit directory -# arguments). -# The original file is kept as a back-up (with a "~" attached to its name), -# -n flag can be used to disable this. - -# Sometimes you may find shebangs with flags such as `#! /usr/bin/env python -si`. -# Normally, pathfix overwrites the entire line, including the flags. -# To change interpreter and keep flags from the original shebang line, use -k. -# If you want to keep flags and add to them one single literal flag, use option -a. - - -# Undoubtedly you can do this using find and sed or perl, but this is -# a nice example of Python code that recurses down a directory tree -# and uses regular expressions. Also note several subtleties like -# preserving the file's mode and avoiding to even write a temp file -# when no changes are needed for a file. -# -# NB: by changing only the function fixfile() you can turn this -# into a program for a different change to Python programs... 
- -import sys -import os -from stat import * -import getopt - -err = sys.stderr.write -dbg = err -rep = sys.stdout.write - -new_interpreter = None -preserve_timestamps = False -create_backup = True -keep_flags = False -add_flags = b'' - - -def main(): - global new_interpreter - global preserve_timestamps - global create_backup - global keep_flags - global add_flags - - usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' % - sys.argv[0]) - try: - opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn') - except getopt.error as msg: - err(str(msg) + '\n') - err(usage) - sys.exit(2) - for o, a in opts: - if o == '-i': - new_interpreter = a.encode() - if o == '-p': - preserve_timestamps = True - if o == '-n': - create_backup = False - if o == '-k': - keep_flags = True - if o == '-a': - add_flags = a.encode() - if b' ' in add_flags: - err("-a option doesn't support whitespaces") - sys.exit(2) - if not new_interpreter or not new_interpreter.startswith(b'/') or \ - not args: - err('-i option or file-or-directory missing\n') - err(usage) - sys.exit(2) - bad = 0 - for arg in args: - if os.path.isdir(arg): - if recursedown(arg): bad = 1 - elif os.path.islink(arg): - err(arg + ': will not process symbolic links\n') - bad = 1 - else: - if fix(arg): bad = 1 - sys.exit(bad) - - -def ispython(name): - return name.endswith('.py') - - -def recursedown(dirname): - dbg('recursedown(%r)\n' % (dirname,)) - bad = 0 - try: - names = os.listdir(dirname) - except OSError as msg: - err('%s: cannot list directory: %r\n' % (dirname, msg)) - return 1 - names.sort() - subdirs = [] - for name in names: - if name in (os.curdir, os.pardir): continue - fullname = os.path.join(dirname, name) - if os.path.islink(fullname): pass - elif os.path.isdir(fullname): - subdirs.append(fullname) - elif ispython(name): - if fix(fullname): bad = 1 - for fullname in subdirs: - if recursedown(fullname): bad = 1 - return bad - - -def fix(filename): -## dbg('fix(%r)\n' % (filename,)) - try: - f = open(filename, 'rb') - except IOError as msg: - err('%s: cannot open: %r\n' % (filename, msg)) - return 1 - with f: - line = f.readline() - fixed = fixline(line) - if line == fixed: - rep(filename+': no change\n') - return - head, tail = os.path.split(filename) - tempname = os.path.join(head, '@' + tail) - try: - g = open(tempname, 'wb') - except IOError as msg: - err('%s: cannot create: %r\n' % (tempname, msg)) - return 1 - with g: - rep(filename + ': updating\n') - g.write(fixed) - BUFSIZE = 8*1024 - while 1: - buf = f.read(BUFSIZE) - if not buf: break - g.write(buf) - - # Finishing touch -- move files - - mtime = None - atime = None - # First copy the file's mode to the temp file - try: - statbuf = os.stat(filename) - mtime = statbuf.st_mtime - atime = statbuf.st_atime - os.chmod(tempname, statbuf[ST_MODE] & 0o7777) - except OSError as msg: - err('%s: warning: chmod failed (%r)\n' % (tempname, msg)) - # Then make a backup of the original file as filename~ - if create_backup: - try: - os.rename(filename, filename + '~') - except OSError as msg: - err('%s: warning: backup failed (%r)\n' % (filename, msg)) - else: - try: - os.remove(filename) - except OSError as msg: - err('%s: warning: removing failed (%r)\n' % (filename, msg)) - # Now move the temp file to the original file - try: - os.rename(tempname, filename) - except OSError as msg: - err('%s: rename failed (%r)\n' % (filename, msg)) - return 1 - if preserve_timestamps: - if atime and mtime: - try: - os.utime(filename, (atime, mtime)) - except OSError as msg: - err('%s: reset 
of timestamp failed (%r)\n' % (filename, msg)) - return 1 - # Return success - return 0 - - -def parse_shebang(shebangline): - shebangline = shebangline.rstrip(b'\n') - start = shebangline.find(b' -') - if start == -1: - return b'' - return shebangline[start:] - - -def populate_flags(shebangline): - old_flags = b'' - if keep_flags: - old_flags = parse_shebang(shebangline) - if old_flags: - old_flags = old_flags[2:] - if not (old_flags or add_flags): - return b'' - # On Linux, the entire string following the interpreter name - # is passed as a single argument to the interpreter. - # e.g. "#! /usr/bin/python3 -W Error -s" runs "/usr/bin/python3 "-W Error -s" - # so shebang should have single '-' where flags are given and - # flag might need argument for that reasons adding new flags is - # between '-' and original flags - # e.g. #! /usr/bin/python3 -sW Error - return b' -' + add_flags + old_flags - - -def fixline(line): - if not line.startswith(b'#!'): - return line - - if b"python" not in line: - return line - - flags = populate_flags(line) - return b'#! ' + new_interpreter + flags + b'\n' - - -if __name__ == '__main__': - main() diff --git a/Tools/scripts/pep384_macrocheck.py b/Tools/scripts/pep384_macrocheck.py deleted file mode 100644 index ab9dd7c972aab5..00000000000000 --- a/Tools/scripts/pep384_macrocheck.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -pep384_macrocheck.py - -This program tries to locate errors in the relevant Python header -files where macros access type fields when they are reachable from -the limited API. - -The idea is to search macros with the string "->tp_" in it. -When the macro name does not begin with an underscore, -then we have found a dormant error. - -Christian Tismer -2018-06-02 -""" - -import sys -import os -import re - - -DEBUG = False - -def dprint(*args, **kw): - if DEBUG: - print(*args, **kw) - -def parse_headerfiles(startpath): - """ - Scan all header files which are reachable fronm Python.h - """ - search = "Python.h" - name = os.path.join(startpath, search) - if not os.path.exists(name): - raise ValueError("file {} was not found in {}\n" - "Please give the path to Python's include directory." - .format(search, startpath)) - errors = 0 - with open(name) as python_h: - while True: - line = python_h.readline() - if not line: - break - found = re.match(r'^\s*#\s*include\s*"(\w+\.h)"', line) - if not found: - continue - include = found.group(1) - dprint("Scanning", include) - name = os.path.join(startpath, include) - if not os.path.exists(name): - name = os.path.join(startpath, "../PC", include) - errors += parse_file(name) - return errors - -def ifdef_level_gen(): - """ - Scan lines for #ifdef and track the level. 
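The level tracking is done with a send()-driven generator; a stripped-down illustration of that protocol (my sketch, not the checker's code) looks like this:

```python
# Illustrative sketch of the send() protocol used below, on a tiny input:
# prime the generator with next(), then feed it lines and read back the level.
import re

def nesting_level():
    level = 0
    while True:
        line = yield level
        if re.match(r"^\s*#\s*if", line):      # covers #if, #ifdef and #ifndef
            level += 1
        elif re.match(r"^\s*#\s*endif", line):
            level -= 1

gen = nesting_level()
next(gen)                                      # prime: yields the initial 0
for src in ("#ifdef Py_LIMITED_API", "x = 1;", "#endif"):
    print(gen.send(src), src)                  # prints 1, 1, 0
```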
- """ - level = 0 - ifdef_pattern = r"^\s*#\s*if" # covers ifdef and ifndef as well - endif_pattern = r"^\s*#\s*endif" - while True: - line = yield level - if re.match(ifdef_pattern, line): - level += 1 - elif re.match(endif_pattern, line): - level -= 1 - -def limited_gen(): - """ - Scan lines for Py_LIMITED_API yes(1) no(-1) or nothing (0) - """ - limited = [0] # nothing - unlimited_pattern = r"^\s*#\s*ifndef\s+Py_LIMITED_API" - limited_pattern = "|".join([ - r"^\s*#\s*ifdef\s+Py_LIMITED_API", - r"^\s*#\s*(el)?if\s+!\s*defined\s*\(\s*Py_LIMITED_API\s*\)\s*\|\|", - r"^\s*#\s*(el)?if\s+defined\s*\(\s*Py_LIMITED_API" - ]) - else_pattern = r"^\s*#\s*else" - ifdef_level = ifdef_level_gen() - status = next(ifdef_level) - wait_for = -1 - while True: - line = yield limited[-1] - new_status = ifdef_level.send(line) - dir = new_status - status - status = new_status - if dir == 1: - if re.match(unlimited_pattern, line): - limited.append(-1) - wait_for = status - 1 - elif re.match(limited_pattern, line): - limited.append(1) - wait_for = status - 1 - elif dir == -1: - # this must have been an endif - if status == wait_for: - limited.pop() - wait_for = -1 - else: - # it could be that we have an elif - if re.match(limited_pattern, line): - limited.append(1) - wait_for = status - 1 - elif re.match(else_pattern, line): - limited.append(-limited.pop()) # negate top - -def parse_file(fname): - errors = 0 - with open(fname) as f: - lines = f.readlines() - type_pattern = r"^.*?->\s*tp_" - define_pattern = r"^\s*#\s*define\s+(\w+)" - limited = limited_gen() - status = next(limited) - for nr, line in enumerate(lines): - status = limited.send(line) - line = line.rstrip() - dprint(fname, nr, status, line) - if status != -1: - if re.match(define_pattern, line): - name = re.match(define_pattern, line).group(1) - if not name.startswith("_"): - # found a candidate, check it! - macro = line + "\n" - idx = nr - while line.endswith("\\"): - idx += 1 - line = lines[idx].rstrip() - macro += line + "\n" - if re.match(type_pattern, macro, re.DOTALL): - # this type field can reach the limited API - report(fname, nr + 1, macro) - errors += 1 - return errors - -def report(fname, nr, macro): - f = sys.stderr - print(fname + ":" + str(nr), file=f) - print(macro, file=f) - -if __name__ == "__main__": - p = sys.argv[1] if sys.argv[1:] else "../../Include" - errors = parse_headerfiles(p) - if errors: - # somehow it makes sense to raise a TypeError :-) - raise TypeError("These {} locations contradict the limited API." - .format(errors)) diff --git a/Tools/scripts/ptags.py b/Tools/scripts/ptags.py deleted file mode 100755 index eedd411702c199..00000000000000 --- a/Tools/scripts/ptags.py +++ /dev/null @@ -1,54 +0,0 @@ -#! /usr/bin/env python3 - -# ptags -# -# Create a tags file for Python programs, usable with vi. -# Tagged are: -# - functions (even inside other defs or classes) -# - classes -# - filenames -# Warns about files it cannot open. -# No warnings about duplicate tags. - -import sys, re, os - -tags = [] # Modified global variable! 
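Everything the deleted ptags.py tags comes from one regular expression; the snippet below (illustrative only, with a made-up filename) shows the vi tags lines it would emit:

```python
# Illustrative only: the single pattern ptags.py relied on, applied to sample
# lines to show the tags entries it produced.
import re

matcher = re.compile(r'^[ \t]*(def|class)[ \t]+([a-zA-Z0-9_]+)[ \t]*[:\(]')

for line in ("def spam(x):", "    class Eggs:", "result = spam(1)"):
    m = matcher.match(line)
    if m:
        # tags format: name<TAB>filename<TAB>/^matched prefix/
        print(m.group(2) + "\texample.py\t/^" + m.group(0) + "/")
```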
- -def main(): - args = sys.argv[1:] - for filename in args: - treat_file(filename) - if tags: - with open('tags', 'w') as fp: - tags.sort() - for s in tags: fp.write(s) - - -expr = r'^[ \t]*(def|class)[ \t]+([a-zA-Z0-9_]+)[ \t]*[:\(]' -matcher = re.compile(expr) - -def treat_file(filename): - try: - fp = open(filename, 'r') - except: - sys.stderr.write('Cannot open %s\n' % filename) - return - with fp: - base = os.path.basename(filename) - if base[-3:] == '.py': - base = base[:-3] - s = base + '\t' + filename + '\t' + '1\n' - tags.append(s) - while 1: - line = fp.readline() - if not line: - break - m = matcher.match(line) - if m: - content = m.group(0) - name = m.group(2) - s = name + '\t' + filename + '\t/^' + content + '/\n' - tags.append(s) - -if __name__ == '__main__': - main() diff --git a/Tools/scripts/startuptime.py b/Tools/scripts/startuptime.py deleted file mode 100644 index 1bb5b208f66e04..00000000000000 --- a/Tools/scripts/startuptime.py +++ /dev/null @@ -1,22 +0,0 @@ -# Quick script to time startup for various binaries - -import subprocess -import sys -import time - -NREPS = 100 - - -def main(): - binaries = sys.argv[1:] - for bin in binaries: - t0 = time.time() - for _ in range(NREPS): - result = subprocess.run([bin, "-c", "pass"]) - result.check_returncode() - t1 = time.time() - print(f"{(t1-t0)/NREPS:6.3f} {bin}") - - -if __name__ == "__main__": - main() diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 2e8261a4755b99..8d91bda5a43a0d 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -2,7 +2,9 @@ default stats folders. """ +import argparse import collections +import json import os.path import opcode from datetime import date @@ -32,6 +34,93 @@ TOTAL = "specialization.deferred", "specialization.hit", "specialization.miss", "execution_count" +def join_rows(a_rows, b_rows): + """ + Joins two tables together, side-by-side, where the first column in each is a + common key. 
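For example, two stats tables keyed by opcode name would be merged like this (an illustrative call with made-up data, not taken from the file):

```python
# Hypothetical inputs: the first column is the shared key, the rest is payload.
base = [("LOAD_FAST", 100, "10.0%"), ("STORE_FAST", 50, "5.0%")]
head = [("LOAD_FAST", 120, "11.0%"), ("RETURN_VALUE", 30, "3.0%")]

join_rows(base, head)
# [('LOAD_FAST', 100, '10.0%', 120, '11.0%'),
#  ('STORE_FAST', 50, '5.0%', '', ''),
#  ('RETURN_VALUE', '', '', 30, '3.0%')]
```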
+ """ + if len(a_rows) == 0 and len(b_rows) == 0: + return [] + + if len(a_rows): + a_ncols = list(set(len(x) for x in a_rows)) + if len(a_ncols) != 1: + raise ValueError("Table a is ragged") + + if len(b_rows): + b_ncols = list(set(len(x) for x in b_rows)) + if len(b_ncols) != 1: + raise ValueError("Table b is ragged") + + if len(a_rows) and len(b_rows) and a_ncols[0] != b_ncols[0]: + raise ValueError("Tables have different widths") + + if len(a_rows): + ncols = a_ncols[0] + else: + ncols = b_ncols[0] + + default = [""] * (ncols - 1) + a_data = {x[0]: x[1:] for x in a_rows} + b_data = {x[0]: x[1:] for x in b_rows} + + if len(a_data) != len(a_rows) or len(b_data) != len(b_rows): + raise ValueError("Duplicate keys") + + # To preserve ordering, use A's keys as is and then add any in B that aren't + # in A + keys = list(a_data.keys()) + [k for k in b_data.keys() if k not in a_data] + return [(k, *a_data.get(k, default), *b_data.get(k, default)) for k in keys] + +def calculate_specialization_stats(family_stats, total): + rows = [] + for key in sorted(family_stats): + if key.startswith("specialization.failure_kinds"): + continue + if key in ("specialization.hit", "specialization.miss"): + label = key[len("specialization."):] + elif key == "execution_count": + label = "unquickened" + elif key in ("specialization.success", "specialization.failure", "specializable"): + continue + elif key.startswith("pair"): + continue + else: + label = key + rows.append((f"{label:>12}", f"{family_stats[key]:>12}", f"{100*family_stats[key]/total:0.1f}%")) + return rows + +def calculate_specialization_success_failure(family_stats): + total_attempts = 0 + for key in ("specialization.success", "specialization.failure"): + total_attempts += family_stats.get(key, 0) + rows = [] + if total_attempts: + for key in ("specialization.success", "specialization.failure"): + label = key[len("specialization."):] + label = label[0].upper() + label[1:] + val = family_stats.get(key, 0) + rows.append((label, val, f"{100*val/total_attempts:0.1f}%")) + return rows + +def calculate_specialization_failure_kinds(name, family_stats, defines): + total_failures = family_stats.get("specialization.failure", 0) + failure_kinds = [ 0 ] * 30 + for key in family_stats: + if not key.startswith("specialization.failure_kind"): + continue + _, index = key[:-1].split("[") + index = int(index) + failure_kinds[index] = family_stats[key] + failures = [(value, index) for (index, value) in enumerate(failure_kinds)] + failures.sort(reverse=True) + rows = [] + for value, index in failures: + if not value: + continue + rows.append((kind_to_text(index, defines, name), value, f"{100*value/total_failures:0.1f}%")) + return rows + def print_specialization_stats(name, family_stats, defines): if "specializable" not in family_stats: return @@ -39,65 +128,65 @@ def print_specialization_stats(name, family_stats, defines): if total == 0: return with Section(name, 3, f"specialization stats for {name} family"): - rows = [] - for key in sorted(family_stats): - if key.startswith("specialization.failure_kinds"): - continue - if key in ("specialization.hit", "specialization.miss"): - label = key[len("specialization."):] - elif key == "execution_count": - label = "unquickened" - elif key in ("specialization.success", "specialization.failure", "specializable"): - continue - elif key.startswith("pair"): - continue - else: - label = key - rows.append((f"{label:>12}", f"{family_stats[key]:>12}", f"{100*family_stats[key]/total:0.1f}%")) + rows = 
calculate_specialization_stats(family_stats, total) emit_table(("Kind", "Count", "Ratio"), rows) - print_title("Specialization attempts", 4) - total_attempts = 0 - for key in ("specialization.success", "specialization.failure"): - total_attempts += family_stats.get(key, 0) - rows = [] - if total_attempts: - for key in ("specialization.success", "specialization.failure"): - label = key[len("specialization."):] - label = label[0].upper() + label[1:] - val = family_stats.get(key, 0) - rows.append((label, val, f"{100*val/total_attempts:0.1f}%")) + rows = calculate_specialization_success_failure(family_stats) + if rows: + print_title("Specialization attempts", 4) emit_table(("", "Count:", "Ratio:"), rows) - total_failures = family_stats.get("specialization.failure", 0) - failure_kinds = [ 0 ] * 30 - for key in family_stats: - if not key.startswith("specialization.failure_kind"): - continue - _, index = key[:-1].split("[") - index = int(index) - failure_kinds[index] = family_stats[key] - failures = [(value, index) for (index, value) in enumerate(failure_kinds)] - failures.sort(reverse=True) - rows = [] - for value, index in failures: - if not value: - continue - rows.append((kind_to_text(index, defines, name), value, f"{100*value/total_failures:0.1f}%")) - emit_table(("Failure kind", "Count:", "Ratio:"), rows) - -def gather_stats(): - stats = collections.Counter() - for filename in os.listdir(DEFAULT_DIR): - with open(os.path.join(DEFAULT_DIR, filename)) as fd: - for line in fd: - try: - key, value = line.split(":") - except ValueError: - print (f"Unparsable line: '{line.strip()}' in {filename}", file=sys.stderr) - continue - key = key.strip() - value = int(value) - stats[key] += value - return stats + rows = calculate_specialization_failure_kinds(name, family_stats, defines) + emit_table(("Failure kind", "Count:", "Ratio:"), rows) + +def print_comparative_specialization_stats(name, base_family_stats, head_family_stats, defines): + if "specializable" not in base_family_stats: + return + + base_total = sum(base_family_stats.get(kind, 0) for kind in TOTAL) + head_total = sum(head_family_stats.get(kind, 0) for kind in TOTAL) + if base_total + head_total == 0: + return + with Section(name, 3, f"specialization stats for {name} family"): + base_rows = calculate_specialization_stats(base_family_stats, base_total) + head_rows = calculate_specialization_stats(head_family_stats, head_total) + emit_table( + ("Kind", "Base Count", "Base Ratio", "Head Count", "Head Ratio"), + join_rows(base_rows, head_rows) + ) + base_rows = calculate_specialization_success_failure(base_family_stats) + head_rows = calculate_specialization_success_failure(head_family_stats) + rows = join_rows(base_rows, head_rows) + if rows: + print_title("Specialization attempts", 4) + emit_table(("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), rows) + base_rows = calculate_specialization_failure_kinds(name, base_family_stats, defines) + head_rows = calculate_specialization_failure_kinds(name, head_family_stats, defines) + emit_table( + ("Failure kind", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + join_rows(base_rows, head_rows) + ) + +def gather_stats(input): + # Note the output of this function must be JSON-serializable + + if os.path.isfile(input): + with open(input, "r") as fd: + return json.load(fd) + elif os.path.isdir(input): + stats = collections.Counter() + for filename in os.listdir(input): + with open(os.path.join(input, filename)) as fd: + for line in fd: + try: + key, value = line.split(":") + 
except ValueError: + print(f"Unparsable line: '{line.strip()}' in {filename}", file=sys.stderr) + continue + key = key.strip() + value = int(value) + stats[key] += value + return stats + else: + raise ValueError(f"{input:r} is not a file or directory path") def extract_opcode_stats(stats): opcode_stats = [ {} for _ in range(256) ] @@ -213,50 +302,98 @@ def emit_table(header, rows): print("|", " | ".join(to_str(i) for i in row), "|") print() +def calculate_execution_counts(opcode_stats, total): + counts = [] + for i, opcode_stat in enumerate(opcode_stats): + if "execution_count" in opcode_stat: + count = opcode_stat['execution_count'] + miss = 0 + if "specializable" not in opcode_stat: + miss = opcode_stat.get("specialization.miss") + counts.append((count, opname[i], miss)) + counts.sort(reverse=True) + cumulative = 0 + rows = [] + for (count, name, miss) in counts: + cumulative += count + if miss: + miss = f"{100*miss/count:0.1f}%" + else: + miss = "" + rows.append((name, count, f"{100*count/total:0.1f}%", + f"{100*cumulative/total:0.1f}%", miss)) + return rows + def emit_execution_counts(opcode_stats, total): with Section("Execution counts", summary="execution counts for all instructions"): - counts = [] - for i, opcode_stat in enumerate(opcode_stats): - if "execution_count" in opcode_stat: - count = opcode_stat['execution_count'] - miss = 0 - if "specializable" not in opcode_stat: - miss = opcode_stat.get("specialization.miss") - counts.append((count, opname[i], miss)) - counts.sort(reverse=True) - cumulative = 0 - rows = [] - for (count, name, miss) in counts: - cumulative += count - if miss: - miss = f"{100*miss/count:0.1f}%" - else: - miss = "" - rows.append((name, count, f"{100*count/total:0.1f}%", - f"{100*cumulative/total:0.1f}%", miss)) + rows = calculate_execution_counts(opcode_stats, total) emit_table( ("Name", "Count:", "Self:", "Cumulative:", "Miss ratio:"), rows ) +def emit_comparative_execution_counts( + base_opcode_stats, base_total, head_opcode_stats, head_total +): + with Section("Execution counts", summary="execution counts for all instructions"): + base_rows = calculate_execution_counts(base_opcode_stats, base_total) + head_rows = calculate_execution_counts(head_opcode_stats, head_total) + base_data = dict((x[0], x[1:]) for x in base_rows) + head_data = dict((x[0], x[1:]) for x in head_rows) + opcodes = set(base_data.keys()) | set(head_data.keys()) -def emit_specialization_stats(opcode_stats): + rows = [] + default = [0, "0.0%", "0.0%", 0] + for opcode in opcodes: + base_entry = base_data.get(opcode, default) + head_entry = head_data.get(opcode, default) + if base_entry[0] == 0: + change = 1 + else: + change = (head_entry[0] - base_entry[0]) / base_entry[0] + rows.append( + (opcode, base_entry[0], head_entry[0], + f"{100*change:0.1f}%")) + + rows.sort(key=lambda x: -abs(float(x[-1][:-1]))) + + emit_table( + ("Name", "Base Count:", "Head Count:", "Change:"), + rows + ) + +def get_defines(): spec_path = os.path.join(os.path.dirname(__file__), "../../Python/specialize.c") with open(spec_path) as spec_src: defines = parse_kinds(spec_src) + return defines + +def emit_specialization_stats(opcode_stats): + defines = get_defines() with Section("Specialization stats", summary="specialization stats by family"): for i, opcode_stat in enumerate(opcode_stats): name = opname[i] print_specialization_stats(name, opcode_stat, defines) -def emit_specialization_overview(opcode_stats, total): +def emit_comparative_specialization_stats(base_opcode_stats, head_opcode_stats): + defines = 
get_defines() + with Section("Specialization stats", summary="specialization stats by family"): + for i, (base_opcode_stat, head_opcode_stat) in enumerate(zip(base_opcode_stats, head_opcode_stats)): + name = opname[i] + print_comparative_specialization_stats(name, base_opcode_stat, head_opcode_stat, defines) + +def calculate_specialization_effectiveness(opcode_stats, total): basic, not_specialized, specialized = categorized_counts(opcode_stats) + return [ + ("Basic", basic, f"{basic*100/total:0.1f}%"), + ("Not specialized", not_specialized, f"{not_specialized*100/total:0.1f}%"), + ("Specialized", specialized, f"{specialized*100/total:0.1f}%"), + ] + +def emit_specialization_overview(opcode_stats, total): with Section("Specialization effectiveness"): - emit_table(("Instructions", "Count:", "Ratio:"), ( - ("Basic", basic, f"{basic*100/total:0.1f}%"), - ("Not specialized", not_specialized, f"{not_specialized*100/total:0.1f}%"), - ("Specialized", specialized, f"{specialized*100/total:0.1f}%"), - )) + rows = calculate_specialization_effectiveness(opcode_stats, total) + emit_table(("Instructions", "Count:", "Ratio:"), rows) for title, field in (("Deferred", "specialization.deferred"), ("Misses", "specialization.miss")): total = 0 counts = [] @@ -270,53 +407,91 @@ def emit_specialization_overview(opcode_stats, total): rows = [ (name, count, f"{100*count/total:0.1f}%") for (count, name) in counts[:10] ] emit_table(("Name", "Count:", "Ratio:"), rows) -def emit_call_stats(stats): +def emit_comparative_specialization_overview(base_opcode_stats, base_total, head_opcode_stats, head_total): + with Section("Specialization effectiveness"): + base_rows = calculate_specialization_effectiveness(base_opcode_stats, base_total) + head_rows = calculate_specialization_effectiveness(head_opcode_stats, head_total) + emit_table( + ("Instructions", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + join_rows(base_rows, head_rows) + ) + +def get_stats_defines(): stats_path = os.path.join(os.path.dirname(__file__), "../../Include/pystats.h") with open(stats_path) as stats_src: defines = parse_kinds(stats_src, prefix="EVAL_CALL") + return defines + +def calculate_call_stats(stats): + defines = get_stats_defines() + total = 0 + for key, value in stats.items(): + if "Calls to" in key: + total += value + rows = [] + for key, value in stats.items(): + if "Calls to" in key: + rows.append((key, value, f"{100*value/total:0.1f}%")) + elif key.startswith("Calls "): + name, index = key[:-1].split("[") + index = int(index) + label = name + " (" + pretty(defines[index][0]) + ")" + rows.append((label, value, f"{100*value/total:0.1f}%")) + for key, value in stats.items(): + if key.startswith("Frame"): + rows.append((key, value, f"{100*value/total:0.1f}%")) + return rows + +def emit_call_stats(stats): with Section("Call stats", summary="Inlined calls and frame stats"): - total = 0 - for key, value in stats.items(): - if "Calls to" in key: - total += value - rows = [] - for key, value in stats.items(): - if "Calls to" in key: - rows.append((key, value, f"{100*value/total:0.1f}%")) - elif key.startswith("Calls "): - name, index = key[:-1].split("[") - index = int(index) - label = name + " (" + pretty(defines[index][0]) + ")" - rows.append((label, value, f"{100*value/total:0.1f}%")) - for key, value in stats.items(): - if key.startswith("Frame"): - rows.append((key, value, f"{100*value/total:0.1f}%")) + rows = calculate_call_stats(stats) emit_table(("", "Count:", "Ratio:"), rows) +def emit_comparative_call_stats(base_stats, 
head_stats): + with Section("Call stats", summary="Inlined calls and frame stats"): + base_rows = calculate_call_stats(base_stats) + head_rows = calculate_call_stats(head_stats) + rows = join_rows(base_rows, head_rows) + rows.sort(key=lambda x: -float(x[-1][:-1])) + emit_table( + ("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), + rows + ) + +def calculate_object_stats(stats): + total_materializations = stats.get("Object new values") + total_allocations = stats.get("Object allocations") + stats.get("Object allocations from freelist") + total_increfs = stats.get("Object interpreter increfs") + stats.get("Object increfs") + total_decrefs = stats.get("Object interpreter decrefs") + stats.get("Object decrefs") + rows = [] + for key, value in stats.items(): + if key.startswith("Object"): + if "materialize" in key: + ratio = f"{100*value/total_materializations:0.1f}%" + elif "allocations" in key: + ratio = f"{100*value/total_allocations:0.1f}%" + elif "increfs" in key: + ratio = f"{100*value/total_increfs:0.1f}%" + elif "decrefs" in key: + ratio = f"{100*value/total_decrefs:0.1f}%" + else: + ratio = "" + label = key[6:].strip() + label = label[0].upper() + label[1:] + rows.append((label, value, ratio)) + return rows + def emit_object_stats(stats): with Section("Object stats", summary="allocations, frees and dict materializatons"): - total_materializations = stats.get("Object new values") - total_allocations = stats.get("Object allocations") + stats.get("Object allocations from freelist") - total_increfs = stats.get("Object interpreter increfs") + stats.get("Object increfs") - total_decrefs = stats.get("Object interpreter decrefs") + stats.get("Object decrefs") - rows = [] - for key, value in stats.items(): - if key.startswith("Object"): - if "materialize" in key: - ratio = f"{100*value/total_materializations:0.1f}%" - elif "allocations" in key: - ratio = f"{100*value/total_allocations:0.1f}%" - elif "increfs" in key: - ratio = f"{100*value/total_increfs:0.1f}%" - elif "decrefs" in key: - ratio = f"{100*value/total_decrefs:0.1f}%" - else: - ratio = "" - label = key[6:].strip() - label = label[0].upper() + label[1:] - rows.append((label, value, ratio)) + rows = calculate_object_stats(stats) emit_table(("", "Count:", "Ratio:"), rows) +def emit_comparative_object_stats(base_stats, head_stats): + with Section("Object stats", summary="allocations, frees and dict materializatons"): + base_rows = calculate_object_stats(base_stats) + head_rows = calculate_object_stats(head_stats) + emit_table(("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), join_rows(base_rows, head_rows)) + def get_total(opcode_stats): total = 0 for opcode_stat in opcode_stats: @@ -377,8 +552,7 @@ def emit_pair_counts(opcode_stats, total): succ_rows ) -def main(): - stats = gather_stats() +def output_single_stats(stats): opcode_stats = extract_opcode_stats(stats) total = get_total(opcode_stats) emit_execution_counts(opcode_stats, total) @@ -387,8 +561,76 @@ def main(): emit_specialization_overview(opcode_stats, total) emit_call_stats(stats) emit_object_stats(stats) + +def output_comparative_stats(base_stats, head_stats): + base_opcode_stats = extract_opcode_stats(base_stats) + base_total = get_total(base_opcode_stats) + + head_opcode_stats = extract_opcode_stats(head_stats) + head_total = get_total(head_opcode_stats) + + emit_comparative_execution_counts( + base_opcode_stats, base_total, head_opcode_stats, head_total + ) + emit_comparative_specialization_stats( + base_opcode_stats, head_opcode_stats + ) 
+ emit_comparative_specialization_overview( + base_opcode_stats, base_total, head_opcode_stats, head_total + ) + emit_comparative_call_stats(base_stats, head_stats) + emit_comparative_object_stats(base_stats, head_stats) + +def output_stats(inputs, json_output=None): + if len(inputs) == 1: + stats = gather_stats(inputs[0]) + if json_output is not None: + json.dump(stats, json_output) + output_single_stats(stats) + elif len(inputs) == 2: + if json_output is not None: + raise ValueError( + "Can not output to JSON when there are multiple inputs" + ) + + base_stats = gather_stats(inputs[0]) + head_stats = gather_stats(inputs[1]) + output_comparative_stats(base_stats, head_stats) + print("---") print("Stats gathered on:", date.today()) +def main(): + parser = argparse.ArgumentParser(description="Summarize pystats results") + + parser.add_argument( + "inputs", + nargs="*", + type=str, + default=[DEFAULT_DIR], + help=f""" + Input source(s). + For each entry, if a .json file, the output provided by --json-output from a previous run; + if a directory, a directory containing raw pystats .txt files. + If one source is provided, its stats are printed. + If two sources are provided, comparative stats are printed. + Default is {DEFAULT_DIR}. + """ + ) + + parser.add_argument( + "--json-output", + nargs="?", + type=argparse.FileType("w"), + help="Output complete raw results to the given JSON file." + ) + + args = parser.parse_args() + + if len(args.inputs) > 2: + raise ValueError("0-2 arguments may be provided.") + + output_stats(args.inputs, json_output=args.json_output) + if __name__ == "__main__": main() diff --git a/Tools/scripts/win_add2path.py b/Tools/scripts/win_add2path.py deleted file mode 100644 index 1c9aedc5ed8dca..00000000000000 --- a/Tools/scripts/win_add2path.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Add Python to the search path on Windows - -This is a simple script to add Python to the Windows search path. It -modifies the current user (HKCU) tree of the registry. - -Copyright (c) 2008 by Christian Heimes -Licensed to PSF under a Contributor Agreement. 
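Reduced to its core (an illustrative condensation of mine, not the original `modify()` function), the registry edit this deleted helper performed is:

```python
# Illustrative condensation (Windows-only, winreg module): append a directory to
# the per-user PATH value under HKCU\Environment, as the deleted script did.
import os
import winreg

def append_to_user_path(new_dir):
    with winreg.CreateKey(winreg.HKEY_CURRENT_USER, "Environment") as key:
        try:
            envpath = winreg.QueryValueEx(key, "PATH")[0]
        except OSError:
            envpath = "%PATH%"                 # same default the script used
        if new_dir not in envpath and os.path.isdir(new_dir):
            envpath = envpath + os.pathsep + new_dir
            winreg.SetValueEx(key, "PATH", 0, winreg.REG_EXPAND_SZ, envpath)
    return envpath
```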
-""" - -import sys -import site -import os -import winreg - -HKCU = winreg.HKEY_CURRENT_USER -ENV = "Environment" -PATH = "PATH" -DEFAULT = "%PATH%" - -def modify(): - pythonpath = os.path.dirname(os.path.normpath(sys.executable)) - scripts = os.path.join(pythonpath, "Scripts") - appdata = os.environ["APPDATA"] - if hasattr(site, "USER_SITE"): - usersite = site.USER_SITE.replace(appdata, "%APPDATA%") - userpath = os.path.dirname(usersite) - userscripts = os.path.join(userpath, "Scripts") - else: - userscripts = None - - with winreg.CreateKey(HKCU, ENV) as key: - try: - envpath = winreg.QueryValueEx(key, PATH)[0] - except OSError: - envpath = DEFAULT - - paths = [envpath] - for path in (pythonpath, scripts, userscripts): - if path and path not in envpath and os.path.isdir(path): - paths.append(path) - - envpath = os.pathsep.join(paths) - winreg.SetValueEx(key, PATH, 0, winreg.REG_EXPAND_SZ, envpath) - return paths, envpath - -def main(): - paths, envpath = modify() - if len(paths) > 1: - print("Path(s) added:") - print('\n'.join(paths[1:])) - else: - print("No path was added") - print("\nPATH is now:\n%s\n" % envpath) - print("Expanded:") - print(winreg.ExpandEnvironmentStrings(envpath)) - -if __name__ == '__main__': - main() diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index d64b4f66f5192b..30d66964fd1dcf 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -46,8 +46,8 @@ ] OPENSSL_RECENT_VERSIONS = [ - "1.1.1q", - "3.0.5" + "1.1.1s", + "3.0.7" ] LIBRESSL_OLD_VERSIONS = [ diff --git a/Tools/wasm/README.md b/Tools/wasm/README.md index fe9a1dc99b30f4..f8dd6e675ea7f1 100644 --- a/Tools/wasm/README.md +++ b/Tools/wasm/README.md @@ -157,7 +157,7 @@ functions. - Threading is disabled by default. The ``configure`` option ``--enable-wasm-pthreads`` adds compiler flag ``-pthread`` and - linker flags ``-sUSE_PTHREADS -sPROXY_TO_PTHREAD``. + linker flags ``-sUSE_PTHREADS -sPROXY_TO_PTHREAD``. - pthread support requires WASM threads and SharedArrayBuffer (bulk memory). The Node.JS runtime keeps a pool of web workers around. Each web worker uses several file descriptors (eventfd, epoll, pipe). @@ -203,7 +203,7 @@ functions. - The interactive shell does not handle copy 'n paste and unicode support well. - The bundled stdlib is limited. Network-related modules, - distutils, multiprocessing, dbm, tests and similar modules + multiprocessing, dbm, tests and similar modules are not shipped. All other modules are bundled as pre-compiled ``pyc`` files. - In-memory file system (MEMFS) is not persistent and limited. @@ -283,7 +283,7 @@ popd ## WASI limitations and issues (WASI SDK 15.0) -A lot of Emscripten limitations also apply to WASI. Noticable restrictions +A lot of Emscripten limitations also apply to WASI. Noticeable restrictions are: - Call stack size is limited. Default recursion limit and parser stack size diff --git a/Tools/wasm/config.site-wasm32-emscripten b/Tools/wasm/config.site-wasm32-emscripten index b695a7bf8f04da..1471546a5eec17 100644 --- a/Tools/wasm/config.site-wasm32-emscripten +++ b/Tools/wasm/config.site-wasm32-emscripten @@ -14,9 +14,6 @@ ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptmx=no ac_cv_file__dev_ptc=no -# dummy readelf, Emscripten build does not need readelf. 
-ac_cv_prog_ac_ct_READELF=true - # new undefined symbols / unsupported features ac_cv_func_posix_spawn=no ac_cv_func_posix_spawnp=no diff --git a/Tools/wasm/config.site-wasm32-wasi b/Tools/wasm/config.site-wasm32-wasi index 893a0d132cda59..4b8df2229915cc 100644 --- a/Tools/wasm/config.site-wasm32-wasi +++ b/Tools/wasm/config.site-wasm32-wasi @@ -9,9 +9,6 @@ ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptmx=no ac_cv_file__dev_ptc=no -# dummy readelf, WASI build does not need readelf. -ac_cv_prog_ac_ct_READELF=true - # get/setrlimit are not supported ac_cv_header_sys_resource_h=no diff --git a/Tools/wasm/wasm_assets.py b/Tools/wasm/wasm_assets.py index 98a2841ce2b0e0..9dc8bda4017e2c 100755 --- a/Tools/wasm/wasm_assets.py +++ b/Tools/wasm/wasm_assets.py @@ -41,16 +41,12 @@ "ensurepip/", "venv/", # build system - "distutils/", "lib2to3/", # deprecated - "asyncore.py", - "asynchat.py", "uu.py", "xdrlib.py", # other platforms "_aix_support.py", - "_bootsubprocess.py", "_osx_support.py", # webbrowser "antigravity.py", diff --git a/Tools/wasm/wasm_build.py b/Tools/wasm/wasm_build.py index 63812c6f3153f2..493682c5b138a3 100755 --- a/Tools/wasm/wasm_build.py +++ b/Tools/wasm/wasm_build.py @@ -137,7 +137,7 @@ def read_python_version(configure: pathlib.Path = CONFIGURE) -> str: configure and configure.ac are the canonical source for major and minor version number. """ - version_re = re.compile("^PACKAGE_VERSION='(\d\.\d+)'") + version_re = re.compile(r"^PACKAGE_VERSION='(\d\.\d+)'") with configure.open(encoding="utf-8") as f: for line in f: mo = version_re.match(line) diff --git a/configure b/configure index 0e9f72faa7b120..3f8daf9dad5fd8 100755 --- a/configure +++ b/configure @@ -642,6 +642,8 @@ MODULE__TESTBUFFER_FALSE MODULE__TESTBUFFER_TRUE MODULE__TESTINTERNALCAPI_FALSE MODULE__TESTINTERNALCAPI_TRUE +MODULE__TESTCLINIC_FALSE +MODULE__TESTCLINIC_TRUE MODULE__TESTCAPI_FALSE MODULE__TESTCAPI_TRUE MODULE__HASHLIB_FALSE @@ -904,8 +906,6 @@ MKDIR_P INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM -ac_ct_READELF -READELF ARFLAGS ac_ct_AR AR @@ -3361,7 +3361,7 @@ fi -for ac_prog in python$PACKAGE_VERSION python3.10 python3.9 python3.8 python3.7 python3.6 python3 python +for ac_prog in python$PACKAGE_VERSION python3.11 python3.10 python3.9 python3.8 python3.7 python3.6 python3 python do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 @@ -6755,7 +6755,7 @@ if test "x$enable_profiling" = xyes; then CC="$CC -pg" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() { return 0; } +int main(void) { return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : @@ -7181,116 +7181,6 @@ then ARFLAGS="rcs" fi -if test -n "$ac_tool_prefix"; then - for ac_prog in readelf - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_READELF+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$READELF"; then - ac_cv_prog_READELF="$READELF" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_READELF="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -READELF=$ac_cv_prog_READELF -if test -n "$READELF"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $READELF" >&5 -$as_echo "$READELF" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$READELF" && break - done -fi -if test -z "$READELF"; then - ac_ct_READELF=$READELF - for ac_prog in readelf -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_READELF+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_READELF"; then - ac_cv_prog_ac_ct_READELF="$ac_ct_READELF" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_READELF="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_READELF=$ac_cv_prog_ac_ct_READELF -if test -n "$ac_ct_READELF"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_READELF" >&5 -$as_echo "$ac_ct_READELF" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_READELF" && break -done - - if test "x$ac_ct_READELF" = x; then - READELF=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - READELF=$ac_ct_READELF - fi -fi - -if test "$cross_compiling" = yes; then - case "$READELF" in - readelf|:) - as_fn_error $? 
"readelf for the host is required for cross builds" "$LINENO" 5 - ;; - esac -fi - - - case $MACHDEP in hp*|HP*) # install -d does not work on HP-UX @@ -9330,7 +9220,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9385,7 +9275,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9434,7 +9324,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9483,7 +9373,7 @@ else void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -9558,7 +9448,7 @@ for ac_header in \ alloca.h asm/types.h bluetooth.h conio.h crypt.h direct.h dlfcn.h endian.h errno.h fcntl.h grp.h \ ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/memfd.h \ linux/random.h linux/soundcard.h \ - linux/tipc.h linux/wait.h netdb.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ + linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \ sys/endian.h sys/epoll.h sys/event.h sys/eventfd.h sys/file.h sys/ioctl.h sys/kern_control.h \ sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/poll.h \ @@ -11629,7 +11519,7 @@ if test "x$perf_trampoline" = xyes; then : $as_echo "#define PY_HAVE_PERF_TRAMPOLINE 1" >>confdefs.h - PERF_TRAMPOLINE_OBJ=Objects/asm_trampoline.o + PERF_TRAMPOLINE_OBJ=Python/asm_trampoline.o if test "x$Py_DEBUG" = xtrue; then : @@ -12343,7 +12233,7 @@ else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() +int main(void) { char s[16]; int i, *p1, *p2; @@ -12619,7 +12509,7 @@ else LIBEXPAT_CFLAGS="-I\$(srcdir)/Modules/expat" LIBEXPAT_LDFLAGS="-lm \$(LIBEXPAT_A)" - LIBEXPAT_INTERNAL="\$(LIBEXPAT_A)" + LIBEXPAT_INTERNAL="\$(LIBEXPAT_HEADERS) \$(LIBEXPAT_A)" fi @@ -13128,7 +13018,7 @@ else LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" LIBMPDEC_LDFLAGS="-lm \$(LIBMPDEC_A)" - LIBMPDEC_INTERNAL="\$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" if test "x$with_pydebug" = xyes; then : @@ -15152,6 +15042,7 @@ $as_echo_n "checking for pthread_create in -lpthread... " >&6; } /* end confdefs.h. 
*/ #include +#include #include void * start_routine (void *arg) { exit (0); } @@ -15462,7 +15353,7 @@ else void *foo(void *parm) { return NULL; } - main() { + int main(void) { pthread_attr_t attr; pthread_t id; if (pthread_attr_init(&attr)) return (-1); @@ -17064,7 +16955,7 @@ else #include #include -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(chflags(argv[0], 0) != 0) return 1; @@ -17113,7 +17004,7 @@ else #include #include -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(lchflags(argv[0], 0) != 0) return 1; @@ -19111,6 +19002,20 @@ fi done +# check for namespace functions +for ac_func in setns unshare +do : + as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +if eval test \"x\$"$as_ac_var"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +_ACEOF + +fi +done + + @@ -19807,7 +19712,7 @@ else #include #include -int main() +int main(void) { int passive, gaierr, inet4 = 0, inet6 = 0; struct addrinfo hints, *ai, *aitop; @@ -21004,7 +20909,7 @@ else #include #include -int main() { +int main(void) { volatile double x, y, z; /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */ x = 0.99999999999999989; /* 1-2**-53 */ @@ -21783,7 +21688,7 @@ else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -int main() +int main(void) { return (((-1)>>3 == -1) ? 0 : 1); } @@ -22685,7 +22590,7 @@ else #include #include -int main() +int main(void) { int val1 = nice(1); if (val1 != -1 && val1 == nice(2)) @@ -22727,7 +22632,7 @@ else #include #include -int main() +int main(void) { struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 }; int poll_test; @@ -22784,7 +22689,7 @@ else extern char *tzname[]; #endif -int main() +int main(void) { /* Note that we need to ensure that not only does tzset(3) do 'something' with localtime, but it works as documented @@ -24446,9 +24351,10 @@ else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ +#include #include -#include -int main() { +#include +int main(void) { size_t len = -1; const char *str = "text"; len = mbstowcs(NULL, str, 0); @@ -24648,7 +24554,7 @@ else #include #include void foo(void *p, void *q) { memmove(p, q, 19); } -int main() { +int main(void) { char a[32] = "123456789000000000"; foo(&a[9], a); if (strcmp(a, "123456789123456789000000000") != 0) @@ -24703,7 +24609,7 @@ else ); return r; } - int main() { + int main(void) { int p = 8; if ((foo(&p) ? : p) != 6) return 1; @@ -24746,7 +24652,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext #include atomic_int int_var; atomic_uintptr_t uintptr_var; - int main() { + int main(void) { atomic_store_explicit(&int_var, 5, memory_order_relaxed); atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed); int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst); @@ -24787,7 +24693,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext int val; - int main() { + int main(void) { __atomic_store_n(&val, 1, __ATOMIC_SEQ_CST); (void)__atomic_load_n(&val, __ATOMIC_SEQ_CST); return 0; @@ -24863,7 +24769,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext #include - int main() { + int main(void) { struct dirent entry; return entry.d_type == DT_UNKNOWN; } @@ -24901,11 +24807,12 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ + #include #include #include #include - int main() { + int main(void) { char buffer[1]; const size_t buflen = sizeof(buffer); const int flags = GRND_NONBLOCK; @@ -24948,9 +24855,10 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ + #include #include - int main() { + int main(void) { char buffer[1]; const size_t buflen = sizeof(buffer); const int flags = 0; @@ -27845,6 +27753,40 @@ fi $as_echo "$py_cv_module__testcapi" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdlib extension module _testclinic" >&5 +$as_echo_n "checking for stdlib extension module _testclinic... " >&6; } + if test "$py_cv_module__testclinic" != "n/a"; then : + + if test "$TEST_MODULES" = yes; then : + if true; then : + py_cv_module__testclinic=yes +else + py_cv_module__testclinic=missing +fi +else + py_cv_module__testclinic=disabled +fi + +fi + as_fn_append MODULE_BLOCK "MODULE__TESTCLINIC_STATE=$py_cv_module__testclinic$as_nl" + if test "x$py_cv_module__testclinic" = xyes; then : + + + + +fi + if test "$py_cv_module__testclinic" = yes; then + MODULE__TESTCLINIC_TRUE= + MODULE__TESTCLINIC_FALSE='#' +else + MODULE__TESTCLINIC_TRUE='#' + MODULE__TESTCLINIC_FALSE= +fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $py_cv_module__testclinic" >&5 +$as_echo "$py_cv_module__testclinic" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdlib extension module _testinternalcapi" >&5 $as_echo_n "checking for stdlib extension module _testinternalcapi... " >&6; } if test "$py_cv_module__testinternalcapi" != "n/a"; then : @@ -28564,6 +28506,10 @@ if test -z "${MODULE__TESTCAPI_TRUE}" && test -z "${MODULE__TESTCAPI_FALSE}"; th as_fn_error $? "conditional \"MODULE__TESTCAPI\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi +if test -z "${MODULE__TESTCLINIC_TRUE}" && test -z "${MODULE__TESTCLINIC_FALSE}"; then + as_fn_error $? "conditional \"MODULE__TESTCLINIC\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi if test -z "${MODULE__TESTINTERNALCAPI_TRUE}" && test -z "${MODULE__TESTINTERNALCAPI_FALSE}"; then as_fn_error $? "conditional \"MODULE__TESTINTERNALCAPI\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 diff --git a/configure.ac b/configure.ac index a4f3b0ab84664a..734a4db8389915 100644 --- a/configure.ac +++ b/configure.ac @@ -203,7 +203,7 @@ AC_SUBST([FREEZE_MODULE_DEPS]) AC_SUBST([PYTHON_FOR_BUILD_DEPS]) AC_CHECK_PROGS([PYTHON_FOR_REGEN], - [python$PACKAGE_VERSION python3.10 python3.9 python3.8 python3.7 python3.6 python3 python], + [python$PACKAGE_VERSION python3.11 python3.10 python3.9 python3.8 python3.7 python3.6 python3 python], [python3]) AC_SUBST(PYTHON_FOR_REGEN) @@ -1427,7 +1427,7 @@ AC_ARG_ENABLE(profiling, if test "x$enable_profiling" = xyes; then ac_save_cc="$CC" CC="$CC -pg" - AC_LINK_IFELSE([AC_LANG_SOURCE([[int main() { return 0; }]])], + AC_LINK_IFELSE([AC_LANG_SOURCE([[int main(void) { return 0; }]])], [], [enable_profiling=no]) CC="$ac_save_cc" @@ -1617,17 +1617,6 @@ then ARFLAGS="rcs" fi -AC_CHECK_TOOLS([READELF], [readelf], [:]) -if test "$cross_compiling" = yes; then - case "$READELF" in - readelf|:) - AC_MSG_ERROR([readelf for the host is required for cross builds]) - ;; - esac -fi -AC_SUBST(READELF) - - case $MACHDEP in hp*|HP*) # install -d does not work on HP-UX @@ -2564,7 +2553,7 @@ AC_CACHE_CHECK([whether pthreads are available without options], void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -2597,7 +2586,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -2624,7 +2613,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -2651,7 +2640,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ void* routine(void* p){return NULL;} -int main(){ +int main(void){ pthread_t p; if(pthread_create(&p,NULL,routine,NULL)!=0) return 1; @@ -2719,7 +2708,7 @@ AC_CHECK_HEADERS([ \ alloca.h asm/types.h bluetooth.h conio.h crypt.h direct.h dlfcn.h endian.h errno.h fcntl.h grp.h \ ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/memfd.h \ linux/random.h linux/soundcard.h \ - linux/tipc.h linux/wait.h netdb.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ + linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \ sys/endian.h sys/epoll.h sys/event.h sys/eventfd.h sys/file.h sys/ioctl.h sys/kern_control.h \ sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/poll.h \ @@ -3474,7 +3463,7 @@ AC_MSG_RESULT([$perf_trampoline]) AS_VAR_IF([perf_trampoline], [yes], [ AC_DEFINE([PY_HAVE_PERF_TRAMPOLINE], [1], [Define to 1 if you have the perf trampoline.]) - PERF_TRAMPOLINE_OBJ=Objects/asm_trampoline.o + PERF_TRAMPOLINE_OBJ=Python/asm_trampoline.o dnl perf needs frame pointers for unwinding, include compiler option in debug builds AS_VAR_IF([Py_DEBUG], [true], [ @@ -3589,7 +3578,7 @@ esac # check for systems that require aligned memory access AC_CACHE_CHECK([aligned memory access is required], [ac_cv_aligned_required], [AC_RUN_IFELSE([AC_LANG_SOURCE([[ -int main() +int main(void) { char s[16]; int i, *p1, *p2; @@ -3710,7 +3699,7 @@ AS_VAR_IF([with_system_expat], [yes], [ ], [ LIBEXPAT_CFLAGS="-I\$(srcdir)/Modules/expat" LIBEXPAT_LDFLAGS="-lm \$(LIBEXPAT_A)" - LIBEXPAT_INTERNAL="\$(LIBEXPAT_A)" + 
LIBEXPAT_INTERNAL="\$(LIBEXPAT_HEADERS) \$(LIBEXPAT_A)" ]) AC_SUBST([LIBEXPAT_CFLAGS]) @@ -3819,7 +3808,7 @@ AS_VAR_IF([with_system_libmpdec], [yes], [ ], [ LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" LIBMPDEC_LDFLAGS="-lm \$(LIBMPDEC_A)" - LIBMPDEC_INTERNAL="\$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" dnl Disable forced inlining in debug builds, see GH-94847 AS_VAR_IF([with_pydebug], [yes], [ @@ -4303,6 +4292,7 @@ yes AC_MSG_CHECKING([for pthread_create in -lpthread]) AC_LINK_IFELSE([AC_LANG_PROGRAM([[ #include +#include #include void * start_routine (void *arg) { exit (0); }]], [[ @@ -4372,7 +4362,7 @@ if test "$posix_threads" = "yes"; then void *foo(void *parm) { return NULL; } - main() { + int main(void) { pthread_attr_t attr; pthread_t id; if (pthread_attr_init(&attr)) return (-1); @@ -4909,7 +4899,7 @@ AC_CACHE_CHECK([for chflags], [ac_cv_have_chflags], [dnl AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(chflags(argv[0], 0) != 0) return 1; @@ -4931,7 +4921,7 @@ AC_CACHE_CHECK([for lchflags], [ac_cv_have_lchflags], [dnl AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include -int main(int argc, char*argv[]) +int main(int argc, char *argv[]) { if(lchflags(argv[0], 0) != 0) return 1; @@ -5083,6 +5073,9 @@ AC_CHECK_FUNCS(setpgrp, []) ) +# check for namespace functions +AC_CHECK_FUNCS([setns unshare]) + dnl We search for both crypt and crypt_r as one or the other may be defined dnl libxcrypt provides and libcrypt with crypt_r() since dnl at least 3.1.1 from 2015. @@ -5204,7 +5197,7 @@ AS_VAR_IF([ac_cv_func_getaddrinfo], [yes], [ #include #include -int main() +int main(void) { int passive, gaierr, inet4 = 0, inet6 = 0; struct addrinfo hints, *ai, *aitop; @@ -5620,7 +5613,7 @@ CC="$CC $BASECFLAGS" AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include -int main() { +int main(void) { volatile double x, y, z; /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */ x = 0.99999999999999989; /* 1-2**-53 */ @@ -5927,7 +5920,7 @@ fi], # or fills with zeros (like the Cray J90, according to Tim Peters). AC_CACHE_CHECK([whether right shift extends the sign bit], [ac_cv_rshift_extends_sign], [ AC_RUN_IFELSE([AC_LANG_SOURCE([[ -int main() +int main(void) { return (((-1)>>3 == -1) ? 
0 : 1); } @@ -6126,7 +6119,7 @@ AC_CACHE_CHECK([for broken nice()], [ac_cv_broken_nice], [ AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include -int main() +int main(void) { int val1 = nice(1); if (val1 != -1 && val1 == nice(2)) @@ -6148,7 +6141,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include -int main() +int main(void) { struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 }; int poll_test; @@ -6184,7 +6177,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ extern char *tzname[]; #endif -int main() +int main(void) { /* Note that we need to ensure that not only does tzset(3) do 'something' with localtime, but it works as documented @@ -6525,9 +6518,10 @@ AC_CHECK_TYPE(socklen_t,, AC_CACHE_CHECK([for broken mbstowcs], [ac_cv_broken_mbstowcs], AC_RUN_IFELSE([AC_LANG_SOURCE([[ +#include #include -#include -int main() { +#include +int main(void) { size_t len = -1; const char *str = "text"; len = mbstowcs(NULL, str, 0); @@ -6654,7 +6648,7 @@ AC_RUN_IFELSE([AC_LANG_SOURCE([[ #include #include void foo(void *p, void *q) { memmove(p, q, 19); } -int main() { +int main(void) { char a[32] = "123456789000000000"; foo(&a[9], a); if (strcmp(a, "123456789123456789000000000") != 0) @@ -6695,7 +6689,7 @@ if test "$ac_cv_gcc_asm_for_x87" = yes; then ); return r; } - int main() { + int main(void) { int p = 8; if ((foo(&p) ? : p) != 6) return 1; @@ -6723,7 +6717,7 @@ AC_LINK_IFELSE( #include atomic_int int_var; atomic_uintptr_t uintptr_var; - int main() { + int main(void) { atomic_store_explicit(&int_var, 5, memory_order_relaxed); atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed); int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst); @@ -6744,7 +6738,7 @@ AC_LINK_IFELSE( [ AC_LANG_SOURCE([[ int val; - int main() { + int main(void) { __atomic_store_n(&val, 1, __ATOMIC_SEQ_CST); (void)__atomic_load_n(&val, __ATOMIC_SEQ_CST); return 0; @@ -6785,7 +6779,7 @@ AC_LINK_IFELSE( AC_LANG_SOURCE([[ #include - int main() { + int main(void) { struct dirent entry; return entry.d_type == DT_UNKNOWN; } @@ -6803,11 +6797,12 @@ AC_CACHE_CHECK([for the Linux getrandom() syscall], [ac_cv_getrandom_syscall], [ AC_LINK_IFELSE( [ AC_LANG_SOURCE([[ + #include #include #include #include - int main() { + int main(void) { char buffer[1]; const size_t buflen = sizeof(buffer); const int flags = GRND_NONBLOCK; @@ -6830,9 +6825,10 @@ AC_CACHE_CHECK([for the getrandom() function], [ac_cv_func_getrandom], [ AC_LINK_IFELSE( [ AC_LANG_SOURCE([[ + #include #include - int main() { + int main(void) { char buffer[1]; const size_t buflen = sizeof(buffer); const int flags = 0; @@ -7358,6 +7354,7 @@ PY_STDLIB_MOD([_hashlib], [], [test "$ac_cv_working_openssl_hashlib" = yes], dnl test modules PY_STDLIB_MOD([_testcapi], [test "$TEST_MODULES" = yes]) +PY_STDLIB_MOD([_testclinic], [test "$TEST_MODULES" = yes]) PY_STDLIB_MOD([_testinternalcapi], [test "$TEST_MODULES" = yes]) PY_STDLIB_MOD([_testbuffer], [test "$TEST_MODULES" = yes]) PY_STDLIB_MOD([_testimportmultiple], [test "$TEST_MODULES" = yes], [test "$ac_cv_func_dlopen" = yes]) diff --git a/pyconfig.h.in b/pyconfig.h.in index 1ce09855f5559d..236cee6588d49b 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -844,6 +844,9 @@ /* Define to 1 if you have the header file. */ #undef HAVE_NETPACKET_PACKET_H +/* Define to 1 if you have the header file. */ +#undef HAVE_NET_ETHERNET_H + /* Define to 1 if you have the header file. */ #undef HAVE_NET_IF_H @@ -1067,6 +1070,9 @@ /* Define to 1 if you have the `setlocale' function. 
*/ #undef HAVE_SETLOCALE +/* Define to 1 if you have the `setns' function. */ +#undef HAVE_SETNS + /* Define to 1 if you have the `setpgid' function. */ #undef HAVE_SETPGID @@ -1437,6 +1443,9 @@ /* Define to 1 if you have the `unlinkat' function. */ #undef HAVE_UNLINKAT +/* Define to 1 if you have the `unshare' function. */ +#undef HAVE_UNSHARE + /* Define if you have a useable wchar_t type defined in wchar.h; useable means wchar_t must be an unsigned type with at least 16 bits. (see Include/unicodeobject.h). */
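The new HAVE_SETNS and HAVE_UNSHARE defines back the Linux namespace checks added to configure.ac above. Assuming they end up surfaced as `os.setns()` and `os.unshare()` (that exposure is an assumption of this note, not something the diff itself shows, which is why the sketch guards with `hasattr`), typical use would look like:

```python
# Hedged example: move the current process into a private mount namespace,
# provided the platform and build expose the Linux-only wrappers and constants.
import os

if hasattr(os, "unshare") and hasattr(os, "CLONE_NEWNS"):
    os.unshare(os.CLONE_NEWNS)   # typically requires CAP_SYS_ADMIN
    print("now running in a private mount namespace")
else:
    print("os.unshare()/CLONE_NEWNS not available in this build")
```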